kafka-commits mailing list archives

From ij...@apache.org
Subject [kafka] branch trunk updated: MINOR: Remove unused TopicAndPartition and remove unused symbols (#7119)
Date Sun, 28 Jul 2019 16:38:47 GMT
This is an automated email from the ASF dual-hosted git repository.

ijuma pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
     new acf507c  MINOR: Remove unused TopicAndPartition and remove unused symbols (#7119)
acf507c is described below

commit acf507ce9cd0a6037da3e4c561bd4c50ba526f8a
Author: Ismael Juma <ismael@juma.me.uk>
AuthorDate: Sun Jul 28 09:38:26 2019 -0700

    MINOR: Remove unused TopicAndPartition and remove unused symbols (#7119)
    
    With the removal of ZkUtils and AdminUtils, TopicAndPartition is finally
    unused.
    
    Reviewers: Manikumar Reddy <manikumar.reddy@gmail.com>
---
 core/src/main/scala/kafka/admin/AclCommand.scala   |  4 +--
 .../scala/kafka/common/TopicAndPartition.scala     | 30 ----------------------
 .../scala/kafka/controller/KafkaController.scala   |  2 +-
 .../coordinator/group/GroupMetadataManager.scala   |  2 +-
 .../main/scala/kafka/log/LogCleanerManager.scala   |  2 +-
 .../kafka/security/auth/SimpleAclAuthorizer.scala  |  5 ++--
 core/src/main/scala/kafka/server/KafkaApis.scala   |  2 +-
 core/src/main/scala/kafka/server/KafkaConfig.scala |  4 +--
 core/src/main/scala/kafka/zk/KafkaZkClient.scala   |  2 +-
 9 files changed, 11 insertions(+), 42 deletions(-)

diff --git a/core/src/main/scala/kafka/admin/AclCommand.scala b/core/src/main/scala/kafka/admin/AclCommand.scala
index 7ab379d..6f5b06c 100644
--- a/core/src/main/scala/kafka/admin/AclCommand.scala
+++ b/core/src/main/scala/kafka/admin/AclCommand.scala
@@ -280,9 +280,9 @@ object AclCommand extends Logging {
     private def getAcls(authorizer: Authorizer, filter: ResourcePatternFilter,
                        listPrincipal: Option[KafkaPrincipal] = None): Map[Resource, Set[Acl]] =
       if (listPrincipal.isEmpty)
-        authorizer.getAcls().filter { case (resource, acl) => filter.matches(resource.toPattern) }
+        authorizer.getAcls().filter { case (resource, _) => filter.matches(resource.toPattern) }
       else
-        authorizer.getAcls(listPrincipal.get).filter { case (resource, acl) => filter.matches(resource.toPattern) }
+        authorizer.getAcls(listPrincipal.get).filter { case (resource, _) => filter.matches(resource.toPattern) }
 
   }
 
diff --git a/core/src/main/scala/kafka/common/TopicAndPartition.scala b/core/src/main/scala/kafka/common/TopicAndPartition.scala
deleted file mode 100644
index 6c27695..0000000
--- a/core/src/main/scala/kafka/common/TopicAndPartition.scala
+++ /dev/null
@@ -1,30 +0,0 @@
-package kafka.common
-
-import org.apache.kafka.common.TopicPartition
-
-/**
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Convenience case class since (topic, partition) pairs are ubiquitous.
- */
-case class TopicAndPartition(topic: String, partition: Int) {
-
-  def this(topicPartition: TopicPartition) = this(topicPartition.topic, topicPartition.partition)
-
-  override def toString: String = s"$topic-$partition"
-}
diff --git a/core/src/main/scala/kafka/controller/KafkaController.scala b/core/src/main/scala/kafka/controller/KafkaController.scala
index 75aeba2..4ca4b49 100644
--- a/core/src/main/scala/kafka/controller/KafkaController.scala
+++ b/core/src/main/scala/kafka/controller/KafkaController.scala
@@ -976,7 +976,7 @@ class KafkaController(val config: KafkaConfig,
              finalLeaderIsrAndControllerEpoch = Some(LeaderIsrAndControllerEpoch(leaderAndIsr, epoch))
               info(s"Updated leader epoch for partition $partition to ${leaderAndIsr.leaderEpoch}")
               true
-            case (partition, Left(e)) =>
+            case (_, Left(e)) =>
               throw e
           }.getOrElse(false)
         case None =>
diff --git a/core/src/main/scala/kafka/coordinator/group/GroupMetadataManager.scala b/core/src/main/scala/kafka/coordinator/group/GroupMetadataManager.scala
index 6ef8ff3..c666975 100644
--- a/core/src/main/scala/kafka/coordinator/group/GroupMetadataManager.scala
+++ b/core/src/main/scala/kafka/coordinator/group/GroupMetadataManager.scala
@@ -312,7 +312,7 @@ class GroupMetadataManager(brokerId: Int,
     // construct the message set to append
     if (filteredOffsetMetadata.isEmpty) {
       // compute the final error codes for the commit response
-      val commitStatus = offsetMetadata.map { case (k, v) => k -> Errors.OFFSET_METADATA_TOO_LARGE }
+      val commitStatus = offsetMetadata.map { case (k, _) => k -> Errors.OFFSET_METADATA_TOO_LARGE }
       responseCallback(commitStatus)
       None
     } else {
diff --git a/core/src/main/scala/kafka/log/LogCleanerManager.scala b/core/src/main/scala/kafka/log/LogCleanerManager.scala
index 2397e02..d46dd94 100755
--- a/core/src/main/scala/kafka/log/LogCleanerManager.scala
+++ b/core/src/main/scala/kafka/log/LogCleanerManager.scala
@@ -333,7 +333,7 @@ private[log] class LogCleanerManager(val logDirs: Seq[File],
       case None => false
       case Some(state) =>
         state match {
-          case LogCleaningPaused(s) =>
+          case _: LogCleaningPaused =>
             true
           case _ =>
             false
diff --git a/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala b/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala
index 2eba792..b4cbcfa 100644
--- a/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala
+++ b/core/src/main/scala/kafka/security/auth/SimpleAclAuthorizer.scala
@@ -271,15 +271,14 @@ class SimpleAclAuthorizer extends Authorizer with Logging {
         for (rType <- resourceTypes) {
           val resourceType = Try(ResourceType.fromString(rType))
           resourceType match {
-            case Success(resourceTypeObj) => {
+            case Success(resourceTypeObj) =>
               val resourceNames = zkClient.getResourceNames(store.patternType, resourceTypeObj)
               for (resourceName <- resourceNames) {
                 val resource = new Resource(resourceTypeObj, resourceName, store.patternType)
                 val versionedAcls = getAclsFromZk(resource)
                 updateCache(resource, versionedAcls)
               }
-            }
-            case Failure(f) => warn(s"Ignoring unknown ResourceType: $rType")
+            case Failure(_) => warn(s"Ignoring unknown ResourceType: $rType")
           }
         }
       })
diff --git a/core/src/main/scala/kafka/server/KafkaApis.scala b/core/src/main/scala/kafka/server/KafkaApis.scala
index ecd6c94..2a0fbd3 100644
--- a/core/src/main/scala/kafka/server/KafkaApis.scala
+++ b/core/src/main/scala/kafka/server/KafkaApis.scala
@@ -1336,7 +1336,7 @@ class KafkaApis(val requestChannel: RequestChannel,
   private def authorizedOperations(session: RequestChannel.Session, resource: Resource): Int = {
     val authorizedOps = authorizer match {
       case None => resource.resourceType.supportedOperations
-      case Some(auth) => resource.resourceType.supportedOperations
+      case Some(_) => resource.resourceType.supportedOperations
         .filter(operation => authorize(session, operation, resource))
     }
 
diff --git a/core/src/main/scala/kafka/server/KafkaConfig.scala b/core/src/main/scala/kafka/server/KafkaConfig.scala
index 4765031..5223ebd 100755
--- a/core/src/main/scala/kafka/server/KafkaConfig.scala
+++ b/core/src/main/scala/kafka/server/KafkaConfig.scala
@@ -1311,8 +1311,8 @@ class KafkaConfig(val props: java.util.Map[_, _], doLog: Boolean, dynamicConfigO
 
   def interBrokerListenerName = getInterBrokerListenerNameAndSecurityProtocol._1
   def interBrokerSecurityProtocol = getInterBrokerListenerNameAndSecurityProtocol._2
-  def controlPlaneListenerName = getControlPlaneListenerNameAndSecurityProtocol.map { case (listenerName, securityProtocol) => listenerName }
-  def controlPlaneSecurityProtocol = getControlPlaneListenerNameAndSecurityProtocol.map { case (listenerName, securityProtocol) => securityProtocol }
+  def controlPlaneListenerName = getControlPlaneListenerNameAndSecurityProtocol.map { case (listenerName, _) => listenerName }
+  def controlPlaneSecurityProtocol = getControlPlaneListenerNameAndSecurityProtocol.map { case (_, securityProtocol) => securityProtocol }
   def saslMechanismInterBrokerProtocol = getString(KafkaConfig.SaslMechanismInterBrokerProtocolProp)
   val saslInterBrokerHandshakeRequestEnable = interBrokerProtocolVersion >= KAFKA_0_10_0_IV1
 
diff --git a/core/src/main/scala/kafka/zk/KafkaZkClient.scala b/core/src/main/scala/kafka/zk/KafkaZkClient.scala
index ce82492..ad3069c 100644
--- a/core/src/main/scala/kafka/zk/KafkaZkClient.scala
+++ b/core/src/main/scala/kafka/zk/KafkaZkClient.scala
@@ -1721,7 +1721,7 @@ class KafkaZkClient private[zk] (zooKeeperClient: ZooKeeperClient, isSecure: Boo
           SetDataOp(path, data, 0)))
       )
       val stat = response.resultCode match {
-        case code@ Code.OK =>
+        case Code.OK =>
           val setDataResult = response.zkOpResults(1).rawOpResult.asInstanceOf[SetDataResult]
           setDataResult.getStat
         case Code.NODEEXISTS =>


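For reference, the removed kafka.common.TopicAndPartition was a thin wrapper around a
(topic, partition) pair; equivalent call sites use org.apache.kafka.common.TopicPartition
directly. A minimal illustrative sketch of the mapping, assuming a hypothetical topic name
and wrapper object that are not part of this commit:

import org.apache.kafka.common.TopicPartition

object TopicAndPartitionMigrationSketch {
  def main(args: Array[String]): Unit = {
    // Before (removed in this commit): kafka.common.TopicAndPartition("example-topic", 0)
    // After: the common Java class carrying the same (topic, partition) pair
    val tp = new TopicPartition("example-topic", 0)

    println(tp.topic())     // example-topic
    println(tp.partition()) // 0
    println(tp)             // example-topic-0, same "$topic-$partition" format as the removed class
  }
}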