kafka-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From rsiva...@apache.org
Subject [kafka] branch trunk updated: MINOR: Fix some compiler warnings (#4726)
Date Mon, 19 Mar 2018 15:04:13 GMT
This is an automated email from the ASF dual-hosted git repository.

rsivaram pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 6fab286  MINOR: Fix some compiler warnings (#4726)
6fab286 is described below

commit 6fab286da2735517f6d0c945328abcb77fa0a7a4
Author: Ismael Juma <ismael@juma.me.uk>
AuthorDate: Mon Mar 19 08:03:58 2018 -0700

    MINOR: Fix some compiler warnings (#4726)
---
 core/src/main/scala/kafka/server/AbstractFetcherManager.scala     | 1 -
 core/src/main/scala/kafka/server/ConfigHandler.scala              | 1 -
 .../scala/integration/kafka/api/AdminClientIntegrationTest.scala  | 6 +++---
 .../test/scala/integration/kafka/api/LegacyAdminClientTest.scala  | 8 ++++----
 .../test/scala/unit/kafka/admin/DeleteConsumerGroupsTest.scala    | 4 ++--
 .../scala/unit/kafka/admin/ResetConsumerGroupOffsetTest.scala     | 2 +-
 .../scala/unit/kafka/coordinator/group/GroupCoordinatorTest.scala | 2 +-
 .../src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala | 2 +-
 core/src/test/scala/unit/kafka/server/FetchSessionTest.scala      | 4 ++--
 .../scala/unit/kafka/server/ServerGenerateClusterIdTest.scala     | 2 +-
 core/src/test/scala/unit/kafka/server/ServerStartupTest.scala     | 2 +-
 11 files changed, 16 insertions(+), 18 deletions(-)

diff --git a/core/src/main/scala/kafka/server/AbstractFetcherManager.scala b/core/src/main/scala/kafka/server/AbstractFetcherManager.scala
index aa08585..80d07e5 100755
--- a/core/src/main/scala/kafka/server/AbstractFetcherManager.scala
+++ b/core/src/main/scala/kafka/server/AbstractFetcherManager.scala
@@ -26,7 +26,6 @@ import org.apache.kafka.common.utils.Utils
 
 import scala.collection.mutable
 import scala.collection.{Map, Set}
-import scala.collection.JavaConverters._
 
 abstract class AbstractFetcherManager(protected val name: String, clientId: String, numFetchers: Int = 1)
   extends Logging with KafkaMetricsGroup {
diff --git a/core/src/main/scala/kafka/server/ConfigHandler.scala b/core/src/main/scala/kafka/server/ConfigHandler.scala
index 7cad118..78c3abf 100644
--- a/core/src/main/scala/kafka/server/ConfigHandler.scala
+++ b/core/src/main/scala/kafka/server/ConfigHandler.scala
@@ -25,7 +25,6 @@ import kafka.log.{LogConfig, LogManager}
 import kafka.security.CredentialProvider
 import kafka.server.Constants._
 import kafka.server.QuotaFactory.QuotaManagers
-import kafka.utils.Implicits._
 import kafka.utils.Logging
 import org.apache.kafka.common.config.ConfigDef.Validator
 import org.apache.kafka.common.config.ConfigException
diff --git a/core/src/test/scala/integration/kafka/api/AdminClientIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/AdminClientIntegrationTest.scala
index 867e03d..c59fe6d 100644
--- a/core/src/test/scala/integration/kafka/api/AdminClientIntegrationTest.scala
+++ b/core/src/test/scala/integration/kafka/api/AdminClientIntegrationTest.scala
@@ -809,15 +809,15 @@ class AdminClientIntegrationTest extends IntegrationTestHarness with Logging {
     subscribeAndWaitForAssignment(topic, consumer)
 
     sendRecords(producers.head, 10, topicPartition)
-    assertEquals(0L, consumer.offsetsForTimes(Map(topicPartition -> new JLong(0L)).asJava).get(topicPartition).offset())
+    assertEquals(0L, consumer.offsetsForTimes(Map(topicPartition -> JLong.valueOf(0L)).asJava).get(topicPartition).offset())
 
     var result = client.deleteRecords(Map(topicPartition -> RecordsToDelete.beforeOffset(5L)).asJava)
     result.all().get()
-    assertEquals(5L, consumer.offsetsForTimes(Map(topicPartition -> new JLong(0L)).asJava).get(topicPartition).offset())
+    assertEquals(5L, consumer.offsetsForTimes(Map(topicPartition -> JLong.valueOf(0L)).asJava).get(topicPartition).offset())
 
     result = client.deleteRecords(Map(topicPartition -> RecordsToDelete.beforeOffset(DeleteRecordsRequest.HIGH_WATERMARK)).asJava)
     result.all().get()
-    assertNull(consumer.offsetsForTimes(Map(topicPartition -> new JLong(0L)).asJava).get(topicPartition))
+    assertNull(consumer.offsetsForTimes(Map(topicPartition -> JLong.valueOf(0L)).asJava).get(topicPartition))
 
     client.close()
   }
diff --git a/core/src/test/scala/integration/kafka/api/LegacyAdminClientTest.scala b/core/src/test/scala/integration/kafka/api/LegacyAdminClientTest.scala
index 57a2b20..17e2bb9 100644
--- a/core/src/test/scala/integration/kafka/api/LegacyAdminClientTest.scala
+++ b/core/src/test/scala/integration/kafka/api/LegacyAdminClientTest.scala
@@ -160,7 +160,7 @@ class LegacyAdminClientTest extends IntegrationTestHarness with Logging {
   @Test
   def testOffsetsForTimesWhenOffsetNotFound() {
     val consumer = consumers.head
-    assertNull(consumer.offsetsForTimes(Map(tp -> new JLong(0L)).asJava).get(tp))
+    assertNull(consumer.offsetsForTimes(Map(tp -> JLong.valueOf(0L)).asJava).get(tp))
   }
 
   @Test
@@ -169,13 +169,13 @@ class LegacyAdminClientTest extends IntegrationTestHarness with Logging {
     subscribeAndWaitForAssignment(topic, consumer)
 
     sendRecords(producers.head, 10, tp)
-    assertEquals(0L, consumer.offsetsForTimes(Map(tp -> new JLong(0L)).asJava).get(tp).offset())
+    assertEquals(0L, consumer.offsetsForTimes(Map(tp -> JLong.valueOf(0L)).asJava).get(tp).offset())
 
     client.deleteRecordsBefore(Map((tp, 5L))).get()
-    assertEquals(5L, consumer.offsetsForTimes(Map(tp -> new JLong(0L)).asJava).get(tp).offset())
+    assertEquals(5L, consumer.offsetsForTimes(Map(tp -> JLong.valueOf(0L)).asJava).get(tp).offset())
 
     client.deleteRecordsBefore(Map((tp, DeleteRecordsRequest.HIGH_WATERMARK))).get()
-    assertNull(consumer.offsetsForTimes(Map(tp -> new JLong(0L)).asJava).get(tp))
+    assertNull(consumer.offsetsForTimes(Map(tp -> JLong.valueOf(0L)).asJava).get(tp))
   }
 
   @Test
diff --git a/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupsTest.scala b/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupsTest.scala
index cc236d5..effa55d 100644
--- a/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupsTest.scala
+++ b/core/src/test/scala/unit/kafka/admin/DeleteConsumerGroupsTest.scala
@@ -227,7 +227,7 @@ class DeleteConsumerGroupTest extends ConsumerGroupCommandTest {
   @Test
   def testDeleteCmdWithShortInitialization() {
     // run one consumer in the group
-    val executor = addConsumerGroupExecutor(numConsumers = 1)
+    addConsumerGroupExecutor(numConsumers = 1)
     val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
     val service = getConsumerGroupService(cgcArgs)
 
@@ -239,7 +239,7 @@ class DeleteConsumerGroupTest extends ConsumerGroupCommandTest {
   @Test
   def testDeleteWithShortInitialization() {
     // run one consumer in the group
-    val executor = addConsumerGroupExecutor(numConsumers = 1)
+    addConsumerGroupExecutor(numConsumers = 1)
     val cgcArgs = Array("--bootstrap-server", brokerList, "--delete", "--group", group)
     val service = getConsumerGroupService(cgcArgs)
 
diff --git a/core/src/test/scala/unit/kafka/admin/ResetConsumerGroupOffsetTest.scala b/core/src/test/scala/unit/kafka/admin/ResetConsumerGroupOffsetTest.scala
index 3d8e895..9674486 100644
--- a/core/src/test/scala/unit/kafka/admin/ResetConsumerGroupOffsetTest.scala
+++ b/core/src/test/scala/unit/kafka/admin/ResetConsumerGroupOffsetTest.scala
@@ -365,7 +365,7 @@ class ResetConsumerGroupOffsetTest extends ConsumerGroupCommandTest {
 
   private def resetAndAssertOffsetsCommitted(consumerGroupService: ConsumerGroupService,
                                              expectedOffsets: Map[TopicPartition, Long],
-                                             topic: String = topic): Unit = {
+                                             topic: String): Unit = {
     val allResetOffsets = resetOffsets(consumerGroupService)
     allResetOffsets.foreach { case (tp, offset) =>
       assertEquals(offset, expectedOffsets(tp))
diff --git a/core/src/test/scala/unit/kafka/coordinator/group/GroupCoordinatorTest.scala b/core/src/test/scala/unit/kafka/coordinator/group/GroupCoordinatorTest.scala
index 2c9e81d..1598547 100644
--- a/core/src/test/scala/unit/kafka/coordinator/group/GroupCoordinatorTest.scala
+++ b/core/src/test/scala/unit/kafka/coordinator/group/GroupCoordinatorTest.scala
@@ -1274,7 +1274,7 @@ class GroupCoordinatorTest extends JUnitSuite {
   @Test
   def testDeleteNonEmptyGroup() {
     val memberId = JoinGroupRequest.UNKNOWN_MEMBER_ID
-    val joinGroupResult = joinGroup(groupId, memberId, protocolType, protocols)
+    joinGroup(groupId, memberId, protocolType, protocols)
 
     val result = groupCoordinator.handleDeleteGroups(Set(groupId).toSet)
     assert(result.size == 1 && result.contains(groupId) && result.get(groupId).contains(Errors.NON_EMPTY_GROUP))
diff --git a/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala b/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala
index 4f5ba5c..64e8b38 100755
--- a/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala
+++ b/core/src/test/scala/unit/kafka/log/LogCleanerIntegrationTest.scala
@@ -238,7 +238,7 @@ class LogCleanerIntegrationTest(compressionCodec: String) extends AbstractLogCle
       cleanerIoBufferSize = Some(1))
     val log = cleaner.logs.get(topicPartitions(0))
 
-    val appends = writeDups(numKeys = 100, numDups = 3, log = log, codec = codec)
+    writeDups(numKeys = 100, numDups = 3, log = log, codec = codec)
     val startSize = log.size
     cleaner.startup()
     assertEquals(1, cleaner.cleanerCount)
diff --git a/core/src/test/scala/unit/kafka/server/FetchSessionTest.scala b/core/src/test/scala/unit/kafka/server/FetchSessionTest.scala
index 3320b63..8264c1b 100755
--- a/core/src/test/scala/unit/kafka/server/FetchSessionTest.scala
+++ b/core/src/test/scala/unit/kafka/server/FetchSessionTest.scala
@@ -22,7 +22,7 @@ import java.util.Collections
 import kafka.utils.MockTime
 import org.apache.kafka.common.TopicPartition
 import org.apache.kafka.common.protocol.Errors
-import org.apache.kafka.common.requests.FetchMetadata.{FINAL_EPOCH, INITIAL_EPOCH, INVALID_SESSION_ID}
+import org.apache.kafka.common.requests.FetchMetadata.{FINAL_EPOCH, INVALID_SESSION_ID}
+import org.apache.kafka.common.requests.{FetchRequest, FetchResponse, FetchMetadata => JFetchMetadata}
 import org.junit.{Rule, Test}
 import org.junit.Assert._
@@ -201,7 +201,7 @@ class FetchSessionTest {
       context6.updateAndGenerateResponseData(respData2).error())
 
     // Close the incremental fetch session.
-    var prevSessionId = resp5.sessionId()
+    val prevSessionId = resp5.sessionId
     var nextSessionId = prevSessionId
     do {
       val reqData7 = new util.LinkedHashMap[TopicPartition, FetchRequest.PartitionData]
diff --git a/core/src/test/scala/unit/kafka/server/ServerGenerateClusterIdTest.scala b/core/src/test/scala/unit/kafka/server/ServerGenerateClusterIdTest.scala
index 0317da3..e00e6c1 100755
--- a/core/src/test/scala/unit/kafka/server/ServerGenerateClusterIdTest.scala
+++ b/core/src/test/scala/unit/kafka/server/ServerGenerateClusterIdTest.scala
@@ -19,7 +19,7 @@ package kafka.server
 import scala.concurrent._
 import ExecutionContext.Implicits._
 import scala.concurrent.duration._
-import kafka.utils.{TestUtils, ZkUtils}
+import kafka.utils.TestUtils
 import kafka.zk.ZooKeeperTestHarness
 import org.junit.Assert._
 import org.junit.{Before, After, Test}
diff --git a/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala b/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala
index 4c05d98..64647de 100755
--- a/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala
+++ b/core/src/test/scala/unit/kafka/server/ServerStartupTest.scala
@@ -18,7 +18,7 @@
 package kafka.server
 
 import kafka.common.KafkaException
-import kafka.utils.{TestUtils, ZkUtils}
+import kafka.utils.TestUtils
 import kafka.zk.ZooKeeperTestHarness
 import org.apache.zookeeper.KeeperException.NodeExistsException
 import org.easymock.EasyMock

-- 
To stop receiving notification emails like this one, please contact
rsivaram@apache.org.

Mime
View raw message