kafka-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From guozh...@apache.org
Subject kafka git commit: KAFKA-4603: Disallow abbreviations in OptionParser constructor
Date Tue, 30 May 2017 20:53:36 GMT
Repository: kafka
Updated Branches:
  refs/heads/trunk b3788d8dc -> f0745cd51


KAFKA-4603: Disallow abbreviations in OptionParser constructor

KAFKA-4603: command-line parsing error
Using "new OptionParser" might result in a parse error, because abbreviated option names are allowed by default

Change all OptionParser constructor calls in Kafka to "new OptionParser(false)"

Author: xinlihua <xin.lihua1@zte.com.cn>
Author: unknown <00067310@A23338408.zte.intra>
Author: auroraxlh <xin.lihua1@zte.com.cn>
Author: xin <xin.lihua1@zte.com.cn>

Reviewers: Damian Guy, Guozhang Wang

Closes #2349 from auroraxlh/fix_OptionParser_bug


Project: http://git-wip-us.apache.org/repos/asf/kafka/repo
Commit: http://git-wip-us.apache.org/repos/asf/kafka/commit/f0745cd5
Tree: http://git-wip-us.apache.org/repos/asf/kafka/tree/f0745cd5
Diff: http://git-wip-us.apache.org/repos/asf/kafka/diff/f0745cd5

Branch: refs/heads/trunk
Commit: f0745cd514a8af6ea22e1fddfef66f0e69ae8b1c
Parents: b3788d8
Author: xinlihua <xin.lihua1@zte.com.cn>
Authored: Tue May 30 13:53:32 2017 -0700
Committer: Guozhang Wang <wangguoz@gmail.com>
Committed: Tue May 30 13:53:32 2017 -0700

----------------------------------------------------------------------
 core/src/main/scala/kafka/Kafka.scala                            | 2 +-
 core/src/main/scala/kafka/admin/AclCommand.scala                 | 2 +-
 core/src/main/scala/kafka/admin/BrokerApiVersionsCommand.scala   | 2 +-
 core/src/main/scala/kafka/admin/ConfigCommand.scala              | 2 +-
 core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala       | 2 +-
 core/src/main/scala/kafka/admin/DeleteRecordsCommand.scala       | 2 +-
 .../kafka/admin/PreferredReplicaLeaderElectionCommand.scala      | 2 +-
 core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala  | 2 +-
 core/src/main/scala/kafka/admin/TopicCommand.scala               | 2 +-
 core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala         | 2 +-
 core/src/main/scala/kafka/tools/ConsoleConsumer.scala            | 2 +-
 core/src/main/scala/kafka/tools/ConsoleProducer.scala            | 2 +-
 core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala      | 2 +-
 core/src/main/scala/kafka/tools/DumpLogSegments.scala            | 2 +-
 core/src/main/scala/kafka/tools/ExportZkOffsets.scala            | 2 +-
 core/src/main/scala/kafka/tools/GetOffsetShell.scala             | 2 +-
 core/src/main/scala/kafka/tools/ImportZkOffsets.scala            | 2 +-
 core/src/main/scala/kafka/tools/JmxTool.scala                    | 2 +-
 core/src/main/scala/kafka/tools/MirrorMaker.scala                | 2 +-
 core/src/main/scala/kafka/tools/PerfConfig.scala                 | 2 +-
 core/src/main/scala/kafka/tools/ReplayLogProducer.scala          | 2 +-
 core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala    | 2 +-
 core/src/main/scala/kafka/tools/SimpleConsumerShell.scala        | 2 +-
 core/src/main/scala/kafka/tools/StateChangeLogMerger.scala       | 2 +-
 core/src/main/scala/kafka/tools/StreamsResetter.java             | 3 ++-
 core/src/main/scala/kafka/tools/VerifyConsumerRebalance.scala    | 2 +-
 core/src/test/scala/kafka/tools/TestLogCleaning.scala            | 2 +-
 core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala       | 2 +-
 core/src/test/scala/other/kafka/TestOffsetManager.scala          | 2 +-
 core/src/test/scala/other/kafka/TestPurgatoryPerformance.scala   | 2 +-
 docs/security.html                                               | 4 ++--
 .../apache/kafka/streams/integration/ResetIntegrationTest.java   | 4 ++--
 32 files changed, 35 insertions(+), 34 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/Kafka.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/Kafka.scala b/core/src/main/scala/kafka/Kafka.scala
index b43a2b7..1c16c96 100755
--- a/core/src/main/scala/kafka/Kafka.scala
+++ b/core/src/main/scala/kafka/Kafka.scala
@@ -29,7 +29,7 @@ import scala.collection.JavaConverters._
 object Kafka extends Logging {
 
   def getPropsFromArgs(args: Array[String]): Properties = {
-    val optionParser = new OptionParser
+    val optionParser = new OptionParser(false)
     val overrideOpt = optionParser.accepts("override", "Optional property that should override
values set in server.properties file")
       .withRequiredArg()
       .ofType(classOf[String])

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/AclCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/AclCommand.scala b/core/src/main/scala/kafka/admin/AclCommand.scala
index 47659a9..8cbd8a6 100644
--- a/core/src/main/scala/kafka/admin/AclCommand.scala
+++ b/core/src/main/scala/kafka/admin/AclCommand.scala
@@ -268,7 +268,7 @@ object AclCommand {
   }
 
   class AclCommandOptions(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val authorizerOpt = parser.accepts("authorizer", "Fully qualified class name of the authorizer,
defaults to kafka.security.auth.SimpleAclAuthorizer.")
       .withRequiredArg
       .describedAs("authorizer")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/BrokerApiVersionsCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/BrokerApiVersionsCommand.scala b/core/src/main/scala/kafka/admin/BrokerApiVersionsCommand.scala
index b87c856..4aea3c0 100644
--- a/core/src/main/scala/kafka/admin/BrokerApiVersionsCommand.scala
+++ b/core/src/main/scala/kafka/admin/BrokerApiVersionsCommand.scala
@@ -64,7 +64,7 @@ object BrokerApiVersionsCommand {
     val BootstrapServerDoc = "REQUIRED: The server to connect to."
     val CommandConfigDoc = "A property file containing configs to be passed to Admin Client."
 
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val commandConfigOpt = parser.accepts("command-config", CommandConfigDoc)
                                  .withRequiredArg
                                  .describedAs("command config property file")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/ConfigCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/ConfigCommand.scala b/core/src/main/scala/kafka/admin/ConfigCommand.scala
index f74d31d..b18dcc9 100644
--- a/core/src/main/scala/kafka/admin/ConfigCommand.scala
+++ b/core/src/main/scala/kafka/admin/ConfigCommand.scala
@@ -278,7 +278,7 @@ object ConfigCommand extends Config {
   }
 
   class ConfigCommandOptions(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED: The connection string for the
zookeeper connection in the form host:port. " +
             "Multiple URLS can be given to allow fail-over.")
             .withRequiredArg

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala b/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala
index 7715a95..fb589a2 100755
--- a/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala
+++ b/core/src/main/scala/kafka/admin/ConsumerGroupCommand.scala
@@ -713,7 +713,7 @@ object ConsumerGroupCommand extends Logging {
     val ResetToLatestDoc = "Reset offsets to latest offset."
     val ResetShiftByDoc = "Reset offsets shifting current offset by 'n', where 'n' can be
positive or negative"
 
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val zkConnectOpt = parser.accepts("zookeeper", ZkConnectDoc)
                              .withRequiredArg
                              .describedAs("urls")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/DeleteRecordsCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/DeleteRecordsCommand.scala b/core/src/main/scala/kafka/admin/DeleteRecordsCommand.scala
index b85a6ff..71dae8a 100644
--- a/core/src/main/scala/kafka/admin/DeleteRecordsCommand.scala
+++ b/core/src/main/scala/kafka/admin/DeleteRecordsCommand.scala
@@ -97,7 +97,7 @@ object DeleteRecordsCommand {
                                  "{\"partitions\":\n  [{\"topic\": \"foo\", \"partition\":
1, \"offset\": 1}],\n \"version\":1\n}"
     val CommandConfigDoc = "A property file containing configs to be passed to Admin Client."
 
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val bootstrapServerOpt = parser.accepts("bootstrap-server", BootstrapServerDoc)
                                    .withRequiredArg
                                    .describedAs("server(s) to use for bootstrapping")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala b/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala
index 2078774..f45c81a 100755
--- a/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala
+++ b/core/src/main/scala/kafka/admin/PreferredReplicaLeaderElectionCommand.scala
@@ -28,7 +28,7 @@ import org.apache.kafka.common.security.JaasUtils
 object PreferredReplicaLeaderElectionCommand extends Logging {
 
   def main(args: Array[String]): Unit = {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val jsonFileOpt = parser.accepts("path-to-json-file", "The JSON file with the list of
partitions " +
       "for which preferred replica leader election should be done, in the following format
- \n" +
        "{\"partitions\":\n\t[{\"topic\": \"foo\", \"partition\": 1},\n\t {\"topic\": \"foobar\",
\"partition\": 2}]\n}\n" +

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala b/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala
index 3addb77..cae14b1 100755
--- a/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala
+++ b/core/src/main/scala/kafka/admin/ReassignPartitionsCommand.scala
@@ -279,7 +279,7 @@ object ReassignPartitionsCommand extends Logging {
   }
 
   class ReassignPartitionsCommandOptions(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
 
     val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED: The connection string for the
zookeeper connection in the " +
                       "form host:port. Multiple URLS can be given to allow fail-over.")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/TopicCommand.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/TopicCommand.scala b/core/src/main/scala/kafka/admin/TopicCommand.scala
index 942d70e..efb9237 100755
--- a/core/src/main/scala/kafka/admin/TopicCommand.scala
+++ b/core/src/main/scala/kafka/admin/TopicCommand.scala
@@ -272,7 +272,7 @@ object TopicCommand extends Logging {
   }
 
   class TopicCommandOptions(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED: The connection string for the
zookeeper connection in the form host:port. " +
                                       "Multiple URLS can be given to allow fail-over.")
                            .withRequiredArg

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala b/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala
index 172233b..71153d1 100644
--- a/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala
+++ b/core/src/main/scala/kafka/admin/ZkSecurityMigrator.scala
@@ -65,7 +65,7 @@ object ZkSecurityMigrator extends Logging {
 
   def run(args: Array[String]) {
     var jaasFile = System.getProperty(JaasUtils.JAVA_LOGIN_CONFIG_PARAM)
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val zkAclOpt = parser.accepts("zookeeper.acl", "Indicates whether to make the Kafka znodes
in ZooKeeper secure or unsecure."
         + " The options are 'secure' and 'unsecure'").withRequiredArg().ofType(classOf[String])
     val zkUrlOpt = parser.accepts("zookeeper.connect", "Sets the ZooKeeper connect string
(ensemble). This parameter " +

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/ConsoleConsumer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/ConsoleConsumer.scala b/core/src/main/scala/kafka/tools/ConsoleConsumer.scala
index 8a41386..193a344 100755
--- a/core/src/main/scala/kafka/tools/ConsoleConsumer.scala
+++ b/core/src/main/scala/kafka/tools/ConsoleConsumer.scala
@@ -205,7 +205,7 @@ object ConsoleConsumer extends Logging {
   }
 
   class ConsumerConfig(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val topicIdOpt = parser.accepts("topic", "The topic id to consume on.")
       .withRequiredArg
       .describedAs("topic")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/ConsoleProducer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/ConsoleProducer.scala b/core/src/main/scala/kafka/tools/ConsoleProducer.scala
index ffb3458..1b22140 100644
--- a/core/src/main/scala/kafka/tools/ConsoleProducer.scala
+++ b/core/src/main/scala/kafka/tools/ConsoleProducer.scala
@@ -133,7 +133,7 @@ object ConsoleProducer {
   }
 
   class ProducerConfig(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val topicOpt = parser.accepts("topic", "REQUIRED: The topic id to produce messages to.")
       .withRequiredArg
       .describedAs("topic")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala b/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala
index bcf2b58..35d7ba4 100644
--- a/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala
+++ b/core/src/main/scala/kafka/tools/ConsumerOffsetChecker.scala
@@ -102,7 +102,7 @@ object ConsumerOffsetChecker extends Logging {
   def main(args: Array[String]) {
     warn("WARNING: ConsumerOffsetChecker is deprecated and will be dropped in releases following
0.9.0. Use ConsumerGroupCommand instead.")
 
-    val parser = new OptionParser()
+    val parser = new OptionParser(false)
 
     val zkConnectOpt = parser.accepts("zookeeper", "ZooKeeper connect string.").
             withRequiredArg().defaultsTo("localhost:2181").ofType(classOf[String])

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/DumpLogSegments.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/DumpLogSegments.scala b/core/src/main/scala/kafka/tools/DumpLogSegments.scala
index 4de546f..4b38c27 100755
--- a/core/src/main/scala/kafka/tools/DumpLogSegments.scala
+++ b/core/src/main/scala/kafka/tools/DumpLogSegments.scala
@@ -37,7 +37,7 @@ import scala.collection.JavaConverters._
 object DumpLogSegments {
 
   def main(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val printOpt = parser.accepts("print-data-log", "if set, printing the messages content
when dumping data logs. Automatically set if any decoder option is specified.")
     val verifyOpt = parser.accepts("verify-index-only", "if set, just verify the index log
without printing its content.")
     val indexSanityOpt = parser.accepts("index-sanity-check", "if set, just checks the index
sanity without printing its content. " +

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/ExportZkOffsets.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/ExportZkOffsets.scala b/core/src/main/scala/kafka/tools/ExportZkOffsets.scala
index eeae270..3de530f 100644
--- a/core/src/main/scala/kafka/tools/ExportZkOffsets.scala
+++ b/core/src/main/scala/kafka/tools/ExportZkOffsets.scala
@@ -46,7 +46,7 @@ import scala.collection.JavaConverters._
 object ExportZkOffsets extends Logging {
 
   def main(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
 
     val zkConnectOpt = parser.accepts("zkconnect", "ZooKeeper connect string.")
                             .withRequiredArg()

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/GetOffsetShell.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/GetOffsetShell.scala b/core/src/main/scala/kafka/tools/GetOffsetShell.scala
index 9a19b1f..f06c412 100644
--- a/core/src/main/scala/kafka/tools/GetOffsetShell.scala
+++ b/core/src/main/scala/kafka/tools/GetOffsetShell.scala
@@ -29,7 +29,7 @@ import kafka.utils.{CommandLineUtils, Exit, ToolsUtils}
 object GetOffsetShell {
 
   def main(args: Array[String]): Unit = {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val brokerListOpt = parser.accepts("broker-list", "REQUIRED: The list of hostname and
port of the server to connect to.")
                            .withRequiredArg
                            .describedAs("hostname:port,...,hostname:port")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/ImportZkOffsets.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/ImportZkOffsets.scala b/core/src/main/scala/kafka/tools/ImportZkOffsets.scala
index 77d6bc1..d96569b 100644
--- a/core/src/main/scala/kafka/tools/ImportZkOffsets.scala
+++ b/core/src/main/scala/kafka/tools/ImportZkOffsets.scala
@@ -44,7 +44,7 @@ import org.apache.kafka.common.security.JaasUtils
 object ImportZkOffsets extends Logging {
 
   def main(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     
     val zkConnectOpt = parser.accepts("zkconnect", "ZooKeeper connect string.")
                             .withRequiredArg()

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/JmxTool.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/JmxTool.scala b/core/src/main/scala/kafka/tools/JmxTool.scala
index e980084..6e8a094 100644
--- a/core/src/main/scala/kafka/tools/JmxTool.scala
+++ b/core/src/main/scala/kafka/tools/JmxTool.scala
@@ -41,7 +41,7 @@ object JmxTool extends Logging {
 
   def main(args: Array[String]) {
     // Parse command line
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val objectNameOpt =
       parser.accepts("object-name", "A JMX object name to use as a query. This can contain
wild cards, and this option " +
         "can be given multiple times to specify more than one query. If no objects are specified
" +

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/MirrorMaker.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/MirrorMaker.scala b/core/src/main/scala/kafka/tools/MirrorMaker.scala
index d61b355..e8207ea 100755
--- a/core/src/main/scala/kafka/tools/MirrorMaker.scala
+++ b/core/src/main/scala/kafka/tools/MirrorMaker.scala
@@ -87,7 +87,7 @@ object MirrorMaker extends Logging with KafkaMetricsGroup {
 
     info("Starting mirror maker")
     try {
-      val parser = new OptionParser
+      val parser = new OptionParser(false)
 
       val consumerConfigOpt = parser.accepts("consumer.config",
         "Embedded consumer config for consuming from the source cluster.")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/PerfConfig.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/PerfConfig.scala b/core/src/main/scala/kafka/tools/PerfConfig.scala
index 26704c2..a285a1c 100644
--- a/core/src/main/scala/kafka/tools/PerfConfig.scala
+++ b/core/src/main/scala/kafka/tools/PerfConfig.scala
@@ -21,7 +21,7 @@ import joptsimple.OptionParser
 
 
 class PerfConfig(args: Array[String]) {
-  val parser = new OptionParser
+  val parser = new OptionParser(false)
   val numMessagesOpt = parser.accepts("messages", "REQUIRED: The number of messages to send
or consume")
     .withRequiredArg
     .describedAs("count")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/ReplayLogProducer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/ReplayLogProducer.scala b/core/src/main/scala/kafka/tools/ReplayLogProducer.scala
index 049f129..5d4cc23 100644
--- a/core/src/main/scala/kafka/tools/ReplayLogProducer.scala
+++ b/core/src/main/scala/kafka/tools/ReplayLogProducer.scala
@@ -61,7 +61,7 @@ object ReplayLogProducer extends Logging {
   }
 
   class Config(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val zkConnectOpt = parser.accepts("zookeeper", "REQUIRED: The connection string for the
zookeeper connection in the form host:port. " +
       "Multiple URLS can be given to allow fail-over.")
       .withRequiredArg

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala b/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala
index e765f41..71f3368 100644
--- a/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala
+++ b/core/src/main/scala/kafka/tools/ReplicaVerificationTool.scala
@@ -65,7 +65,7 @@ object ReplicaVerificationTool extends Logging {
   }
 
   def main(args: Array[String]): Unit = {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val brokerListOpt = parser.accepts("broker-list", "REQUIRED: The list of hostname and
port of the server to connect to.")
                          .withRequiredArg
                          .describedAs("hostname:port,...,hostname:port")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala b/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala
index c098bbd..098826c 100755
--- a/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala
+++ b/core/src/main/scala/kafka/tools/SimpleConsumerShell.scala
@@ -38,7 +38,7 @@ object SimpleConsumerShell extends Logging {
 
   def main(args: Array[String]): Unit = {
 
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val brokerListOpt = parser.accepts("broker-list", "REQUIRED: The list of hostname and
port of the server to connect to.")
                            .withRequiredArg
                            .describedAs("hostname:port,...,hostname:port")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/StateChangeLogMerger.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/StateChangeLogMerger.scala b/core/src/main/scala/kafka/tools/StateChangeLogMerger.scala
index b4b3722..f2b929a 100755
--- a/core/src/main/scala/kafka/tools/StateChangeLogMerger.scala
+++ b/core/src/main/scala/kafka/tools/StateChangeLogMerger.scala
@@ -60,7 +60,7 @@ object StateChangeLogMerger extends Logging {
   def main(args: Array[String]) {
 
     // Parse input arguments.
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val filesOpt = parser.accepts("logs", "Comma separated list of state change logs or a
regex for the log file names")
                               .withRequiredArg
                               .describedAs("file1,file2,...")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/StreamsResetter.java
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/StreamsResetter.java b/core/src/main/scala/kafka/tools/StreamsResetter.java
index 4be5f37..d2c5e14 100644
--- a/core/src/main/scala/kafka/tools/StreamsResetter.java
+++ b/core/src/main/scala/kafka/tools/StreamsResetter.java
@@ -137,7 +137,8 @@ public class StreamsResetter {
     }
 
     private void parseArguments(final String[] args) throws IOException {
-        final OptionParser optionParser = new OptionParser();
+
+        final OptionParser optionParser = new OptionParser(false);
         applicationIdOption = optionParser.accepts("application-id", "The Kafka Streams application
ID (application.id).")
             .withRequiredArg()
             .ofType(String.class)

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/main/scala/kafka/tools/VerifyConsumerRebalance.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/kafka/tools/VerifyConsumerRebalance.scala b/core/src/main/scala/kafka/tools/VerifyConsumerRebalance.scala
index 0e5d518..b80b7da 100644
--- a/core/src/main/scala/kafka/tools/VerifyConsumerRebalance.scala
+++ b/core/src/main/scala/kafka/tools/VerifyConsumerRebalance.scala
@@ -23,7 +23,7 @@ import kafka.utils.{CommandLineUtils, Exit, Logging, ZKGroupTopicDirs, ZkUtils}
 
 object VerifyConsumerRebalance extends Logging {
   def main(args: Array[String]) {
-    val parser = new OptionParser()
+    val parser = new OptionParser(false)
 
     val zkConnectOpt = parser.accepts("zookeeper.connect", "ZooKeeper connect string.").
       withRequiredArg().defaultsTo("localhost:2181").ofType(classOf[String])

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/test/scala/kafka/tools/TestLogCleaning.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/kafka/tools/TestLogCleaning.scala b/core/src/test/scala/kafka/tools/TestLogCleaning.scala
index ca03ba8..0725601 100755
--- a/core/src/test/scala/kafka/tools/TestLogCleaning.scala
+++ b/core/src/test/scala/kafka/tools/TestLogCleaning.scala
@@ -49,7 +49,7 @@ import scala.collection.JavaConverters._
 object TestLogCleaning {
 
   def main(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val numMessagesOpt = parser.accepts("messages", "The number of messages to send or consume.")
                                .withRequiredArg
                                .describedAs("count")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala b/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala
index 658e3a0..bd66d25 100755
--- a/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala
+++ b/core/src/test/scala/other/kafka/TestLinearWriteSpeed.scala
@@ -38,7 +38,7 @@ import scala.math._
 object TestLinearWriteSpeed {
 
   def main(args: Array[String]): Unit = {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val dirOpt = parser.accepts("dir", "The directory to write to.")
                            .withRequiredArg
                            .describedAs("path")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/test/scala/other/kafka/TestOffsetManager.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/other/kafka/TestOffsetManager.scala b/core/src/test/scala/other/kafka/TestOffsetManager.scala
index d908175..69531d4 100644
--- a/core/src/test/scala/other/kafka/TestOffsetManager.scala
+++ b/core/src/test/scala/other/kafka/TestOffsetManager.scala
@@ -197,7 +197,7 @@ object TestOffsetManager {
   }
 
   def main(args: Array[String]) {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val zookeeperOpt = parser.accepts("zookeeper", "The ZooKeeper connection URL.")
       .withRequiredArg
       .describedAs("ZooKeeper URL")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/core/src/test/scala/other/kafka/TestPurgatoryPerformance.scala
----------------------------------------------------------------------
diff --git a/core/src/test/scala/other/kafka/TestPurgatoryPerformance.scala b/core/src/test/scala/other/kafka/TestPurgatoryPerformance.scala
index 1e41c31..4b540a4 100644
--- a/core/src/test/scala/other/kafka/TestPurgatoryPerformance.scala
+++ b/core/src/test/scala/other/kafka/TestPurgatoryPerformance.scala
@@ -36,7 +36,7 @@ import scala.collection.JavaConverters._
 object TestPurgatoryPerformance {
 
   def main(args: Array[String]): Unit = {
-    val parser = new OptionParser
+    val parser = new OptionParser(false)
     val keySpaceSizeOpt = parser.accepts("key-space-size", "The total number of possible
keys")
       .withRequiredArg
       .describedAs("total_num_possible_keys")

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/docs/security.html
----------------------------------------------------------------------
diff --git a/docs/security.html b/docs/security.html
index 99f692a..b2d4759 100644
--- a/docs/security.html
+++ b/docs/security.html
@@ -908,11 +908,11 @@
     </ol>
     Here is an example of how to run the migration tool:
     <pre>
-    ./bin/zookeeper-security-migration --zookeeper.acl=secure --zookeeper.connect=localhost:2181
+    ./bin/zookeeper-security-migration.sh --zookeeper.acl=secure --zookeeper.connect=localhost:2181
     </pre>
     <p>Run this to see the full list of parameters:</p>
     <pre>
-    ./bin/zookeeper-security-migration --help
+    ./bin/zookeeper-security-migration.sh --help
     </pre>
     <h4><a id="zk_authz_ensemble" href="#zk_authz_ensemble">7.6.3 Migrating the
ZooKeeper ensemble</a></h4>
     It is also necessary to enable authentication on the ZooKeeper ensemble. To do it, we
need to perform a rolling restart of the server and set a few properties. Please refer to
the ZooKeeper documentation for more detail:

http://git-wip-us.apache.org/repos/asf/kafka/blob/f0745cd5/streams/src/test/java/org/apache/kafka/streams/integration/ResetIntegrationTest.java
----------------------------------------------------------------------
diff --git a/streams/src/test/java/org/apache/kafka/streams/integration/ResetIntegrationTest.java
b/streams/src/test/java/org/apache/kafka/streams/integration/ResetIntegrationTest.java
index 47a679e..3cff7f7 100644
--- a/streams/src/test/java/org/apache/kafka/streams/integration/ResetIntegrationTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/integration/ResetIntegrationTest.java
@@ -340,7 +340,7 @@ public class ResetIntegrationTest {
         if (intermediateUserTopic != null) {
             parameters = new String[]{
                 "--application-id", APP_ID + testNo,
-                "--bootstrap-server", CLUSTER.bootstrapServers(),
+                "--bootstrap-servers", CLUSTER.bootstrapServers(),
                 "--zookeeper", CLUSTER.zKConnectString(),
                 "--input-topics", INPUT_TOPIC,
                 "--intermediate-topics", INTERMEDIATE_USER_TOPIC
@@ -348,7 +348,7 @@ public class ResetIntegrationTest {
         } else {
             parameters = new String[]{
                 "--application-id", APP_ID + testNo,
-                "--bootstrap-server", CLUSTER.bootstrapServers(),
+                "--bootstrap-servers", CLUSTER.bootstrapServers(),
                 "--zookeeper", CLUSTER.zKConnectString(),
                 "--input-topics", INPUT_TOPIC
             };


Mime
View raw message