kafka-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From j...@apache.org
Subject [3/3] kafka git commit: MINOR: Fix typos in documentation
Date Mon, 10 Oct 2016 22:58:24 GMT
MINOR: Fix typos in documentation

And improve readability by adding proper punctuation.

Author: Vahid Hashemian <vahidhashemian@us.ibm.com>

Reviewers: Jason Gustafson <jason@confluent.io>

Closes #2002 from vahidhashemian/doc/fix_typos


Project: http://git-wip-us.apache.org/repos/asf/kafka/repo
Commit: http://git-wip-us.apache.org/repos/asf/kafka/commit/e972d2af
Tree: http://git-wip-us.apache.org/repos/asf/kafka/tree/e972d2af
Diff: http://git-wip-us.apache.org/repos/asf/kafka/diff/e972d2af

Branch: refs/heads/trunk
Commit: e972d2afd31913acdef8979c6cac7cf48f99da7b
Parents: 06d6d98
Author: Vahid Hashemian <vahidhashemian@us.ibm.com>
Authored: Mon Oct 10 15:58:37 2016 -0700
Committer: Jason Gustafson <jason@confluent.io>
Committed: Mon Oct 10 15:58:37 2016 -0700

----------------------------------------------------------------------
 .../kafka/clients/producer/ProducerConfig.java  |   2 +-
 .../apache/kafka/common/config/SslConfigs.java  |   4 +-
 .../runtime/distributed/DistributedConfig.java  |   2 +-
 docs/api.html                                   |   2 +-
 docs/configuration.html                         |   2 +-
 docs/connect.html                               |   4 +-
 docs/design.html                                | 378 ++++++++++++++-----
 docs/implementation.html                        |   2 +-
 docs/introduction.html                          |  12 +-
 docs/ops.html                                   |   7 +-
 docs/quickstart.html                            |   8 +-
 docs/security.html                              |   6 +-
 docs/upgrade.html                               |   4 +-
 13 files changed, 308 insertions(+), 125 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kafka/blob/e972d2af/clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java
----------------------------------------------------------------------
diff --git a/clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java b/clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java
index 927229d..4b0e999 100644
--- a/clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java
+++ b/clients/src/main/java/org/apache/kafka/clients/producer/ProducerConfig.java
@@ -148,7 +148,7 @@ public class ProducerConfig extends AbstractConfig {
     private static final String BLOCK_ON_BUFFER_FULL_DOC = "When our memory buffer is exhausted
we must either stop accepting new records (block) or throw errors. "
                                                            + "By default this setting is
false and the producer will no longer throw a BufferExhaustException but instead will use
the <code>" + MAX_BLOCK_MS_CONFIG + "</code> "
                                                            + "value to block, after which
it will throw a TimeoutException. Setting this property to true will set the <code>"
+ MAX_BLOCK_MS_CONFIG + "</code> to Long.MAX_VALUE. "
-                                                           + "<em>Also if this property
is set to true, parameter <code>" + METADATA_FETCH_TIMEOUT_CONFIG + "</code> is
not longer honored.</em>"
+                                                           + "<em>Also if this property
is set to true, parameter <code>" + METADATA_FETCH_TIMEOUT_CONFIG + "</code> is
no longer honored.</em>"
                                                            + "<p>This parameter is
deprecated and will be removed in a future release. "
                                                            + "Parameter <code>" + MAX_BLOCK_MS_CONFIG
+ "</code> should be used instead.";
 

http://git-wip-us.apache.org/repos/asf/kafka/blob/e972d2af/clients/src/main/java/org/apache/kafka/common/config/SslConfigs.java
----------------------------------------------------------------------
diff --git a/clients/src/main/java/org/apache/kafka/common/config/SslConfigs.java b/clients/src/main/java/org/apache/kafka/common/config/SslConfigs.java
index ba1ff6b..7272591 100644
--- a/clients/src/main/java/org/apache/kafka/common/config/SslConfigs.java
+++ b/clients/src/main/java/org/apache/kafka/common/config/SslConfigs.java
@@ -38,7 +38,7 @@ public class SslConfigs {
     public static final String SSL_PROVIDER_DOC = "The name of the security provider used
for SSL connections. Default value is the default security provider of the JVM.";
 
     public static final String SSL_CIPHER_SUITES_CONFIG = "ssl.cipher.suites";
-    public static final String SSL_CIPHER_SUITES_DOC = "A list of cipher suites. This is
a named combination of authentication, encryption, MAC and key exchange algorithm used to
negotiate the security settings for a network connection using TLS or SSL network protocol."
+    public static final String SSL_CIPHER_SUITES_DOC = "A list of cipher suites. This is
a named combination of authentication, encryption, MAC and key exchange algorithm used to
negotiate the security settings for a network connection using TLS or SSL network protocol.
"
             + "By default all the available cipher suites are supported.";
 
     public static final String SSL_ENABLED_PROTOCOLS_CONFIG = "ssl.enabled.protocols";
@@ -55,7 +55,7 @@ public class SslConfigs {
         + "This is optional for client and can be used for two-way authentication for client.";
 
     public static final String SSL_KEYSTORE_PASSWORD_CONFIG = "ssl.keystore.password";
-    public static final String SSL_KEYSTORE_PASSWORD_DOC = "The store password for the key
store file."
+    public static final String SSL_KEYSTORE_PASSWORD_DOC = "The store password for the key
store file. "
         + "This is optional for client and only needed if ssl.keystore.location is configured.
";
 
     public static final String SSL_KEY_PASSWORD_CONFIG = "ssl.key.password";

http://git-wip-us.apache.org/repos/asf/kafka/blob/e972d2af/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedConfig.java
----------------------------------------------------------------------
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedConfig.java
b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedConfig.java
index 6e9d7b4..1617d59 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedConfig.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/distributed/DistributedConfig.java
@@ -43,7 +43,7 @@ public class DistributedConfig extends WorkerConfig {
      * <code>session.timeout.ms</code>
      */
     public static final String SESSION_TIMEOUT_MS_CONFIG = "session.timeout.ms";
-    private static final String SESSION_TIMEOUT_MS_DOC = "The timeout used to detect worker
failures." +
+    private static final String SESSION_TIMEOUT_MS_DOC = "The timeout used to detect worker
failures. " +
             "The worker sends periodic heartbeats to indicate its liveness to the broker.
If no heartbeats are " +
             "received by the broker before the expiration of this session timeout, then the
broker will remove the " +
             "worker from the group and initiate a rebalance. Note that the value must be
in the allowable range as " +

http://git-wip-us.apache.org/repos/asf/kafka/blob/e972d2af/docs/api.html
----------------------------------------------------------------------
diff --git a/docs/api.html b/docs/api.html
index 699b000..686b265 100644
--- a/docs/api.html
+++ b/docs/api.html
@@ -89,6 +89,6 @@ Those who want to implement custom connectors can see the <a href="/0100/javadoc
 <h3><a id="legacyapis" href="#streamsapi">Legacy APIs</a></h3>
 
 <p>
-A more limited legacy producer and consumer api is also included in Kafka. These old Scala
APIs are deprecated and only still available for compatability purposes. Information on them
can be found here <a href="/081/documentation.html#producerapi"  title="Kafka 0.8.1 Docs">
+A more limited legacy producer and consumer api is also included in Kafka. These old Scala
APIs are deprecated and only still available for compatibility purposes. Information on them
can be found here <a href="/081/documentation.html#producerapi"  title="Kafka 0.8.1 Docs">
 here</a>.
 </p>

http://git-wip-us.apache.org/repos/asf/kafka/blob/e972d2af/docs/configuration.html
----------------------------------------------------------------------
diff --git a/docs/configuration.html b/docs/configuration.html
index 35f1475..53343fa 100644
--- a/docs/configuration.html
+++ b/docs/configuration.html
@@ -101,7 +101,7 @@ The essential old consumer configurations are the following:
       <td colspan="1"></td>
           <td>Specifies the ZooKeeper connection string in the form <code>hostname:port</code>
where host and port are the host and port of a ZooKeeper server. To allow connecting through
other ZooKeeper nodes when that ZooKeeper machine is down you can also specify multiple hosts
in the form <code>hostname1:port1,hostname2:port2,hostname3:port3</code>.
         <p>
-    The server may also have a ZooKeeper chroot path as part of it's ZooKeeper connection
string which puts its data under some path in the global ZooKeeper namespace. If so the consumer
should use the same chroot path in its connection string. For example to give a chroot path
of <code>/chroot/path</code> you would give the connection string as  <code>hostname1:port1,hostname2:port2,hostname3:port3/chroot/path</code>.</td>
+    The server may also have a ZooKeeper chroot path as part of its ZooKeeper connection
string which puts its data under some path in the global ZooKeeper namespace. If so the consumer
should use the same chroot path in its connection string. For example to give a chroot path
of <code>/chroot/path</code> you would give the connection string as  <code>hostname1:port1,hostname2:port2,hostname3:port3/chroot/path</code>.</td>
     </tr>
     <tr>
       <td>consumer.id</td>

http://git-wip-us.apache.org/repos/asf/kafka/blob/e972d2af/docs/connect.html
----------------------------------------------------------------------
diff --git a/docs/connect.html b/docs/connect.html
index d8431a8..d3fc7d7 100644
--- a/docs/connect.html
+++ b/docs/connect.html
@@ -101,11 +101,11 @@ For any other options, you should consult the documentation for the
connector.
 
 <h4><a id="connect_rest" href="#connect_rest">REST API</a></h4>
 
-Since Kafka Connect is intended to be run as a service, it also provides a REST API for managing
connectors. By default this service runs on port 8083. The following are the currently supported
endpoints:
+Since Kafka Connect is intended to be run as a service, it also provides a REST API for managing
connectors. By default, this service runs on port 8083. The following are the currently supported
endpoints:
 
 <ul>
     <li><code>GET /connectors</code> - return a list of active connectors</li>
-    <li><code>POST /connectors</code> - create a new connector; the request
body should be a JSON object containing a string <code>name</code> field and a
object <code>config</code> field with the connector configuration parameters</li>
+    <li><code>POST /connectors</code> - create a new connector; the request
body should be a JSON object containing a string <code>name</code> field and an
object <code>config</code> field with the connector configuration parameters</li>
     <li><code>GET /connectors/{name}</code> - get information about a specific
connector</li>
     <li><code>GET /connectors/{name}/config</code> - get the configuration
parameters for a specific connector</li>
     <li><code>PUT /connectors/{name}/config</code> - update the configuration
parameters for a specific connector</li>


Mime
View raw message