kafka-commits mailing list archives

From guozh...@apache.org
Subject kafka git commit: MINOR: Update docs for new version
Date Fri, 03 Nov 2017 15:53:35 GMT
Repository: kafka
Updated Branches:
  refs/heads/1.0 4b6fa9074 -> c3d8bcc2d


MINOR: Update docs for new version

1. Update the Streams hello world examples with the new API.
2. Update the version references in various places.
3. Update version templates to 1.1.x.

Author: Guozhang Wang <wangguoz@gmail.com>

Reviewers: Ismael Juma <ismael@juma.me.uk>, Damian Guy <damian.guy@gmail.com>, Derrick Or <derrickor@gmail.com>

Closes #4169 from guozhangwang/KMINOR-streams-docs

(cherry picked from commit 487436b1a46b728904e543456c8bcc0d3ceea55a)
Signed-off-by: Guozhang Wang <wangguoz@gmail.com>

change template version from 11 to 10


Project: http://git-wip-us.apache.org/repos/asf/kafka/repo
Commit: http://git-wip-us.apache.org/repos/asf/kafka/commit/c3d8bcc2
Tree: http://git-wip-us.apache.org/repos/asf/kafka/tree/c3d8bcc2
Diff: http://git-wip-us.apache.org/repos/asf/kafka/diff/c3d8bcc2

Branch: refs/heads/1.0
Commit: c3d8bcc2df09f12f34255a139e6edd7eaa85c1a7
Parents: 4b6fa90
Author: Guozhang Wang <wangguoz@gmail.com>
Authored: Fri Nov 3 08:51:44 2017 -0700
Committer: Guozhang Wang <wangguoz@gmail.com>
Committed: Fri Nov 3 08:53:15 2017 -0700

----------------------------------------------------------------------
 docs/documentation.html |  8 ++++----
 docs/js/templateData.js |  4 ++--
 docs/streams/index.html | 47 +++++++++++++++++++++++++-------------------
 3 files changed, 33 insertions(+), 26 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/kafka/blob/c3d8bcc2/docs/documentation.html
----------------------------------------------------------------------
diff --git a/docs/documentation.html b/docs/documentation.html
index 7f297cc..8d83952 100644
--- a/docs/documentation.html
+++ b/docs/documentation.html
@@ -26,8 +26,8 @@
 	<div class="right">
 		<!--#include virtual="../includes/_docs_banner.htm" -->
     <h1>Documentation</h1>
-    <h3>Kafka 0.11.0 Documentation</h3>
-    Prior releases: <a href="/07/documentation.html">0.7.x</a>, <a href="/08/documentation.html">0.8.0</a>, <a href="/081/documentation.html">0.8.1.X</a>, <a href="/082/documentation.html">0.8.2.X</a>, <a href="/090/documentation.html">0.9.0.X</a>, <a href="/0100/documentation.html">0.10.0.X</a>, <a href="/0101/documentation.html">0.10.1.X</a>, <a href="/0102/documentation.html">0.10.2.X</a>.
+    <h3>Kafka 1.0 Documentation</h3>
+    Prior releases: <a href="/07/documentation.html">0.7.x</a>, <a href="/08/documentation.html">0.8.0</a>, <a href="/081/documentation.html">0.8.1.X</a>, <a href="/082/documentation.html">0.8.2.X</a>, <a href="/090/documentation.html">0.9.0.X</a>, <a href="/0100/documentation.html">0.10.0.X</a>, <a href="/0101/documentation.html">0.10.1.X</a>, <a href="/0102/documentation.html">0.10.2.X</a>, <a href="/0110/documentation.html">0.11.0.X</a>.
 
     <!--#include virtual="toc.html" -->
 
@@ -69,7 +69,7 @@
     <h2><a id="connect" href="#connect">8. Kafka Connect</a></h2>
     <!--#include virtual="connect.html" -->
 
-    <h2><a id="streams" href="/0110/documentation/streams">9. Kafka Streams</a></h2>
+    <h2><a id="streams" href="/10/documentation/streams">9. Kafka Streams</a></h2>
     <p>
         Kafka Streams is a client library for processing and analyzing data stored in Kafka. It builds upon important stream processing concepts such as properly distinguishing between event time and processing time, windowing support, exactly-once processing semantics and simple yet efficient management of application state.
     </p>
@@ -77,7 +77,7 @@
         Kafka Streams has a <b>low barrier to entry</b>: You can quickly write and run a small-scale proof-of-concept on a single machine; and you only need to run additional instances of your application on multiple machines to scale up to high-volume production workloads. Kafka Streams transparently handles the load balancing of multiple instances of the same application by leveraging Kafka's parallelism model.
     </p>
 
-    <p>Learn More about Kafka Streams read <a href="/0110/documentation/streams">this</a> Section.</p>
+    <p>Learn More about Kafka Streams read <a href="/10/documentation/streams">this</a> Section.</p>
 
 <!--#include virtual="../includes/_footer.htm" -->
 <!--#include virtual="../includes/_docs_footer.htm" -->

http://git-wip-us.apache.org/repos/asf/kafka/blob/c3d8bcc2/docs/js/templateData.js
----------------------------------------------------------------------
diff --git a/docs/js/templateData.js b/docs/js/templateData.js
index 4b57914..2238b08 100644
--- a/docs/js/templateData.js
+++ b/docs/js/templateData.js
@@ -17,8 +17,8 @@ limitations under the License.
 
 // Define variables for doc templates
 var context={
-    "version": "100",
+    "version": "10",
     "dotVersion": "1.0",
-    "fullDotVersion": "1.0.0",
+    "fullDotVersion": "1.0.1",
     "scalaVersion": "2.11"
 };

http://git-wip-us.apache.org/repos/asf/kafka/blob/c3d8bcc2/docs/streams/index.html
----------------------------------------------------------------------
diff --git a/docs/streams/index.html b/docs/streams/index.html
index c3a2762..ab72c87 100644
--- a/docs/streams/index.html
+++ b/docs/streams/index.html
@@ -152,10 +152,12 @@
                <pre class="brush: java;">
                    import org.apache.kafka.common.serialization.Serdes;
                    import org.apache.kafka.streams.KafkaStreams;
+                   import org.apache.kafka.streams.StreamsBuilder;
                    import org.apache.kafka.streams.StreamsConfig;
-                   import org.apache.kafka.streams.kstream.KStream;
-                   import org.apache.kafka.streams.kstream.KStreamBuilder;
-                   import org.apache.kafka.streams.kstream.KTable;
+                   import org.apache.kafka.streams.Topology;
+                   import org.apache.kafka.streams.kstream.Materialized;
+                   import org.apache.kafka.streams.kstream.Produced;
+                   import org.apache.kafka.streams.state.KeyValueStore;
        
                    import java.util.Arrays;
                    import java.util.Properties;
@@ -169,15 +171,15 @@
                            config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
                            config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        
-                           KStreamBuilder builder = new KStreamBuilder();
+                           StreamsBuilder builder = new StreamsBuilder();
                            KStream&lt;String, String&gt; textLines = builder.stream("TextLinesTopic");
                            KTable&lt;String, Long&gt; wordCounts = textLines
                                .flatMapValues(textLine -> Arrays.asList(textLine.toLowerCase().split("\\W+")))
                                .groupBy((key, word) -> word)
-                               .count("Counts");
-                           wordCounts.to(Serdes.String(), Serdes.Long(), "WordsWithCountsTopic");
+                               .count(Materialized.&lt;String, Long, KeyValueStore&lt;Bytes, byte[]&gt;&gt;as("counts-store"));
+                           wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()));
        
-                           KafkaStreams streams = new KafkaStreams(builder, config);
+                           KafkaStreams streams = new KafkaStreams(builder.build(), config);
                            streams.start();
                        }
        
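
Applied to the file, the updated Java 8 example reads as below. One caveat worth flagging: the hunk removes the imports for KStream and KTable and never adds org.apache.kafka.common.utils.Bytes, yet the new code still references all three, so a compiling version needs them restored. The following is a consolidated sketch with those imports added back; the application-id and bootstrap-server settings are illustrative assumptions, since that part of the file is outside the hunk.

    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.common.utils.Bytes;
    import org.apache.kafka.streams.KafkaStreams;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.StreamsConfig;
    import org.apache.kafka.streams.kstream.KStream;
    import org.apache.kafka.streams.kstream.KTable;
    import org.apache.kafka.streams.kstream.Materialized;
    import org.apache.kafka.streams.kstream.Produced;
    import org.apache.kafka.streams.state.KeyValueStore;

    import java.util.Arrays;
    import java.util.Properties;

    public class WordCountApplication {

        public static void main(final String[] args) {
            Properties config = new Properties();
            // Illustrative values; the hunk does not show these lines of the file.
            config.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application");
            config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
            config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

            StreamsBuilder builder = new StreamsBuilder();
            KStream<String, String> textLines = builder.stream("TextLinesTopic");
            KTable<String, Long> wordCounts = textLines
                // Split each line into lowercase words.
                .flatMapValues(textLine -> Arrays.asList(textLine.toLowerCase().split("\\W+")))
                // Re-key each record by the word itself.
                .groupBy((key, word) -> word)
                // Count occurrences, materialized in a local state store.
                .count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("counts-store"));
            wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()));

            KafkaStreams streams = new KafkaStreams(builder.build(), config);
            streams.start();
        }
    }
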
@@ -189,13 +191,15 @@
                <pre class="brush: java;">
                    import org.apache.kafka.common.serialization.Serdes;
                    import org.apache.kafka.streams.KafkaStreams;
+                   import org.apache.kafka.streams.StreamsBuilder;
                    import org.apache.kafka.streams.StreamsConfig;
-                   import org.apache.kafka.streams.kstream.KStream;
-                   import org.apache.kafka.streams.kstream.KStreamBuilder;
-                   import org.apache.kafka.streams.kstream.KTable;
+                   import org.apache.kafka.streams.Topology;
                    import org.apache.kafka.streams.kstream.KeyValueMapper;
+                   import org.apache.kafka.streams.kstream.Materialized;
+                   import org.apache.kafka.streams.kstream.Produced;
                    import org.apache.kafka.streams.kstream.ValueMapper;
-       
+                   import org.apache.kafka.streams.state.KeyValueStore;
+
                    import java.util.Arrays;
                    import java.util.Properties;
        
@@ -208,7 +212,7 @@
                            config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
                            config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        
-                           KStreamBuilder builder = new KStreamBuilder();
+                           StreamsBuilder builder = new StreamsBuilder();
                            KStream&lt;String, String&gt; textLines = builder.stream("TextLinesTopic");
                            KTable&lt;String, Long&gt; wordCounts = textLines
                                .flatMapValues(new ValueMapper&lt;String, Iterable&lt;String&gt;&gt;() {
@@ -223,10 +227,12 @@
                                        return word;
                                    }
                                })
-                               .count("Counts");
-                           wordCounts.to(Serdes.String(), Serdes.Long(), "WordsWithCountsTopic");
+                               .count(Materialized.&lt;String, Long, KeyValueStore&lt;Bytes, byte[]&gt;&gt;as("counts-store"));
+
+
+                           wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()));
        
-                           KafkaStreams streams = new KafkaStreams(builder, config);
+                           KafkaStreams streams = new KafkaStreams(builder.build(), config);
                            streams.start();
                        }
        
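
The Java 7 variant carries the same import caveat (KStream, KTable, and Bytes are referenced but not imported by the hunk). A consolidated sketch, with the config values again illustrative:

    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.common.utils.Bytes;
    import org.apache.kafka.streams.KafkaStreams;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.StreamsConfig;
    import org.apache.kafka.streams.kstream.KStream;
    import org.apache.kafka.streams.kstream.KTable;
    import org.apache.kafka.streams.kstream.KeyValueMapper;
    import org.apache.kafka.streams.kstream.Materialized;
    import org.apache.kafka.streams.kstream.Produced;
    import org.apache.kafka.streams.kstream.ValueMapper;
    import org.apache.kafka.streams.state.KeyValueStore;

    import java.util.Arrays;
    import java.util.Properties;

    public class WordCountApplicationJava7 {

        public static void main(final String[] args) {
            Properties config = new Properties();
            // Illustrative values; the hunk does not show these lines of the file.
            config.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-application");
            config.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            config.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
            config.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

            StreamsBuilder builder = new StreamsBuilder();
            KStream<String, String> textLines = builder.stream("TextLinesTopic");
            KTable<String, Long> wordCounts = textLines
                // Split each line into lowercase words.
                .flatMapValues(new ValueMapper<String, Iterable<String>>() {
                    @Override
                    public Iterable<String> apply(String textLine) {
                        return Arrays.asList(textLine.toLowerCase().split("\\W+"));
                    }
                })
                // Re-key each record by the word itself.
                .groupBy(new KeyValueMapper<String, String, String>() {
                    @Override
                    public String apply(String key, String word) {
                        return word;
                    }
                })
                // Count occurrences, materialized in a local state store.
                .count(Materialized.<String, Long, KeyValueStore<Bytes, byte[]>>as("counts-store"));
            wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()));

            KafkaStreams streams = new KafkaStreams(builder.build(), config);
            streams.start();
        }
    }
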
@@ -242,7 +248,8 @@
        
                    import org.apache.kafka.common.serialization._
                    import org.apache.kafka.streams._
-                   import org.apache.kafka.streams.kstream.{KStream, KStreamBuilder, KTable}
+                   import org.apache.kafka.streams.kstream.{KeyValueMapper, Materialized, Produced, ValueMapper}
+                   import org.apache.kafka.streams.state.KeyValueStore;
        
                    import scala.collection.JavaConverters.asJavaIterableConverter
        
@@ -258,15 +265,15 @@
                                p
                            }
        
-                           val builder: KStreamBuilder = new KStreamBuilder()
+                           val builder: StreamsBuilder = new StreamsBuilder()
                            val textLines: KStream[String, String] = builder.stream("TextLinesTopic")
                            val wordCounts: KTable[String, Long] = textLines
                                .flatMapValues(textLine => textLine.toLowerCase.split("\\W+").toIterable.asJava)
                                .groupBy((_, word) => word)
-                               .count("Counts")
-                           wordCounts.to(Serdes.String(), Serdes.Long(), "WordsWithCountsTopic")
+                               .count(Materialized.as("counts-store").asInstanceOf[Materialized[String, Long, KeyValueStore[Bytes, Array[Byte]]]])
+                           wordCounts.toStream().to("WordsWithCountsTopic", Produced.with(Serdes.String(), Serdes.Long()))
        
-                           val streams: KafkaStreams = new KafkaStreams(builder, config)
+                           val streams: KafkaStreams = new KafkaStreams(builder.build(), config)
                            streams.start()
        
                            Runtime.getRuntime.addShutdownHook(new Thread(() => {
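
The Scala hunk ends mid-statement, at the shutdown hook that the diff context truncates. For completeness, the equivalent Java idiom, given the streams instance from the sketches above, is a one-liner (again a sketch, not part of this patch):

    // Close the Streams instance cleanly when the JVM shuts down;
    // KafkaStreams.close() blocks until all stream threads have stopped.
    Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
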

