This is an automated email from the ASF dual-hosted git repository.
guozhang pushed a commit to branch 2.0
in repository https://gitbox.apache.org/repos/asf/kafka.git
The following commit(s) were added to refs/heads/2.0 by this push:
new 38fd13d MINOR: standby task test throughput too low 2.0 (#6062)
38fd13d is described below
commit 38fd13d9c08778bf3e9a950c0062ef8e41c5282f
Author: Bill Bejeck <bbejeck@gmail.com>
AuthorDate: Sat Dec 22 01:41:28 2018 -0500
MINOR: standby task test throughput too low 2.0 (#6062)
Previous PR #6043 reduced the throughput for VerifiableProducer in the base class, but streams_standby_replica_test
needs a higher throughput for the consumer to complete verification within 60 seconds. Same update as
#6060 and #6061.
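As a rough sanity check (a hedged sketch; the 60000-record figure below is only an assumed, illustrative message count, not taken from this diff):

    # Estimate produce time at the old and new throughput settings (records per second).
    num_messages = 60000                      # assumed illustrative value
    for throughput in (1000, 15000):
        print("%5d rec/s -> ~%.0f s to produce" % (throughput, num_messages / float(throughput)))
    # 1000 rec/s  -> ~60 s, leaving the consumer no headroom inside the 60-second window
    # 15000 rec/s -> ~4 s, leaving ample time for verification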
Reviewers: Guozhang Wang <wangguoz@gmail.com>
---
tests/kafkatest/tests/streams/base_streams_test.py | 4 ++--
tests/kafkatest/tests/streams/streams_standby_replica_test.py | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/tests/kafkatest/tests/streams/base_streams_test.py b/tests/kafkatest/tests/streams/base_streams_test.py
index 6e005dd..9a9704e 100644
--- a/tests/kafkatest/tests/streams/base_streams_test.py
+++ b/tests/kafkatest/tests/streams/base_streams_test.py
@@ -38,14 +38,14 @@ class BaseStreamsTest(KafkaTest):
                                    client_id,
                                    max_messages=num_messages)
 
-    def get_producer(self, topic, num_messages, repeating_keys=None):
+    def get_producer(self, topic, num_messages, throughput=1000, repeating_keys=None):
         return VerifiableProducer(self.test_context,
                                    1,
                                    self.kafka,
                                    topic,
                                    max_messages=num_messages,
                                    acks=1,
-                                   throughput=1000,
+                                   throughput=throughput,
                                    repeating_keys=repeating_keys)
 
     def assert_produce_consume(self,
diff --git a/tests/kafkatest/tests/streams/streams_standby_replica_test.py b/tests/kafkatest/tests/streams/streams_standby_replica_test.py
index 416a110..8425e14 100644
--- a/tests/kafkatest/tests/streams/streams_standby_replica_test.py
+++ b/tests/kafkatest/tests/streams/streams_standby_replica_test.py
@@ -46,7 +46,7 @@ class StreamsStandbyTask(BaseStreamsTest):
                                                                                     self.streams_sink_topic_1,
                                                                                     self.streams_sink_topic_2))
 
-        producer = self.get_producer(self.streams_source_topic, self.num_messages, repeating_keys=6)
+        producer = self.get_producer(self.streams_source_topic, self.num_messages, throughput=15000, repeating_keys=6)
         producer.start()
 
         processor_1 = StreamsStandbyTaskService(self.test_context, self.kafka, configs)
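For reference, a minimal usage sketch of the parameterized helper (the topic and message-count names below are placeholders; only the standby-replica test needs the explicit override):

    # Callers that pass nothing keep the base-class default of 1000 records/s.
    slow_producer = self.get_producer(some_topic, some_num_messages)
    # The standby-replica test overrides the default so producing finishes well
    # inside its 60-second verification window.
    fast_producer = self.get_producer(some_topic, some_num_messages, throughput=15000, repeating_keys=6)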