kafka-commits mailing list archives

From ewe...@apache.org
Subject [1/4] kafka git commit: Revert "KAFKA-4345; Run decktape test for each pull request"
Date Tue, 29 Nov 2016 17:11:55 GMT
Repository: kafka
Updated Branches:
  refs/heads/trunk 3e3b7a010 -> a5d28149f


http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/kafkatest/tests/security2/security_rolling_upgrade_test.py
----------------------------------------------------------------------
diff --git a/tests/kafkatest/tests/security2/security_rolling_upgrade_test.py b/tests/kafkatest/tests/security2/security_rolling_upgrade_test.py
deleted file mode 100644
index 51b2e60..0000000
--- a/tests/kafkatest/tests/security2/security_rolling_upgrade_test.py
+++ /dev/null
@@ -1,190 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-from kafkatest.services.zookeeper import ZookeeperService
-from kafkatest.services.kafka import KafkaService
-from kafkatest.services.verifiable_producer import VerifiableProducer
-from kafkatest.services.console_consumer import ConsoleConsumer
-from kafkatest.utils import is_int
-from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
-from ducktape.mark import parametrize
-from ducktape.mark import matrix
-from kafkatest.services.security.kafka_acls import ACLs
-import time
-
-
-class TestSecurityRollingUpgrade(ProduceConsumeValidateTest):
-    """Tests a rolling upgrade from PLAINTEXT to a secured cluster
-    """
-
-    def __init__(self, test_context):
-        super(TestSecurityRollingUpgrade, self).__init__(test_context=test_context)
-
-    def setUp(self):
-        self.acls = ACLs(self.test_context)
-        self.topic = "test_topic"
-        self.group = "group"
-        self.producer_throughput = 100
-        self.num_producers = 1
-        self.num_consumers = 1
-        self.zk = ZookeeperService(self.test_context, num_nodes=1)
-        self.kafka = KafkaService(self.test_context, num_nodes=3, zk=self.zk, topics={self.topic: {
-            "partitions": 3,
-            "replication-factor": 3,
-            'configs': {"min.insync.replicas": 2}}})
-        self.zk.start()
-
-    def create_producer_and_consumer(self):
-        self.producer = VerifiableProducer(
-            self.test_context, self.num_producers, self.kafka, self.topic,
-            throughput=self.producer_throughput)
-
-        self.consumer = ConsoleConsumer(
-            self.test_context, self.num_consumers, self.kafka, self.topic,
-            consumer_timeout_ms=60000, message_validator=is_int)
-
-        self.consumer.group_id = "group"
-
-    def bounce(self):
-        self.kafka.start_minikdc()
-        for node in self.kafka.nodes:
-            self.kafka.stop_node(node)
-            self.kafka.start_node(node)
-            time.sleep(10)
-
-    def roll_in_secured_settings(self, client_protocol, broker_protocol):
-
-        # Roll cluster to include inter broker security protocol.
-        self.kafka.interbroker_security_protocol = broker_protocol
-        self.kafka.open_port(client_protocol)
-        self.kafka.open_port(broker_protocol)
-        self.bounce()
-
-        # Roll cluster to disable PLAINTEXT port
-        self.kafka.close_port('PLAINTEXT')
-        self.set_authorizer_and_bounce(client_protocol, broker_protocol)
-
-    def set_authorizer_and_bounce(self, client_protocol, broker_protocol):
-        self.kafka.authorizer_class_name = KafkaService.SIMPLE_AUTHORIZER
-        self.acls.set_acls(client_protocol, self.kafka, self.zk, self.topic, self.group)
-        self.acls.set_acls(broker_protocol, self.kafka, self.zk, self.topic, self.group)
-        self.bounce()
-
-    def open_secured_port(self, client_protocol):
-        self.kafka.security_protocol = client_protocol
-        self.kafka.open_port(client_protocol)
-        self.kafka.start_minikdc()
-        self.bounce()
-
-    def add_sasl_mechanism(self, new_client_sasl_mechanism):
-        self.kafka.client_sasl_mechanism = new_client_sasl_mechanism
-        self.kafka.start_minikdc()
-        self.bounce()
-
-    def roll_in_sasl_mechanism(self, security_protocol, new_sasl_mechanism):
-        # Roll cluster to update inter-broker SASL mechanism. This disables the old mechanism.
-        self.kafka.interbroker_sasl_mechanism = new_sasl_mechanism
-        self.bounce()
-
-        # Bounce again with ACLs for new mechanism
-        self.set_authorizer_and_bounce(security_protocol, security_protocol)
-
-    @matrix(client_protocol=["SSL", "SASL_PLAINTEXT", "SASL_SSL"])
-    def test_rolling_upgrade_phase_one(self, client_protocol):
-        """
-        Start with a PLAINTEXT cluster, open a SECURED port via a rolling upgrade, ensuring we can produce
-        and consume throughout over PLAINTEXT. Finally check we can produce and consume via the new secured port.
-        """
-        self.kafka.interbroker_security_protocol = "PLAINTEXT"
-        self.kafka.security_protocol = "PLAINTEXT"
-        self.kafka.start()
-
-        # Create PLAINTEXT producer and consumer
-        self.create_producer_and_consumer()
-
-        # Rolling upgrade, opening a secure protocol, ensuring the Plaintext producer/consumer continues to run
-        self.run_produce_consume_validate(self.open_secured_port, client_protocol)
-
-        # Now we can produce and consume via the secured port
-        self.kafka.security_protocol = client_protocol
-        self.create_producer_and_consumer()
-        self.run_produce_consume_validate(lambda: time.sleep(1))
-
-    @matrix(client_protocol=["SASL_SSL", "SSL", "SASL_PLAINTEXT"], broker_protocol=["SASL_SSL", "SSL", "SASL_PLAINTEXT"])
-    def test_rolling_upgrade_phase_two(self, client_protocol, broker_protocol):
-        """
-        Start with a PLAINTEXT cluster with a second Secured port open (i.e. result of phase one).
-        Start a Producer and Consumer via the SECURED port
-        Incrementally upgrade the inter-broker protocol to the secure protocol
-        Incrementally upgrade again to add ACLs and disable the PLAINTEXT port
-        Ensure the producer and consumer run throughout
-        """
-        #Given we have a broker that has both secure and PLAINTEXT ports open
-        self.kafka.security_protocol = client_protocol
-        self.kafka.interbroker_security_protocol = "PLAINTEXT"
-        self.kafka.start()
-
-        #Create Secured Producer and Consumer
-        self.create_producer_and_consumer()
-
-        #Roll in the security protocol. Disable Plaintext. Ensure we can produce and consume throughout
-        self.run_produce_consume_validate(self.roll_in_secured_settings, client_protocol, broker_protocol)
-
-    @parametrize(new_client_sasl_mechanism='PLAIN')
-    def test_rolling_upgrade_sasl_mechanism_phase_one(self, new_client_sasl_mechanism):
-        """
-        Start with a SASL/GSSAPI cluster, add a new SASL mechanism via a rolling upgrade, ensuring we can produce
-        and consume throughout over SASL/GSSAPI. Finally check we can produce and consume using the new mechanism.
-        """
-        self.kafka.interbroker_security_protocol = "SASL_SSL"
-        self.kafka.security_protocol = "SASL_SSL"
-        self.kafka.client_sasl_mechanism = "GSSAPI"
-        self.kafka.interbroker_sasl_mechanism = "GSSAPI"
-        self.kafka.start()
-
-        # Create SASL/GSSAPI producer and consumer
-        self.create_producer_and_consumer()
-
-        # Rolling upgrade, adding new SASL mechanism, ensuring the GSSAPI producer/consumer continues to run
-        self.run_produce_consume_validate(self.add_sasl_mechanism, new_client_sasl_mechanism)
-
-        # Now we can produce and consume using the new SASL mechanism
-        self.kafka.client_sasl_mechanism = new_client_sasl_mechanism
-        self.create_producer_and_consumer()
-        self.run_produce_consume_validate(lambda: time.sleep(1))
-
-    @parametrize(new_sasl_mechanism='PLAIN')
-    def test_rolling_upgrade_sasl_mechanism_phase_two(self, new_sasl_mechanism):
-        """
-        Start with a SASL cluster with GSSAPI for inter-broker and a second mechanism for clients (i.e. result of phase one).
-        Start Producer and Consumer using the second mechanism
-        Incrementally upgrade to set inter-broker to the second mechanism and disable GSSAPI
-        Incrementally upgrade again to add ACLs
-        Ensure the producer and consumer run throughout
-        """
-        #Start with a broker that has GSSAPI for inter-broker and a second mechanism for clients
-        self.kafka.security_protocol = "SASL_SSL"
-        self.kafka.interbroker_security_protocol = "SASL_SSL"
-        self.kafka.client_sasl_mechanism = new_sasl_mechanism
-        self.kafka.interbroker_sasl_mechanism = "GSSAPI"
-        self.kafka.start()
-
-        #Create Producer and Consumer using second mechanism
-        self.create_producer_and_consumer()
-
-        #Roll in the second SASL mechanism for inter-broker, disabling first mechanism. Ensure we can produce and consume throughout
-        self.run_produce_consume_validate(self.roll_in_sasl_mechanism, self.kafka.security_protocol, new_sasl_mechanism)
-

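For context, the removed test above exercises Kafka's listener-rollover upgrade: the PLAINTEXT listener stays open while a secured listener is rolled in, clients and inter-broker traffic move over, and PLAINTEXT is retired last. A condensed sketch of that flow, reusing the deleted file's service API (the standalone helper and its name are illustrative, not part of the removed code):

    import time

    def upgrade_to_secured(kafka, client_protocol, broker_protocol):
        def bounce():
            # Rolling restart, one broker at a time: with replication-factor=3
            # and min.insync.replicas=2 the cluster stays writable throughout.
            for node in kafka.nodes:
                kafka.stop_node(node)
                kafka.start_node(node)
                time.sleep(10)

        # Bounce 1: open the secured client port alongside PLAINTEXT.
        kafka.security_protocol = client_protocol
        kafka.open_port(client_protocol)
        bounce()

        # Bounce 2: move inter-broker traffic onto the secured protocol.
        kafka.interbroker_security_protocol = broker_protocol
        kafka.open_port(broker_protocol)
        bounce()

        # Bounce 3: all traffic has moved, so retire the PLAINTEXT listener.
        kafka.close_port('PLAINTEXT')
        bounce()
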
http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/kafkatest/tests/upgrade/__init__.py
----------------------------------------------------------------------
diff --git a/tests/kafkatest/tests/upgrade/__init__.py b/tests/kafkatest/tests/upgrade/__init__.py
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/kafkatest/tests/upgrade/upgrade_test.py
----------------------------------------------------------------------
diff --git a/tests/kafkatest/tests/upgrade/upgrade_test.py b/tests/kafkatest/tests/upgrade/upgrade_test.py
deleted file mode 100644
index 26c7099..0000000
--- a/tests/kafkatest/tests/upgrade/upgrade_test.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from ducktape.mark import parametrize
-
-import json
-
-from kafkatest.services.console_consumer import ConsoleConsumer
-from kafkatest.services.kafka import KafkaService
-from kafkatest.services.kafka import config_property
-from kafkatest.services.verifiable_producer import VerifiableProducer
-from kafkatest.services.zookeeper import ZookeeperService
-from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
-from kafkatest.utils import is_int
-from kafkatest.version import LATEST_0_8_2, LATEST_0_9, LATEST_0_10_0, TRUNK, KafkaVersion
-
-class TestUpgrade(ProduceConsumeValidateTest):
-
-    def __init__(self, test_context):
-        super(TestUpgrade, self).__init__(test_context=test_context)
-
-    def setUp(self):
-        self.topic = "test_topic"
-        self.zk = ZookeeperService(self.test_context, num_nodes=1)
-        self.zk.start()
-
-        # Producer and consumer
-        self.producer_throughput = 10000
-        self.num_producers = 1
-        self.num_consumers = 1
-
-    def perform_upgrade(self, from_kafka_version, to_message_format_version=None):
-        self.logger.info("First pass bounce - rolling upgrade")
-        for node in self.kafka.nodes:
-            self.kafka.stop_node(node)
-            node.version = TRUNK
-            node.config[config_property.INTER_BROKER_PROTOCOL_VERSION] = from_kafka_version
-            node.config[config_property.MESSAGE_FORMAT_VERSION] = from_kafka_version
-            self.kafka.start_node(node)
-
-        self.logger.info("Second pass bounce - remove inter.broker.protocol.version config")
-        for node in self.kafka.nodes:
-            self.kafka.stop_node(node)
-            del node.config[config_property.INTER_BROKER_PROTOCOL_VERSION]
-            if to_message_format_version is None:
-                del node.config[config_property.MESSAGE_FORMAT_VERSION]
-            else:
-                node.config[config_property.MESSAGE_FORMAT_VERSION] = to_message_format_version
-            self.kafka.start_node(node)
-
-    @parametrize(from_kafka_version=str(LATEST_0_10_0), to_message_format_version=None, compression_types=["snappy"], new_consumer=False)
-    @parametrize(from_kafka_version=str(LATEST_0_10_0), to_message_format_version=None, compression_types=["snappy"])
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["none"], new_consumer=False)
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["none"], security_protocol="SASL_SSL")
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["snappy"])
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["lz4"], new_consumer=False)
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["lz4"])
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=str(LATEST_0_9), compression_types=["none"], new_consumer=False)
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=str(LATEST_0_9), compression_types=["snappy"])
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=str(LATEST_0_9), compression_types=["lz4"], new_consumer=False)
-    @parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=str(LATEST_0_9), compression_types=["lz4"])
-    @parametrize(from_kafka_version=str(LATEST_0_8_2), to_message_format_version=None, compression_types=["none"], new_consumer=False)
-    @parametrize(from_kafka_version=str(LATEST_0_8_2), to_message_format_version=None, compression_types=["snappy"], new_consumer=False)
-    def test_upgrade(self, from_kafka_version, to_message_format_version, compression_types,
-                     new_consumer=True, security_protocol="PLAINTEXT"):
-        """Test upgrade of Kafka broker cluster from 0.8.2, 0.9.0 or 0.10.0 to the current
version
-
-        from_kafka_version is a Kafka version to upgrade from: either 0.8.2.X, 0.9.0.x or
0.10.0.x
-
-        If to_message_format_version is None, it means that we will upgrade to default (latest)
-        message format version. It is possible to upgrade to 0.10 brokers but still use message
-        format version 0.9
-
-        - Start 3 node broker cluster on version 'from_kafka_version'
-        - Start producer and consumer in the background
-        - Perform two-phase rolling upgrade
-            - First phase: upgrade brokers to 0.10 with inter.broker.protocol.version set to
-            from_kafka_version and log.message.format.version set to from_kafka_version
-            - Second phase: remove inter.broker.protocol.version config with rolling bounce; if
-            to_message_format_version is set to 0.9, set log.message.format.version to
-            to_message_format_version, otherwise remove log.message.format.version config
-        - Finally, validate that every message acked by the producer was consumed by the consumer
-        """
-        self.kafka = KafkaService(self.test_context, num_nodes=3, zk=self.zk,
-                                  version=KafkaVersion(from_kafka_version),
-                                  topics={self.topic: {"partitions": 3, "replication-factor": 3,
-                                                       'configs': {"min.insync.replicas": 2}}})
-        self.kafka.security_protocol = security_protocol
-        self.kafka.interbroker_security_protocol = security_protocol
-        self.kafka.start()
-
-        self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka,
-                                           self.topic, throughput=self.producer_throughput,
-                                           message_validator=is_int,
-                                           compression_types=compression_types,
-                                           version=KafkaVersion(from_kafka_version))
-
-        assert self.zk.query("/cluster/id") is None
-
-        # TODO - reduce the timeout
-        self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka,
-                                        self.topic, consumer_timeout_ms=200000, new_consumer=new_consumer,
-                                        message_validator=is_int, version=KafkaVersion(from_kafka_version))
-
-        self.run_produce_consume_validate(core_test_action=lambda: self.perform_upgrade(from_kafka_version,
-                                                                                        to_message_format_version))
-
-        cluster_id_json = self.zk.query("/cluster/id")
-        assert cluster_id_json is not None
-        try:
-            cluster_id = json.loads(cluster_id_json)
-        except :
-            self.logger.debug("Data in /cluster/id znode could not be parsed. Data = %s"
% cluster_id_json)
-
-        self.logger.debug("Cluster id [%s]", cluster_id)
-        assert len(cluster_id["id"]) == 22

http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/travis/Dockerfile
----------------------------------------------------------------------
diff --git a/tests/travis/Dockerfile b/tests/travis/Dockerfile
deleted file mode 100644
index 48f8bd6..0000000
--- a/tests/travis/Dockerfile
+++ /dev/null
@@ -1,38 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-FROM openjdk:8
-
-MAINTAINER Raghav Kumar Gautam
-# commands to update docker image:
-#   - docker build . -t raghavgautam/kfk-image
-#   - docker push raghavgautam/kfk-image
-RUN apt update
-RUN apt install -y unzip wget curl jq coreutils openssh-server net-tools vim openjdk-8-jdk python-pip
-RUN pip install ducktape
-
-VOLUME ["/kafka"]
-VOLUME ["/kfk_src"]
-
-ENV MIRROR="http://apache.cs.utah.edu/"
-RUN wget -q "${MIRROR}kafka/0.8.2.2/kafka_2.10-0.8.2.2.tgz" -O "/tmp/kafka_2.10-0.8.2.2.tgz"
&& tar xfz /tmp/kafka_2.10-0.8.2.2.tgz -C /opt && mv "/opt/kafka_2.10-0.8.2.2"
"/opt/kafka-0.8.2.2"
-RUN wget -q "${MIRROR}kafka/0.9.0.1/kafka_2.10-0.9.0.1.tgz" -O "/tmp/kafka_2.10-0.9.0.1.tgz"
&& tar xfz /tmp/kafka_2.10-0.9.0.1.tgz -C /opt && mv "/opt/kafka_2.10-0.9.0.1"
"/opt/kafka-0.9.0.1"
-RUN wget -q "${MIRROR}kafka/0.10.0.1/kafka_2.10-0.10.0.1.tgz" -O "/tmp/kafka_2.10-0.10.0.1.tgz"
&& tar xfz /tmp/kafka_2.10-0.10.0.1.tgz -C /opt && mv "/opt/kafka_2.10-0.10.0.1"
"/opt/kafka-0.10.0.1"
-
-RUN rm /tmp/kafka_*.tgz
-ADD ssh /root/.ssh
-RUN chmod 600 /root/.ssh/id_rsa
-
-CMD service ssh start && tail -f /dev/null

http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/travis/run_tests.sh
----------------------------------------------------------------------
diff --git a/tests/travis/run_tests.sh b/tests/travis/run_tests.sh
deleted file mode 100755
index 6660625..0000000
--- a/tests/travis/run_tests.sh
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# To run tests use a command like:
-#   TC_PATHS="tests/kafkatest/tests/streams tests/kafkatest/tests/tools" bash tests/travis/run_tests.sh
-set -x
-
-SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-TESTS_DIR=`dirname ${SCRIPT_DIR}`
-KFK_SRC=`dirname ${TESTS_DIR}`
-
-
-cd ${SCRIPT_DIR}
-chmod 600 ssh/id_rsa
-
-docker network rm knw
-docker network create knw
-
-docker kill $(docker ps -f=network=knw -q)
-docker rm $(docker ps -a -f=network=knw -q)
-
-for i in $(seq -w 1 12); do
-  docker run -d -t --name knode${i} --network knw -v ${KFK_SRC}:/kfk_src raghavgautam/kfk-image
-done
-
-docker info
-docker ps
-docker network inspect knw
-
-for i in $(seq -w 1 12); do
-  echo knode${i}
-  docker exec knode${i} bash -c "(tar xfz /kfk_src/core/build/distributions/kafka_*SNAPSHOT.tgz
-C /opt || echo missing kafka tgz did you build kafka tarball) && mv /opt/kafka*SNAPSHOT
/opt/kafka-trunk && ls -l /opt"
-  docker exec knode01 bash -c "ssh knode$i hostname"
-done
-
-# hack to copy test dependencies
-# this is required for running MiniKDC
-(cd ${KFK_SRC} && ./gradlew copyDependantTestLibs)
-for i in $(seq -w 1 12); do
-  echo knode${i}
-  docker exec knode${i} bash -c "cp /kfk_src/core/build/dependant-testlibs/* /opt/kafka-trunk/libs/"
-  docker exec knode01 bash -c "ssh knode$i hostname"
-done
-
-docker exec knode01 bash -c "cd /kfk_src; ducktape ${_DUCKTAPE_OPTIONS} --cluster-file tests/cluster_file.json
${TC_PATHS:-tests/kafkatest/tests}"

http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/travis/ssh/authorized_keys
----------------------------------------------------------------------
diff --git a/tests/travis/ssh/authorized_keys b/tests/travis/ssh/authorized_keys
deleted file mode 100644
index 9f9da1f..0000000
--- a/tests/travis/ssh/authorized_keys
+++ /dev/null
@@ -1,15 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0qDT9kEPWc8JQ53b4KnT/ZJOLwb+3c//jpLW/2ofjDyIsPW4FohLpicfouch/zsRpN4G38lua+2BsGls9sMIZc6PXY2L+NIGCkqEMdCoU1Ym8SMtyJklfzp3m/0PeK9s2dLlR3PFRYvyFA4btQK5hkbYDNZPzf4airvzdRzLkrFf81+RemaMI2EtONwJRcbLViPaTXVKJdbFwJTJ1u7yu9wDYWHKBMA92mHTQeP6bhVYCqxJn3to/RfZYd+sHw6mfxVg5OrAlUOYpSV4pDNCAsIHdtZ56V8NQlJL6NJ2vzzSSYUwLMqe88fhrC8yYHoxC07QPy1EdkSTHdohAicyT root@knode01.knw

http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/travis/ssh/config
----------------------------------------------------------------------
diff --git a/tests/travis/ssh/config b/tests/travis/ssh/config
deleted file mode 100644
index 1f87417..0000000
--- a/tests/travis/ssh/config
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-Host *
-  ControlMaster auto
-  ControlPath ~/.ssh/master-%r@%h:%p
-  StrictHostKeyChecking no
-  ConnectTimeout=10
-  IdentityFile ~/.ssh/id_rsa

http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/travis/ssh/id_rsa
----------------------------------------------------------------------
diff --git a/tests/travis/ssh/id_rsa b/tests/travis/ssh/id_rsa
deleted file mode 100644
index 276e07b..0000000
--- a/tests/travis/ssh/id_rsa
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEAtKg0/ZBD1nPCUOd2+Cp0/2STi8G/t3P/46S1v9qH4w8iLD1u
-BaIS6YnH6LnIf87EaTeBt/JbmvtgbBpbPbDCGXOj12Ni/jSBgpKhDHQqFNWJvEjL
-ciZJX86d5v9D3ivbNnS5UdzxUWL8hQOG7UCuYZG2AzWT83+Goq783Ucy5KxX/Nfk
-XpmjCNhLTjcCUXGy1Yj2k11SiXWxcCUydbu8rvcA2FhygTAPdph00Hj+m4VWAqsS
-Z97aP0X2WHfrB8Opn8VYOTqwJVDmKUleKQzQgLCB3bWeelfDUJSS+jSdr880kmFM
-CzKnvPH4awvMmB6MQtO0D8tRHZEkx3aIQInMkwIDAQABAoIBAQCz6EMFNNLp0NP1
-X9yRXS6wW4e4CRWUazesiw3YZpcmnp6IchCMGZA99FEZyVILPW1J3tYWyotBdw7Z
-+RFeCRXy5L+IMtiVkNJcpwss7M4ve0w0LkY0gj5V49xJ+3Gp4gDnZSxcguvrAem5
-yP5obR572fDpl0SknB4HCr6U2l+rauzrLyevy5eeDT/vmXbuM1cdHpNIXmmElz4L
-t31n+exQRn6tP1h516iXbcYbopxDgdv2qKGAqzWKE6TyWpzF5x7kjOEYt0bZ5QO3
-Lwh7AAqE/3mwxlYwng1L4WAT7RtcP19W+9JDIc7ENInMGxq6q46p1S3IPZsf1cj/
-aAJ9q3LBAoGBAOVJr0+WkR786n3BuswpGQWBgVxfai4y9Lf90vuGKawdQUzXv0/c
-EB/CFqP/dIsquukA8PfzjNMyTNmEHXi4Sf16H8Rg4EGhIYMEqIQojx1t/yLLm0aU
-YPEvW/02Umtlg3pJw9fQAAzFVqCasw2E2lUdAUkydGRwDUJZmv2/b3NzAoGBAMm0
-Jo7Et7ochH8Vku6uA+hG+RdwlKFm5JA7/Ci3DOdQ1zmJNrvBBFQLo7AjA4iSCoBd
-s9+y0nrSPcF4pM3l6ghLheaqbnIi2HqIMH9mjDbrOZiWvbnjvjpOketgNX8vV3Ye
-GUkSjoNcmvRmdsICmUjeML8bGOmq4zF9W/GIfTphAoGBAKGRo8R8f/SLGh3VtvCI
-gUY89NAHuEWnyIQii1qMNq8+yjYAzaHTm1UVqmiT6SbrzFvGOwcuCu0Dw91+2Fmp
-2xGPzfTOoxf8GCY/0ROXlQmS6jc1rEw24Hzz92ldrwRYuyYf9q4Ltw1IvXtcp5F+
-LW/OiYpv0E66Gs3HYI0wKbP7AoGBAJMZWeFW37LQJ2TTJAQDToAwemq4xPxsoJX7
-2SsMTFHKKBwi0JLe8jwk/OxwrJwF/bieHZcvv8ao2zbkuDQcz6/a/D074C5G8V9z
-QQM4k1td8vQwQw91Yv782/gvgvRNX1iaHNCowtxURgGlVEirQoTc3eoRZfrLkMM/
-7DTa2JEhAoGACEu3zHJ1sgyeOEgLArUJXlQM30A/ulMrnCd4MEyIE+ReyWAUevUQ
-0lYdVNva0/W4C5e2lUOJL41jjIPLqI7tcFR2PZE6n0xTTkxNH5W2u1WpFeKjx+O3
-czv7Bt6wYyLHIMy1JEqAQ7pw1mtJ5s76UDvXUhciF+DU2pWYc6APKR0=
------END RSA PRIVATE KEY-----

http://git-wip-us.apache.org/repos/asf/kafka/blob/a5d28149/tests/travis/ssh/id_rsa.pub
----------------------------------------------------------------------
diff --git a/tests/travis/ssh/id_rsa.pub b/tests/travis/ssh/id_rsa.pub
deleted file mode 100644
index 76e8f5f..0000000
--- a/tests/travis/ssh/id_rsa.pub
+++ /dev/null
@@ -1 +0,0 @@
-ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC0qDT9kEPWc8JQ53b4KnT/ZJOLwb+3c//jpLW/2ofjDyIsPW4FohLpicfouch/zsRpN4G38lua+2BsGls9sMIZc6PXY2L+NIGCkqEMdCoU1Ym8SMtyJklfzp3m/0PeK9s2dLlR3PFRYvyFA4btQK5hkbYDNZPzf4airvzdRzLkrFf81+RemaMI2EtONwJRcbLViPaTXVKJdbFwJTJ1u7yu9wDYWHKBMA92mHTQeP6bhVYCqxJn3to/RfZYd+sHw6mfxVg5OrAlUOYpSV4pDNCAsIHdtZ56V8NQlJL6NJ2vzzSSYUwLMqe88fhrC8yYHoxC07QPy1EdkSTHdohAicyT root@knode01.knw

