mirror of https://github.com/apache/kafka.git
Tweaked README, changed default log collection behavior on VerifiableProducer
This commit is contained in:
parent 0eb6fdc6fb
commit 2ea4e29705
@@ -15,7 +15,7 @@ This quickstart will help you run the Kafka system tests on your local machine.

 * Install Vagrant Plugins:

     # Required
-    $ vagrant plugin install vagrant-hostmanager
+    $ vagrant plugin install vagrant-hostmanager vagrant-cachier

 * Build a specific branch of Kafka
@@ -57,12 +57,15 @@ installing dependencies and updates on every vm.):

 EC2 Quickstart
 --------------
-This quickstart will help you run the Kafka system tests using Amazon EC2. As a convention, we'll use "kafkatest" in most names, but you can use whatever you want.
+This quickstart will help you run the Kafka system tests on EC2. In this setup, all logic is run
+on EC2 and none on your local machine.
+
+There are a lot of steps here, but the basic goals are to create one distinguished EC2 instance that
+will be our "test driver", and to set up the security groups and iam role so that the test driver
+can create, destroy, and run ssh commands on any number of "workers".
+
+As a convention, we'll use "kafkatest" in most names, but you can use whatever name you want.

 Preparation
 -----------
 In these steps, we will create an IAM role which has permission to create and destroy EC2 instances,
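The new README text describes the driver/worker split only at a high level. As a rough illustration of what "create, destroy, and run ssh commands on any number of workers" implies, here is a minimal sketch using boto3; the library choice, AMI id, instance type, key pair, and security group names are placeholders, not how the README's Vagrant-based flow is actually implemented:

    # Illustrative only: a test driver that can create and destroy workers.
    import boto3

    ec2 = boto3.resource("ec2", region_name="us-east-1")

    def create_workers(count):
        # Launch "worker" instances that the test driver will later ssh into.
        return ec2.create_instances(
            ImageId="ami-00000000",        # placeholder AMI
            MinCount=count,
            MaxCount=count,
            InstanceType="m3.medium",
            KeyName="kafkatest",           # placeholder key pair name
            SecurityGroups=["kafkatest"],  # placeholder security group name
        )

    def destroy_workers(instances):
        # Tear the workers back down once the run is over.
        for instance in instances:
            instance.terminate()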
@@ -23,7 +23,7 @@ class VerifiableProducer(BackgroundThreadService):
     logs = {
         "producer_log": {
             "path": "/mnt/producer.log",
-            "collect_default": True}
+            "collect_default": False}
     }

     def __init__(self, context, num_nodes, kafka, topic, max_messages=-1, throughput=100000):
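The effect of this one-word change: the producer log at /mnt/producer.log is no longer copied back after every test run, so a test that still wants it has to opt in, which is what the ReplicationTest change later in this commit does on failure. A hedged sketch of that opt-in (the test name is made up; mark_for_collect is the call the diff itself uses):

    # Illustrative only, not part of this commit: with "collect_default" now
    # False, a test must explicitly ask for the producer log.
    def test_example(self):
        self.producer.start()
        # ... drive the workload and make assertions ...
        self.mark_for_collect(self.producer)   # pull /mnt/producer.log back afterwards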
@@ -47,8 +47,6 @@ class VerifiableProducer(BackgroundThreadService):
         data = self.try_parse_json(line)
         if data is not None:
-
-            self.logger.debug("VerifiableProducer: " + str(data))

             with self.lock:
                 if data["name"] == "producer_send_error":
                     data["node"] = idx
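For context, the worker thread parses each stdout line from the verifiable producer as JSON and skips anything else. The body of try_parse_json is not part of this diff; a minimal sketch of what such a helper typically looks like:

    # Hedged sketch of the helper referenced above (its real body is not shown
    # in this diff): return the parsed event, or None for non-JSON lines.
    import json

    def try_parse_json(line):
        try:
            return json.loads(line)
        except ValueError:
            return None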
@@ -97,7 +97,12 @@ class ReplicationTest(Test):
         self.logger.info("num consumed: %d" % len(self.consumed))

         # Check produced vs consumed
-        self.validate()
+        success, msg = self.validate()
+
+        if not success:
+            self.mark_for_collect(self.producer)
+
+        assert success, msg

     def clean_shutdown(self):
         """Discover leader node for our topic and shut it down cleanly."""
@@ -142,7 +147,7 @@ class ReplicationTest(Test):
         # Collect all the data logs if there was a failure
         self.mark_for_collect(self.kafka)

-        assert success, msg
+        return success, msg

     def test_clean_shutdown(self):
         self.run_with_failure(self.clean_shutdown)
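Taken together, the two ReplicationTest hunks move the pass/fail decision out of validate(): validate() now reports a verdict and marks the broker logs for collection, while its caller marks the producer log and performs the assert. A condensed, hedged sketch of that flow (the caller's name and both bodies are stand-ins, not copied from the file):

    # Paraphrased illustration of the control flow after this commit.
    def validate(self):
        success, msg = True, ""
        # ... compare produced vs consumed records, filling in success/msg ...
        # Collect all the data logs if there was a failure
        self.mark_for_collect(self.kafka)
        return success, msg

    def run_test_body(self):
        # ... run the workload around the injected failure ...
        success, msg = self.validate()
        if not success:
            self.mark_for_collect(self.producer)
        assert success, msg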