Updated console consumer to log output directly to file rather than stdout

Geoff Anderson 2015-08-06 18:36:26 -07:00
parent 006b45c7e5
commit 521a84b6d5
5 changed files with 145 additions and 10 deletions

View File

@@ -0,0 +1,14 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,56 @@
# Copyright 2015 Confluent Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.tests.test import Test
from ducktape.utils.util import wait_until

from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.services.kafka import KafkaService
from kafkatest.services.console_consumer import ConsoleConsumer

import time


class ConsoleConsumerTest(Test):
    """Sanity checks on console consumer service class."""

    def __init__(self, test_context):
        super(ConsoleConsumerTest, self).__init__(test_context)

        self.topic = "topic"
        self.zk = ZookeeperService(test_context, num_nodes=1)
        self.kafka = KafkaService(test_context, num_nodes=1, zk=self.zk,
                                  topics={self.topic: {"partitions": 1, "replication-factor": 1}})
        self.consumer = ConsoleConsumer(test_context, num_nodes=1, kafka=self.kafka, topic=self.topic)

    def setUp(self):
        self.zk.start()
        self.kafka.start()

    def test_start(self):
        t0 = time.time()
        self.consumer.start()
        node = self.consumer.nodes[0]

        if not wait_until(lambda: self.consumer.alive(node), timeout_sec=10, backoff_sec=.2):
            raise Exception("Consumer was too slow to start")
        self.logger.info("consumer started in %s seconds " % str(time.time() - t0))

        # Verify that log output is happening
        consumer_log_lines = [line for line in node.account.ssh_capture("cat %s" % ConsoleConsumer.log_file)]
        assert len(consumer_log_lines) > 0

        # Verify no consumed messages
        consumed = [line for line in node.account.ssh_capture("cat %s" % ConsoleConsumer.stdout_capture)]
        assert len(consumed) == 0

View File

@@ -15,6 +15,7 @@
from ducktape.services.background_thread import BackgroundThreadService
import os


def is_int(msg):
    """Default method used to check whether text pulled from console consumer is a message.
@@ -69,9 +70,24 @@ Option Description
class ConsoleConsumer(BackgroundThreadService):
    # Root directory for persistent output
    persistent_root = "/mnt/console_consumer"
    stdout_capture = os.path.join(persistent_root, "console_consumer.stdout")
    stderr_capture = os.path.join(persistent_root, "console_consumer.stderr")
    log_dir = os.path.join(persistent_root, "logs")
    log_file = os.path.join(log_dir, "console_consumer.log")
    log4j_config = os.path.join(persistent_root, "tools-log4j.properties")
    config_file = os.path.join(persistent_root, "console_consumer.properties")

    logs = {
        "consumer_stdout": {
            "path": stdout_capture,
            "collect_default": False},
        "consumer_stderr": {
            "path": stderr_capture,
            "collect_default": False},
        "consumer_log": {
            "path": "/mnt/consumer.log",
            "path": log_file,
            "collect_default": True}
    }
@@ -104,18 +120,37 @@ class ConsoleConsumer(BackgroundThreadService):
    @property
    def start_cmd(self):
        args = self.args.copy()
        args.update({'zk_connect': self.kafka.zk.connect_setting()})
        cmd = "/opt/kafka/bin/kafka-console-consumer.sh --topic %(topic)s --zookeeper %(zk_connect)s" \
              " --consumer.config /mnt/console_consumer.properties" % args
        args['zk_connect'] = self.kafka.zk.connect_setting()
        args['stdout'] = ConsoleConsumer.stdout_capture
        args['stderr'] = ConsoleConsumer.stderr_capture
        args['config_file'] = ConsoleConsumer.config_file

        cmd = "export LOG_DIR=%s;" % ConsoleConsumer.log_dir
        cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\";" % ConsoleConsumer.log4j_config
        cmd += " /opt/kafka/bin/kafka-console-consumer.sh --topic %(topic)s --zookeeper %(zk_connect)s" \
               " --consumer.config %(config_file)s" % args

        if self.from_beginning:
            cmd += " --from-beginning"

        cmd += " 2>> /mnt/consumer.log | tee -a /mnt/consumer.log &"
        cmd += " 2>> %(stderr)s | tee -a %(stdout)s &" % args
        return cmd

    def pids(self, node):
        try:
            cmd = "ps ax | grep -i console_consumer | grep java | grep -v grep | awk '{print $1}'"
            pid_arr = [pid for pid in node.account.ssh_capture(cmd, allow_fail=True, callback=int)]
            return pid_arr
        except:
            return []

    def alive(self, node):
        return len(self.pids(node)) > 0

    def _worker(self, idx, node):
        # form config file
        node.account.ssh("mkdir -p %s" % ConsoleConsumer.persistent_root, allow_fail=False)

        # Create and upload config file
        if self.consumer_timeout_ms is not None:
            prop_file = self.render('console_consumer.properties', consumer_timeout_ms=self.consumer_timeout_ms)
        else:
@@ -123,12 +158,16 @@ class ConsoleConsumer(BackgroundThreadService):
        self.logger.info("console_consumer.properties:")
        self.logger.info(prop_file)
        node.account.create_file("/mnt/console_consumer.properties", prop_file)
        node.account.create_file(ConsoleConsumer.config_file, prop_file)

        # Create and upload log properties
        log_config = self.render('console_consumer_log4j.properties', log_file=ConsoleConsumer.log_file)
        node.account.create_file(ConsoleConsumer.log4j_config, log_config)

        # Run and capture output
        cmd = self.start_cmd
        self.logger.debug("Console consumer %d command: %s", idx, cmd)
        for line in node.account.ssh_capture(cmd):
        for line in node.account.ssh_capture(cmd, allow_fail=False):
            msg = line.strip()
            msg = self.message_validator(msg)
            if msg is not None:
@@ -142,5 +181,5 @@ class ConsoleConsumer(BackgroundThreadService):
        node.account.kill_process("java", allow_fail=False)

    def clean_node(self, node):
        node.account.ssh("rm -rf /mnt/console_consumer.properties /mnt/consumer.log", allow_fail=False)
        node.account.ssh("rm -rf %s" % ConsoleConsumer.persistent_root, allow_fail=False)

View File

@@ -14,6 +14,6 @@
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
{% if consumer_timeout_ms is defined %}
{% if consumer_timeout_ms is not none %}
consumer.timeout.ms={{ consumer_timeout_ms }}
{% endif %}
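
The template guard changes from "is defined" to "is not none". A minimal sketch (plain Jinja2, not part of the commit) of how the two tests differ when the variable is passed but carries no value:

# Illustrative only: compares the old and new Jinja2 guards used by this template.
from jinja2 import Template

old = Template("{% if consumer_timeout_ms is defined %}consumer.timeout.ms={{ consumer_timeout_ms }}{% endif %}")
new = Template("{% if consumer_timeout_ms is not none %}consumer.timeout.ms={{ consumer_timeout_ms }}{% endif %}")

print(old.render(consumer_timeout_ms=None))    # consumer.timeout.ms=None  (line emitted even without a value)
print(new.render(consumer_timeout_ms=None))    # empty string: the property is simply omitted
print(new.render(consumer_timeout_ms=10000))   # consumer.timeout.ms=10000

Jinja2 treats a variable passed as None as defined, so the two guards differ only in that case.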

View File

@@ -0,0 +1,26 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Define the root logger with appender file
log4j.rootLogger = INFO, FILE
log4j.appender.FILE=org.apache.log4j.FileAppender
log4j.appender.FILE.File={{ log_file }}
log4j.appender.FILE.ImmediateFlush=true
log4j.appender.FILE.Threshold=debug
# Set Append to false so the log file is overwritten rather than appended to
log4j.appender.FILE.Append=false
log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n