KAFKA-9366 Upgrade log4j to log4j2 (#17373)

This pull request replaces Log4j with Log4j2 across the entire project, including dependencies, configurations, and code. The notable changes are listed below:

1. Introduce Log4j2 Instead of Log4j
2. Change Configuration File Format from Properties to YAML
3. Add warnings to notify users if they are still using Log4j properties, encouraging them to transition to Log4j2 configurations

Co-authored-by: Lee Dongjin <dongjin@apache.org>

Reviewers: Luke Chen <showuon@gmail.com>, Mickael Maison <mickael.maison@gmail.com>, Chia-Ping Tsai <chia7712@gmail.com>
This commit is contained in:
TengYao Chi 2024-12-14 01:14:31 +08:00 committed by GitHub
parent b94defa189
commit b37b89c668
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
94 changed files with 2041 additions and 939 deletions

View File

@ -220,6 +220,7 @@ jackson-annotations-2.16.2
jackson-core-2.16.2
jackson-databind-2.16.2
jackson-dataformat-csv-2.16.2
jackson-dataformat-yaml-2.16.2
jackson-datatype-jdk8-2.16.2
jackson-jaxrs-base-2.16.2
jackson-jaxrs-json-provider-2.16.2
@ -239,6 +240,11 @@ jetty-servlets-9.4.56.v20240826
jetty-util-9.4.56.v20240826
jetty-util-ajax-9.4.56.v20240826
jose4j-0.9.4
log4j-api-2.24.1
log4j-core-2.24.1
log4j-core-test-2.24.1
log4j-slf4j-impl-2.24.1
log4j-1.2-api-2.24.1
lz4-java-1.8.0
maven-artifact-3.9.6
metrics-core-4.1.12.1
@ -254,7 +260,6 @@ netty-transport-native-epoll-4.1.115.Final
netty-transport-native-unix-common-4.1.115.Final
opentelemetry-proto-1.0.0-alpha
plexus-utils-3.5.1
reload4j-1.2.25
rocksdbjni-7.9.2
scala-library-2.13.15
scala-logging_2.13-3.9.5
@ -312,7 +317,6 @@ argparse4j-0.7.0, see: licenses/argparse-MIT
classgraph-4.8.173, see: licenses/classgraph-MIT
jopt-simple-5.0.4, see: licenses/jopt-simple-MIT
slf4j-api-1.7.36, see: licenses/slf4j-MIT
slf4j-reload4j-1.7.36, see: licenses/slf4j-MIT
pcollections-4.0.1, see: licenses/pcollections-MIT
---------------------------------------

View File

@ -50,10 +50,10 @@ Follow instructions in https://kafka.apache.org/quickstart
./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testTimeToNextUpdate
### Running a particular unit/integration test with log4j output ###
By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j.properties` file in the module's `src/test/resources` directory.
By default, there will be only a small number of logs output while testing. You can adjust it by changing the `log4j2.yml` file in the module's `src/test/resources` directory.
For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j.properties#L21) in `clients/src/test/resources/log4j.properties`
to `log4j.logger.org.apache.kafka=INFO` and then run:
For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.yml#L35) in `clients/src/test/resources/log4j2.yml`
to `level: INFO` and then run:
./gradlew cleanTest clients:test --tests NetworkClientTest

View File

@ -22,8 +22,12 @@ fi
base_dir=$(dirname $0)
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
# Default to the bundled Log4j 2 YAML config; warn (on stderr) if the user
# still points KAFKA_LOG4J_OPTS at a Log4j 1.x *.properties file.
if [ -z "$KAFKA_LOG4J_OPTS" ]; then
  export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml"
elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
  # Quote the messages so echo emits them verbatim (no word-splitting/globbing).
  echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
  echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
  echo "You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then

View File

@ -22,8 +22,12 @@ fi
base_dir=$(dirname $0)
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
# Default to the bundled Log4j 2 YAML config; warn (on stderr) if the user
# still points KAFKA_LOG4J_OPTS at a Log4j 1.x *.properties file.
if [ -z "$KAFKA_LOG4J_OPTS" ]; then
  export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml"
elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
  # Quote the messages so echo emits them verbatim (no word-splitting/globbing).
  echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
  echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
  echo "You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then

View File

@ -22,8 +22,12 @@ fi
base_dir=$(dirname $0)
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
# Default to the bundled Log4j 2 YAML config; warn (on stderr) if the user
# still points KAFKA_LOG4J_OPTS at a Log4j 1.x *.properties file.
if [ -z "$KAFKA_LOG4J_OPTS" ]; then
  export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml"
elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
  # Quote the messages so echo emits them verbatim (no word-splitting/globbing).
  echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
  echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
  echo "You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then

View File

@ -21,8 +21,12 @@ then
fi
base_dir=$(dirname $0)
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties"
# Default to the bundled Log4j 2 YAML config; warn (on stderr) if the user
# still points KAFKA_LOG4J_OPTS at a Log4j 1.x *.properties file.
if [ -z "$KAFKA_LOG4J_OPTS" ]; then
  export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.yaml"
elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
  # Quote the messages so echo emits them verbatim (no word-splitting/globbing).
  echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
  echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
  echo "You can also use the \$KAFKA_HOME/config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then

View File

@ -27,7 +27,14 @@ popd
rem Log4j settings
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties
set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
) ELSE (
  echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
  rem Use "IF NOT ERRORLEVEL 1" (runtime check) instead of "IF %ERRORLEVEL% == 0":
  rem inside this parenthesized ELSE block, %ERRORLEVEL% is expanded when the whole
  rem IF/ELSE statement is parsed, i.e. BEFORE findstr runs, so the comparison
  rem would test a stale value and the deprecation warning could never fire.
  IF NOT ERRORLEVEL 1 (
    echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2
    echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2
    echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2
  )
)
)
"%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %*

View File

@ -27,7 +27,14 @@ popd
rem Log4j settings
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties
set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
) ELSE (
  echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
  rem Use "IF NOT ERRORLEVEL 1" (runtime check) instead of "IF %ERRORLEVEL% == 0":
  rem inside this parenthesized ELSE block, %ERRORLEVEL% is expanded when the whole
  rem IF/ELSE statement is parsed, i.e. BEFORE findstr runs, so the comparison
  rem would test a stale value and the deprecation warning could never fire.
  rem Warnings go to stderr (>&2) for consistency with the .sh launchers.
  IF NOT ERRORLEVEL 1 (
    echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2
    echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2
    echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2
  )
)
)
"%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectStandalone %*

View File

@ -21,7 +21,14 @@ IF [%1] EQU [] (
SetLocal
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties
set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.yaml
) ELSE (
  echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
  rem Use "IF NOT ERRORLEVEL 1" (runtime check) instead of "IF %ERRORLEVEL% == 0":
  rem inside this parenthesized ELSE block, %ERRORLEVEL% is expanded when the whole
  rem IF/ELSE statement is parsed, i.e. BEFORE findstr runs, so the comparison
  rem would test a stale value and the deprecation warning could never fire.
  rem Warnings go to stderr (>&2) for consistency with the .sh launchers.
  IF NOT ERRORLEVEL 1 (
    echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2
    echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2
    echo You can also use the %~dp0../../config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2
  )
)
)
IF ["%KAFKA_HEAP_OPTS%"] EQU [""] (
rem detect OS architecture

View File

@ -138,7 +138,7 @@ ext {
}
runtimeTestLibs = [
libs.slf4jReload4j,
libs.slf4jLog4j2,
libs.junitPlatformLanucher,
project(":test-common:test-common-runtime")
]
@ -178,12 +178,14 @@ allprojects {
libs.scalaLibrary,
libs.scalaReflect,
libs.jacksonAnnotations,
libs.jacksonDatabindYaml,
// be explicit about the Netty dependency version instead of relying on the version set by
// ZooKeeper (potentially older and containing CVEs)
libs.nettyHandler,
libs.nettyTransportNativeEpoll,
// be explicit about the reload4j version instead of relying on the transitive versions
libs.reload4j
libs.log4j2Api,
libs.log4j2Core,
libs.log4j1Bridge2Api
)
}
}
@ -963,13 +965,15 @@ project(':server') {
implementation libs.slf4jApi
compileOnly libs.reload4j
compileOnly libs.log4j2Api
compileOnly libs.log4j2Core
compileOnly libs.log4j1Bridge2Api
testImplementation project(':clients').sourceSets.test.output
testImplementation libs.mockitoCore
testImplementation libs.junitJupiter
testImplementation libs.slf4jReload4j
testImplementation libs.slf4jLog4j2
testRuntimeOnly runtimeTestLibs
}
@ -1028,7 +1032,7 @@ project(':share') {
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.slf4jReload4j
testImplementation libs.slf4jLog4j2
testRuntimeOnly runtimeTestLibs
}
@ -1099,15 +1103,17 @@ project(':core') {
implementation libs.dropwizardMetrics
exclude module: 'slf4j-log4j12'
exclude module: 'log4j'
// Both Kafka and Zookeeper use slf4j. ZooKeeper moved from log4j to logback in v3.8.0, but Kafka relies on reload4j.
// Both Kafka and Zookeeper use slf4j. ZooKeeper moved from log4j to logback in v3.8.0.
// We are removing Zookeeper's dependency on logback so we have a singular logging backend.
exclude module: 'logback-classic'
exclude module: 'logback-core'
}
// ZooKeeperMain depends on commons-cli but declares the dependency as `provided`
implementation libs.commonsCli
compileOnly libs.reload4j
implementation libs.log4j2Core
implementation libs.log4j2Api
implementation libs.log4j1Bridge2Api
implementation libs.jacksonDatabindYaml
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':group-coordinator').sourceSets.test.output
@ -1137,7 +1143,7 @@ project(':core') {
testImplementation libs.apachedsMavibotPartition
testImplementation libs.apachedsJdbmPartition
testImplementation libs.junitJupiter
testImplementation libs.slf4jReload4j
testImplementation libs.slf4jLog4j2
testImplementation libs.caffeine
testRuntimeOnly runtimeTestLibs
@ -1169,9 +1175,6 @@ project(':core') {
}
tasks.create(name: "copyDependantLibs", type: Copy) {
from (configurations.compileClasspath) {
include('reload4j*jar')
}
from (configurations.runtimeClasspath) {
exclude('kafka-clients*')
}
@ -1384,11 +1387,14 @@ project(':metadata') {
implementation libs.jacksonDatabind
implementation libs.jacksonJDK8Datatypes
implementation libs.metrics
compileOnly libs.reload4j
compileOnly libs.log4j2Api
compileOnly libs.log4j2Core
compileOnly libs.log4j1Bridge2Api
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.jqwik
testImplementation libs.mockitoCore
testImplementation libs.slf4jReload4j
testImplementation libs.slf4jLog4j2
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':raft').sourceSets.test.output
testImplementation project(':server-common').sourceSets.test.output
@ -1513,6 +1519,7 @@ project(':group-coordinator') {
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':server-common').sourceSets.test.output
testImplementation project(':coordinator-common').sourceSets.test.output
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
@ -1575,9 +1582,10 @@ project(':test-common') {
implementation project(':storage')
implementation project(':server-common')
implementation libs.slf4jApi
implementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testRuntimeOnly runtimeTestLibs
}
@ -1883,12 +1891,18 @@ project(':clients') {
testImplementation libs.bcpkix
testImplementation libs.jacksonJakartarsJsonProvider
testImplementation libs.jacksonDatabindYaml
testImplementation libs.jose4j
testImplementation libs.junitJupiter
testImplementation libs.reload4j
testImplementation libs.log4j2Api
testImplementation libs.log4j2Core
testImplementation libs.log4j1Bridge2Api
testImplementation libs.spotbugs
testImplementation libs.mockitoCore
testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
testCompileOnly libs.bndlib
testRuntimeOnly libs.jacksonDatabind
testRuntimeOnly libs.jacksonJDK8Datatypes
testRuntimeOnly runtimeTestLibs
@ -2046,11 +2060,13 @@ project(':raft') {
implementation project(':clients')
implementation libs.slf4jApi
implementation libs.jacksonDatabind
implementation libs.jacksonDatabindYaml
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
testImplementation project(':clients')
testImplementation project(':clients').sourceSets.test.output
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.jqwik
@ -2146,6 +2162,7 @@ project(':server-common') {
testImplementation project(':clients')
testImplementation project(':clients').sourceSets.test.output
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
@ -2279,6 +2296,7 @@ project(':storage') {
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
testImplementation libs.hamcrest
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.bcpkix
@ -2441,13 +2459,18 @@ project(':tools') {
implementation libs.jacksonDataformatCsv
implementation libs.jacksonJDK8Datatypes
implementation libs.slf4jApi
implementation libs.slf4jReload4j
implementation libs.slf4jLog4j2
implementation libs.log4j2Api
implementation libs.log4j2Core
implementation libs.log4j1Bridge2Api
implementation libs.joptSimple
implementation libs.re2j
implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation
implementation libs.jacksonJakartarsJsonProvider
compileOnly libs.spotbugs
testImplementation project(':clients')
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':server')
@ -2473,7 +2496,6 @@ project(':tools') {
testImplementation(libs.jfreechart) {
exclude group: 'junit', module: 'junit'
}
testImplementation libs.reload4j
testImplementation libs.apachedsCoreApi
testImplementation libs.apachedsInterceptorKerberos
testImplementation libs.apachedsProtocolShared
@ -2513,7 +2535,9 @@ project(':trogdor') {
implementation libs.jacksonDatabind
implementation libs.jacksonJDK8Datatypes
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
implementation libs.jacksonJakartarsJsonProvider
implementation libs.jerseyContainerServlet
@ -2534,13 +2558,16 @@ project(':trogdor') {
implementation project(':group-coordinator:group-coordinator-api')
testImplementation project(':clients')
testImplementation libs.junitJupiter
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':group-coordinator')
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation project(':group-coordinator')
testRuntimeOnly runtimeTestLibs
testRuntimeOnly libs.log4j2Api
testRuntimeOnly libs.log4j2Core
testRuntimeOnly libs.log4j1Bridge2Api
testRuntimeOnly libs.junitPlatformLanucher
}
javadoc {
@ -2585,6 +2612,7 @@ project(':shell') {
testImplementation project(':core')
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testRuntimeOnly runtimeTestLibs
@ -2629,9 +2657,13 @@ project(':streams') {
// testCompileOnly prevents streams from exporting a dependency on test-utils, which would cause a dependency cycle
testCompileOnly project(':streams:test-utils')
testCompileOnly libs.bndlib
testImplementation project(':clients').sourceSets.test.output
testImplementation libs.reload4j
testImplementation libs.log4j2Api
testImplementation libs.log4j2Core
testImplementation libs.log4j1Bridge2Api
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.bcpkix
testImplementation libs.hamcrest
@ -2774,6 +2806,7 @@ project(':streams:streams-scala') {
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':streams:test-utils')
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
testRuntimeOnly runtimeTestLibs
@ -2830,10 +2863,13 @@ project(':streams:integration-tests') {
testImplementation project(':transaction-coordinator')
testImplementation libs.bcpkix
testImplementation libs.hamcrest
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.junitPlatformSuiteEngine // supports suite test
testImplementation libs.mockitoCore
testImplementation libs.reload4j
testImplementation libs.log4j2Api
testImplementation libs.log4j2Core
testImplementation libs.log4j1Bridge2Api
testImplementation libs.slf4jApi
testImplementation project(':streams:test-utils')
@ -2873,6 +2909,7 @@ project(':streams:test-utils') {
implementation libs.slf4jApi
testImplementation project(':clients').sourceSets.test.output
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.hamcrest
@ -2904,7 +2941,7 @@ project(':streams:examples') {
implementation(project(':connect:json'))
implementation project(':streams')
implementation libs.slf4jReload4j
implementation libs.slf4jLog4j2
testImplementation project(':streams:test-utils')
testImplementation project(':clients').sourceSets.test.output // for org.apache.kafka.test.IntegrationTest
@ -3299,7 +3336,7 @@ project(':jmh-benchmarks') {
implementation libs.jacksonDatabind
implementation libs.metrics
implementation libs.mockitoCore
implementation libs.slf4jReload4j
implementation libs.slf4jLog4j2
implementation libs.scalaLibrary
}
@ -3343,7 +3380,9 @@ project(':connect:api') {
dependencies {
api project(':clients')
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
implementation libs.jakartaRsApi
testImplementation libs.junitJupiter
@ -3379,7 +3418,9 @@ project(':connect:transforms') {
api project(':connect:api')
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
testImplementation libs.junitJupiter
@ -3419,7 +3460,9 @@ project(':connect:json') {
api libs.jacksonBlackbird
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
testImplementation libs.junitJupiter
@ -3464,8 +3507,10 @@ project(':connect:runtime') {
api project(':connect:transforms')
implementation libs.slf4jApi
implementation libs.reload4j
implementation libs.slf4jReload4j
implementation libs.slf4jLog4j2
implementation libs.log4j2Api
implementation libs.log4j2Core
implementation libs.log4j1Bridge2Api
implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation
implementation libs.jacksonAnnotations
implementation libs.jacksonJakartarsJsonProvider
@ -3489,6 +3534,9 @@ project(':connect:runtime') {
implementation libs.mavenArtifact
implementation libs.swaggerAnnotations
compileOnly libs.bndlib
compileOnly libs.spotbugs
// We use this library to generate OpenAPI docs for the REST API, but we don't want or need it at compile
// or run time. So, we add it to a separate configuration, which we use later on during docs generation
swagger libs.swaggerJaxrs2
@ -3507,11 +3555,14 @@ project(':connect:runtime') {
testImplementation project(':server-common').sourceSets.test.output
testImplementation project(':test-common:test-common-api')
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.mockitoJunitJupiter
testImplementation libs.httpclient
testCompileOnly libs.bndlib
testRuntimeOnly libs.bcpkix
testRuntimeOnly runtimeTestLibs
}
@ -3606,10 +3657,14 @@ project(':connect:file') {
dependencies {
implementation project(':connect:api')
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':connect:runtime')
testImplementation project(':connect:runtime').sourceSets.test.output
@ -3646,7 +3701,9 @@ project(':connect:basic-auth-extension') {
dependencies {
implementation project(':connect:api')
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
implementation libs.jakartaRsApi
implementation libs.jaxAnnotationApi
@ -3691,7 +3748,9 @@ project(':connect:mirror') {
implementation libs.argparse4j
implementation libs.jacksonAnnotations
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
implementation libs.jacksonAnnotations
implementation libs.jacksonJakartarsJsonProvider
implementation libs.jerseyContainerServlet
@ -3713,7 +3772,11 @@ project(':connect:mirror') {
implementation libs.swaggerAnnotations
testImplementation libs.junitJupiter
testImplementation libs.reload4j
testImplementation libs.log4j2Api
testImplementation libs.log4j2Core
testImplementation libs.log4j1Bridge2Api
testImplementation libs.bndlib
testImplementation libs.jacksonDatabindYaml
testImplementation libs.mockitoCore
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':connect:runtime').sourceSets.test.output
@ -3781,7 +3844,9 @@ project(':connect:mirror-client') {
dependencies {
implementation project(':clients')
implementation libs.slf4jApi
runtimeOnly libs.reload4j
runtimeOnly libs.log4j2Api
runtimeOnly libs.log4j2Core
runtimeOnly libs.log4j1Bridge2Api
testImplementation libs.junitJupiter
testImplementation project(':clients').sourceSets.test.output

View File

@ -114,7 +114,14 @@
<allow pkg="org.apache.kafka.clients.producer"/>
<allow pkg="org.apache.kafka.coordinator.group"/>
<allow pkg="org.apache.kafka.coordinator.transaction"/>
<allow pkg="org.apache.log4j" />
<subpackage name="annotation">
<allow pkg="kafka.test"/>
</subpackage>
<subpackage name="junit">
<allow pkg="kafka.test"/>
<allow pkg="org.apache.kafka.clients"/>
<allow pkg="org.apache.kafka.metadata" />
</subpackage>
<subpackage name="server">
<allow pkg="kafka.test" />
</subpackage>
@ -136,7 +143,7 @@
<allow pkg="org.apache.kafka.storage.internals.checkpoint"/>
<allow pkg="org.apache.kafka.storage.internals.log"/>
<allow pkg="org.apache.kafka.test"/>
<allow pkg="org.apache.log4j"/>
<allow pkg="org.apache.logging.log4j"/>
<allow pkg="org.apache.kafka.common.test"/>
<allow pkg="org.apache.kafka.common.test.api"/>
<allow pkg="org.apache.kafka.common.test.api"/>
@ -146,6 +153,14 @@
<allow pkg="org.apache.commons" />
<allow pkg="org.apache.directory" />
<allow pkg="org.apache.mina.core.service" />
<allow pkg="org.apache.kafka.clients" />
<allow pkg="org.apache.kafka.clients.admin" />
<allow pkg="org.apache.kafka.test" />
</subpackage>
<subpackage name="utils">
<allow pkg="org.apache.logging.log4j" />
<allow pkg="org.apache.logging.log4j.core.config" />
</subpackage>
<subpackage name="clients">

View File

@ -201,7 +201,7 @@
<subpackage name="utils">
<allow pkg="org.apache.kafka.common" />
<allow pkg="org.apache.log4j" />
<allow pkg="org.apache.logging.log4j" />
</subpackage>
<subpackage name="quotas">
@ -225,7 +225,7 @@
<allow pkg="org.apache.kafka.common" />
<allow pkg="org.apache.kafka.clients" exact-match="true"/>
<allow pkg="org.apache.kafka.test" />
<allow class="org.apache.log4j.Level" />
<allow class="org.apache.logging.log4j.Level" />
<subpackage name="consumer">
<allow pkg="org.apache.kafka.clients.consumer" />
@ -308,6 +308,10 @@
<allow pkg="org.jose4j" />
<allow pkg="net.sourceforge.argparse4j" />
<allow pkg="org.apache.log4j" />
<allow pkg="org.apache.logging.log4j" />
<allow pkg="org.apache.logging.log4j.core.config" />
<allow pkg="org.apache.logging.log4j.core.config.properties" />
<allow pkg="org.apache.logging.log4j.core.spi" />
<allow pkg="org.apache.kafka.common.test" />
<allow pkg="joptsimple" />
<allow pkg="javax.rmi.ssl"/>
@ -391,7 +395,7 @@
<allow pkg="org.apache.kafka.clients.producer" exact-match="true"/>
<allow pkg="org.apache.kafka.clients.consumer" exact-match="true"/>
<allow pkg="org.apache.kafka.server.util"/>
<allow pkg="org.apache.log4j"/>
<allow pkg="org.apache.logging.log4j"/>
<allow pkg="org.apache.kafka.streams"/>
@ -553,6 +557,7 @@
<allow pkg="org.apache.kafka.connect.integration" />
<allow pkg="org.apache.kafka.connect.mirror" />
<allow pkg="org.apache.kafka.server.config" />
<allow pkg="org.apache.logging.log4j" />
<allow pkg="kafka.server" />
<subpackage name="rest">
<allow pkg="jakarta.ws.rs" />
@ -568,7 +573,7 @@
<allow pkg="javax.crypto"/>
<allow pkg="org.apache.maven.artifact.versioning" />
<allow pkg="org.eclipse.jetty.util" />
<allow pkg="org.apache.log4j" />
<allow pkg="org.apache.logging.log4j" />
<subpackage name="rest">
<allow pkg="org.eclipse.jetty" />
@ -580,6 +585,7 @@
<allow pkg="com.fasterxml.jackson" />
<allow pkg="org.apache.http"/>
<allow pkg="io.swagger.v3.oas.annotations"/>
<allow pkg="org.apache.logging.log4j" />
</subpackage>
<subpackage name="isolation">

View File

@ -104,7 +104,7 @@ import org.apache.kafka.test.MockConsumerInterceptor;
import org.apache.kafka.test.MockMetricsReporter;
import org.apache.kafka.test.TestUtils;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.params.ParameterizedTest;

View File

@ -32,7 +32,7 @@ import org.apache.kafka.common.utils.LogCaptureAppender;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.MockTime;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

View File

@ -94,7 +94,7 @@ import org.apache.kafka.test.MockProducerInterceptor;
import org.apache.kafka.test.MockSerializer;
import org.apache.kafka.test.TestUtils;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;

View File

@ -16,31 +16,37 @@
*/
package org.apache.kafka.common.utils;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.apache.logging.log4j.core.config.Property;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseable {
private final List<LoggingEvent> events = new LinkedList<>();
public class LogCaptureAppender extends AbstractAppender implements AutoCloseable {
private final List<LogEvent> events = new LinkedList<>();
private final List<LogLevelChange> logLevelChanges = new LinkedList<>();
private final List<org.apache.logging.log4j.core.Logger> loggers = new ArrayList<>();
public static class LogLevelChange {
private final Level originalLevel;
private final Class<?> clazz;
public LogLevelChange(final Level originalLevel, final Class<?> clazz) {
this.originalLevel = originalLevel;
this.clazz = clazz;
}
private final Level originalLevel;
private final Class<?> clazz;
}
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
@ -74,31 +80,53 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
}
}
public LogCaptureAppender(String name) {
super(name, null, null, true, Property.EMPTY_ARRAY);
}
public static LogCaptureAppender createAndRegister() {
final LogCaptureAppender logCaptureAppender = new LogCaptureAppender();
Logger.getRootLogger().addAppender(logCaptureAppender);
final LogCaptureAppender logCaptureAppender = new LogCaptureAppender("LogCaptureAppender");
Logger logger = LogManager.getRootLogger();
logCaptureAppender.addToLogger(logger);
return logCaptureAppender;
}
public static LogCaptureAppender createAndRegister(final Class<?> clazz) {
final LogCaptureAppender logCaptureAppender = new LogCaptureAppender();
Logger.getLogger(clazz).addAppender(logCaptureAppender);
final LogCaptureAppender logCaptureAppender = new LogCaptureAppender("LogCaptureAppender");
Logger logger = LogManager.getLogger(clazz);
logCaptureAppender.addToLogger(logger);
return logCaptureAppender;
}
public void setClassLogger(final Class<?> clazz, Level level) {
logLevelChanges.add(new LogLevelChange(Logger.getLogger(clazz).getLevel(), clazz));
Logger.getLogger(clazz).setLevel(level);
public void addToLogger(Logger logger) {
org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
this.start();
coreLogger.addAppender(this);
loggers.add(coreLogger);
}
public static void unregister(final LogCaptureAppender logCaptureAppender) {
Logger.getRootLogger().removeAppender(logCaptureAppender);
// Sets the log level for the given class's logger, recording the previous
// level so close() can restore it afterwards.
public void setClassLogger(final Class<?> clazz, Level level) {
    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
    Configuration config = ctx.getConfiguration();
    String loggerName = clazz.getName();
    LoggerConfig loggerConfig = config.getLoggerConfig(loggerName);
    Level originalLevel = loggerConfig.getLevel();
    // Remember the original level for restoration on close().
    logLevelChanges.add(new LogLevelChange(originalLevel, clazz));
    // getLoggerConfig() returns the nearest ancestor config when no config
    // exists for this exact name; in that case create a dedicated config
    // instead of mutating the ancestor's level.
    if (!loggerConfig.getName().equals(loggerName)) {
        LoggerConfig newLoggerConfig = new LoggerConfig(loggerName, level, true);
        config.addLogger(loggerName, newLoggerConfig);
    } else {
        loggerConfig.setLevel(level);
    }
    // Push the configuration change out to live loggers.
    ctx.updateLoggers();
}
@Override
protected void append(final LoggingEvent event) {
public void append(final LogEvent event) {
synchronized (events) {
events.add(event);
events.add(event.toImmutable());
}
}
@ -112,8 +140,8 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
public List<String> getMessages() {
final LinkedList<String> result = new LinkedList<>();
synchronized (events) {
for (final LoggingEvent event : events) {
result.add(event.getRenderedMessage());
for (final LogEvent event : events) {
result.add(event.getMessage().getFormattedMessage());
}
}
return result;
@ -122,25 +150,26 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
public List<Event> getEvents() {
final LinkedList<Event> result = new LinkedList<>();
synchronized (events) {
for (final LoggingEvent event : events) {
final String[] throwableStrRep = event.getThrowableStrRep();
for (final LogEvent event : events) {
final Throwable throwable = event.getThrown();
final Optional<String> throwableString;
final Optional<String> throwableClassName;
if (throwableStrRep == null) {
if (throwable == null) {
throwableString = Optional.empty();
throwableClassName = Optional.empty();
} else {
final StringBuilder throwableStringBuilder = new StringBuilder();
for (final String s : throwableStrRep) {
throwableStringBuilder.append(s);
}
throwableString = Optional.of(throwableStringBuilder.toString());
throwableClassName = Optional.of(event.getThrowableInformation().getThrowable().getClass().getName());
StringWriter stringWriter = new StringWriter();
PrintWriter printWriter = new PrintWriter(stringWriter);
throwable.printStackTrace(printWriter);
throwableString = Optional.of(stringWriter.toString());
throwableClassName = Optional.of(throwable.getClass().getName());
}
result.add(new Event(event.getLevel().toString(), event.getRenderedMessage(), throwableString, throwableClassName));
result.add(new Event(
event.getLevel().toString(),
event.getMessage().getFormattedMessage(),
throwableString,
throwableClassName));
}
}
return result;
@ -148,15 +177,30 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
@Override
public void close() {
LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
Configuration config = ctx.getConfiguration();
for (final LogLevelChange logLevelChange : logLevelChanges) {
Logger.getLogger(logLevelChange.clazz).setLevel(logLevelChange.originalLevel);
String loggerName = logLevelChange.clazz.getName();
LoggerConfig loggerConfig = config.getLoggerConfig(loggerName);
if (!loggerConfig.getName().equals(loggerName)) {
LoggerConfig newLoggerConfig = new LoggerConfig(loggerName, logLevelChange.originalLevel, true);
config.addLogger(loggerName, newLoggerConfig);
} else {
loggerConfig.setLevel(logLevelChange.originalLevel);
}
}
logLevelChanges.clear();
unregister(this);
ctx.updateLoggers();
unregister();
}
@Override
public boolean requiresLayout() {
return false;
// Detaches this appender from every logger it was added to via addToLogger(),
// then stops the appender so it no longer accepts events.
public void unregister() {
    for (org.apache.logging.log4j.core.Logger logger : loggers) {
        logger.removeAppender(this);
    }
    loggers.clear();
    this.stop();
}
}

View File

@ -0,0 +1,38 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
  Properties:
    Property:
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"

  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"

  Loggers:
    Root:
      # "OFF" must be quoted: YAML 1.1 parsers (e.g. SnakeYAML, used by
      # jackson-dataformat-yaml) resolve a bare OFF scalar as boolean false,
      # which is not a valid log level name.
      level: "OFF"
      AppenderRef:
        - ref: STDOUT
    Logger:
      - name: org.apache.kafka
        level: ERROR

      # We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper
      - name: org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper
        level: INFO

View File

@ -1,39 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, stdout, connectAppender
# Send the logs to the console.
#
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
# Send the logs to a file, rolling the file at midnight local time. For example, the `File` option specifies the
# location of the log files (e.g. ${kafka.logs.dir}/connect.log), and at midnight local time the file is closed
# and copied in the same directory but with a filename that ends in the `DatePattern` option.
#
log4j.appender.connectAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.connectAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.connectAppender.File=${kafka.logs.dir}/connect.log
log4j.appender.connectAppender.layout=org.apache.log4j.PatternLayout
# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
# in the log messages, where appropriate. This makes it easier to identify those log messages that apply to a
# specific connector.
#
connect.log.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n
log4j.appender.stdout.layout.ConversionPattern=${connect.log.pattern}
log4j.appender.connectAppender.layout.ConversionPattern=${connect.log.pattern}

View File

@ -0,0 +1,44 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
  Properties:
    Property:
      # Fallback log directory when the kafka.logs.dir system property is not set.
      - name: "kafka.logs.dir"
        value: "."
      # %X{connector.context} adds connector-/task-specific MDC context to each
      # message, making it easier to attribute log lines to a specific connector.
      - name: "logPattern"
        value: "[%d] %p %X{connector.context}%m (%c:%L)%n"

  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
    RollingFile:
      # Rolls connect.log hourly (date pattern yyyy-MM-dd-HH in filePattern).
      - name: ConnectAppender
        fileName: "${sys:kafka.logs.dir}/connect.log"
        filePattern: "${sys:kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log"
        PatternLayout:
          pattern: "${logPattern}"
        TimeBasedTriggeringPolicy:
          # modulate: true aligns rollover to wall-clock boundaries.
          modulate: true
          interval: 1

  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
        - ref: ConnectAppender

View File

@ -1,93 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Unspecified loggers and loggers with additivity=true output to server.log and stdout
# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
log4j.rootLogger=INFO, stdout, kafkaAppender
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log
log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
log4j.logger.kafka=INFO
log4j.logger.org.apache.kafka=INFO
# Change to DEBUG or TRACE to enable request logging
log4j.logger.kafka.request.logger=WARN, requestAppender
log4j.additivity.kafka.request.logger=false
# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
# related to the handling of requests
#log4j.logger.kafka.network.Processor=TRACE, requestAppender
#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
#log4j.additivity.kafka.server.KafkaApis=false
log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
log4j.additivity.kafka.network.RequestChannel$=false
# Change the line below to adjust KRaft mode controller logging
log4j.logger.org.apache.kafka.controller=INFO, controllerAppender
log4j.additivity.org.apache.kafka.controller=false
# Change the line below to adjust ZK mode controller logging
log4j.logger.kafka.controller=TRACE, controllerAppender
log4j.additivity.kafka.controller=false
log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
log4j.additivity.kafka.log.LogCleaner=false
log4j.logger.state.change.logger=INFO, stateChangeAppender
log4j.additivity.state.change.logger=false
# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
log4j.additivity.kafka.authorizer.logger=false

158
config/log4j2.yaml Normal file
View File

@ -0,0 +1,158 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Unspecified loggers and loggers with additivity=true output to server.log and stdout
# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
Configuration:
  Properties:
    Property:
      # Fallback if the system property is not set
      - name: "kafka.logs.dir"
        value: "."
      - name: "logPattern"
        value: "[%d] %p %m (%c)%n"

  # Appenders configuration
  # See: https://logging.apache.org/log4j/2.x/manual/appenders.html
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
    RollingFile:
      - name: KafkaAppender
        fileName: "${sys:kafka.logs.dir}/server.log"
        filePattern: "${sys:kafka.logs.dir}/server.log.%d{yyyy-MM-dd-HH}"
        PatternLayout:
          pattern: "${logPattern}"
        TimeBasedTriggeringPolicy:
          modulate: true
          interval: 1
      # State Change appender
      - name: StateChangeAppender
        fileName: "${sys:kafka.logs.dir}/state-change.log"
        # Fixed typo: rolled files must share the "state-change" basename with
        # the live log file (was "stage-change").
        filePattern: "${sys:kafka.logs.dir}/state-change.log.%d{yyyy-MM-dd-HH}"
        PatternLayout:
          pattern: "${logPattern}"
        TimeBasedTriggeringPolicy:
          modulate: true
          interval: 1
      # Request appender
      - name: RequestAppender
        fileName: "${sys:kafka.logs.dir}/kafka-request.log"
        filePattern: "${sys:kafka.logs.dir}/kafka-request.log.%d{yyyy-MM-dd-HH}"
        PatternLayout:
          pattern: "${logPattern}"
        TimeBasedTriggeringPolicy:
          modulate: true
          interval: 1
      # Cleaner appender
      - name: CleanerAppender
        fileName: "${sys:kafka.logs.dir}/log-cleaner.log"
        filePattern: "${sys:kafka.logs.dir}/log-cleaner.log.%d{yyyy-MM-dd-HH}"
        PatternLayout:
          pattern: "${logPattern}"
        TimeBasedTriggeringPolicy:
          modulate: true
          interval: 1
      # Controller appender
      - name: ControllerAppender
        fileName: "${sys:kafka.logs.dir}/controller.log"
        filePattern: "${sys:kafka.logs.dir}/controller.log.%d{yyyy-MM-dd-HH}"
        PatternLayout:
          pattern: "${logPattern}"
        TimeBasedTriggeringPolicy:
          modulate: true
          interval: 1
      # Authorizer appender
      - name: AuthorizerAppender
        fileName: "${sys:kafka.logs.dir}/kafka-authorizer.log"
        filePattern: "${sys:kafka.logs.dir}/kafka-authorizer.log.%d{yyyy-MM-dd-HH}"
        PatternLayout:
          pattern: "${logPattern}"
        TimeBasedTriggeringPolicy:
          modulate: true
          interval: 1

  # Loggers configuration
  # See: https://logging.apache.org/log4j/2.x/manual/configuration.html#configuring-loggers
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
        - ref: KafkaAppender
    Logger:
      # Kafka logger
      - name: kafka
        level: INFO
      # Kafka org.apache logger
      - name: org.apache.kafka
        level: INFO
      # Kafka request logger
      - name: kafka.request.logger
        level: WARN
        additivity: false
        AppenderRef:
          ref: RequestAppender
      # Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE
      # for additional output related to the handling of requests
      # - name: kafka.network.Processor
      #   level: TRACE
      #   additivity: false
      #   AppenderRef:
      #     ref: RequestAppender
      # - name: kafka.server.KafkaApis
      #   level: TRACE
      #   additivity: false
      #   AppenderRef:
      #     ref: RequestAppender
      # Kafka network RequestChannel$ logger
      - name: kafka.network.RequestChannel$
        level: WARN
        additivity: false
        AppenderRef:
          ref: RequestAppender
      # KRaft mode controller logger
      - name: org.apache.kafka.controller
        level: INFO
        additivity: false
        AppenderRef:
          ref: ControllerAppender
      # ZK mode controller logger
      - name: kafka.controller
        level: TRACE
        additivity: false
        AppenderRef:
          ref: ControllerAppender
      # LogCleaner logger
      - name: kafka.log.LogCleaner
        level: INFO
        additivity: false
        AppenderRef:
          ref: CleanerAppender
      # State change logger
      - name: state.change.logger
        level: INFO
        additivity: false
        AppenderRef:
          ref: StateChangeAppender
      # Authorizer logger
      - name: kafka.authorizer.logger
        level: INFO
        additivity: false
        AppenderRef:
          ref: AuthorizerAppender

View File

@ -1,28 +0,0 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
#
# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a
# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information.
#
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n
log4j.logger.kafka=WARN

View File

@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
  Properties:
    Property:
      # %X{connector.context} adds connector-/task-specific MDC context to each
      # message, making it easier to attribute log lines to a specific connector.
      - name: "logPattern"
        value: "[%d] %p %X{connector.context}%m (%c:%L)%n"

  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"

  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Quieten broker-client logging in Connect output.
      - name: kafka
        level: WARN

View File

@ -40,7 +40,7 @@ import org.apache.kafka.connect.connector.ConnectorContext;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.source.ExactlyOnceSupport;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;

View File

@ -1,33 +0,0 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
#
# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a
# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information.
#
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n
#
# The following line includes no MDC context parameters:
#log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n (%t)
log4j.logger.kafka=WARN
log4j.logger.state.change.logger=OFF
log4j.logger.org.apache.kafka.connect=DEBUG

View File

@ -0,0 +1,41 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
  Properties:
    Property:
      # %X{connector.context} adds connector-/task-specific MDC context to each
      # message, making it easier to attribute log lines to a specific connector.
      - name: "logPattern"
        value: "[%d] %p %X{connector.context}%m (%c:%L)%n"

  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"

  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Quieten broker-client logging in test output.
      - name: kafka
        level: WARN
      # OFF is quoted because YAML 1.1 parsers resolve a bare OFF as boolean false.
      - name: state.change.logger
        level: "OFF"
      # Verbose Connect logging for test diagnosis.
      - name: org.apache.kafka.connect
        level: DEBUG

View File

@ -65,7 +65,7 @@ import org.apache.kafka.connect.util.ConnectorTaskId;
import org.apache.kafka.connect.util.Stage;
import org.apache.kafka.connect.util.TemporaryStage;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.slf4j.Logger;

View File

@ -19,19 +19,23 @@ package org.apache.kafka.connect.runtime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configurator;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
import java.util.stream.Collectors;
/**
* Manages logging levels on a single worker. Supports dynamic adjustment and querying
@ -46,6 +50,10 @@ public class Loggers {
/**
* Log4j uses "root" (case-insensitive) as name of the root logger.
* Note: In Log4j 1, the root logger's name was "root", and Kafka's dynamic logging control feature also used that name.
*
* In Log4j 2 the root logger's name changed to the empty string (see: [[LogManager.ROOT_LOGGER_NAME]]), but for
* backward compatibility Kafka keeps the original name. That is why a dedicated root logger name is defined here.
*/
private static final String ROOT_LOGGER_NAME = "root";
@ -66,18 +74,17 @@ public class Loggers {
public synchronized LoggerLevel level(String logger) {
Objects.requireNonNull(logger, "Logger may not be null");
org.apache.log4j.Logger foundLogger = null;
org.apache.logging.log4j.Logger foundLogger = null;
if (ROOT_LOGGER_NAME.equalsIgnoreCase(logger)) {
foundLogger = rootLogger();
} else {
Enumeration<org.apache.log4j.Logger> en = currentLoggers();
List<org.apache.logging.log4j.Logger> currentLoggers = currentLoggers();
// search within existing loggers for the given name.
// using LogManager.getLogger() will create a logger if it doesn't exist
// (potential leak since these don't get cleaned up).
while (en.hasMoreElements()) {
org.apache.log4j.Logger l = en.nextElement();
if (logger.equals(l.getName())) {
foundLogger = l;
for (org.apache.logging.log4j.Logger currentLogger : currentLoggers) {
if (logger.equals(currentLogger.getName())) {
foundLogger = currentLogger;
break;
}
}
@ -98,14 +105,12 @@ public class Loggers {
public synchronized Map<String, LoggerLevel> allLevels() {
Map<String, LoggerLevel> result = new TreeMap<>();
Enumeration<org.apache.log4j.Logger> enumeration = currentLoggers();
Collections.list(enumeration)
.stream()
.filter(logger -> logger.getLevel() != null)
currentLoggers().stream()
.filter(logger -> !logger.getLevel().equals(Level.OFF))
.forEach(logger -> result.put(logger.getName(), loggerLevel(logger)));
org.apache.log4j.Logger root = rootLogger();
if (root.getLevel() != null) {
org.apache.logging.log4j.Logger root = rootLogger();
if (!root.getLevel().equals(Level.OFF)) {
result.put(ROOT_LOGGER_NAME, loggerLevel(root));
}
@ -124,10 +129,10 @@ public class Loggers {
Objects.requireNonNull(level, "Level may not be null");
log.info("Setting level of namespace {} and children to {}", namespace, level);
List<org.apache.log4j.Logger> childLoggers = loggers(namespace);
List<org.apache.logging.log4j.Logger> childLoggers = loggers(namespace);
List<String> result = new ArrayList<>();
for (org.apache.log4j.Logger logger: childLoggers) {
for (org.apache.logging.log4j.Logger logger: childLoggers) {
setLevel(logger, level);
result.add(logger.getName());
}
@ -143,25 +148,24 @@ public class Loggers {
* @return all loggers that fall under the given namespace; never null, and will always contain
* at least one logger (the ancestor logger for the namespace)
*/
private synchronized List<org.apache.log4j.Logger> loggers(String namespace) {
private synchronized List<org.apache.logging.log4j.Logger> loggers(String namespace) {
Objects.requireNonNull(namespace, "Logging namespace may not be null");
if (ROOT_LOGGER_NAME.equalsIgnoreCase(namespace)) {
List<org.apache.log4j.Logger> result = Collections.list(currentLoggers());
List<org.apache.logging.log4j.Logger> result = currentLoggers();
result.add(rootLogger());
return result;
}
List<org.apache.log4j.Logger> result = new ArrayList<>();
org.apache.log4j.Logger ancestorLogger = lookupLogger(namespace);
Enumeration<org.apache.log4j.Logger> en = currentLoggers();
List<org.apache.logging.log4j.Logger> result = new ArrayList<>();
org.apache.logging.log4j.Logger ancestorLogger = lookupLogger(namespace);
List<org.apache.logging.log4j.Logger> currentLoggers = currentLoggers();
boolean present = false;
while (en.hasMoreElements()) {
org.apache.log4j.Logger current = en.nextElement();
if (current.getName().startsWith(namespace)) {
result.add(current);
for (org.apache.logging.log4j.Logger currentLogger : currentLoggers) {
if (currentLogger.getName().startsWith(namespace)) {
result.add(currentLogger);
}
if (namespace.equals(current.getName())) {
if (namespace.equals(currentLogger.getName())) {
present = true;
}
}
@ -174,43 +178,46 @@ public class Loggers {
}
// visible for testing
org.apache.log4j.Logger lookupLogger(String logger) {
org.apache.logging.log4j.Logger lookupLogger(String logger) {
return LogManager.getLogger(logger);
}
@SuppressWarnings("unchecked")
// visible for testing
Enumeration<org.apache.log4j.Logger> currentLoggers() {
return LogManager.getCurrentLoggers();
// Returns the loggers declared in the current log4j2 configuration.
// NOTE(review): this enumerates LoggerConfig entries from the configuration,
// not every live Logger instance — loggers created at runtime without a
// dedicated config entry will not appear here; confirm this matches the
// old LogManager.getCurrentLoggers() expectations.
List<org.apache.logging.log4j.Logger> currentLoggers() {
    LoggerContext context = (LoggerContext) LogManager.getContext(false);
    Collection<LoggerConfig> loggerConfigs = context.getConfiguration().getLoggers().values();
    // distinct() guards against duplicate config names before resolving each
    // name to its Logger via LogManager.getLogger().
    return loggerConfigs.stream()
        .map(LoggerConfig::getName)
        .distinct()
        .map(LogManager::getLogger)
        .collect(Collectors.toCollection(ArrayList::new));
}
// visible for testing
org.apache.log4j.Logger rootLogger() {
// Returns the log4j2 root logger (facade); overridable in tests.
org.apache.logging.log4j.Logger rootLogger() {
    return LogManager.getRootLogger();
}
private void setLevel(org.apache.log4j.Logger logger, Level level) {
Level currentLevel = logger.getLevel();
if (currentLevel == null)
currentLevel = logger.getEffectiveLevel();
private void setLevel(org.apache.logging.log4j.Logger logger, Level level) {
String loggerName = logger.getName();
LoggerContext context = (LoggerContext) LogManager.getContext(false);
LoggerConfig loggerConfig = context.getConfiguration().getLoggerConfig(loggerName);
Level currentLevel = loggerConfig.getLevel();
if (level.equals(currentLevel)) {
log.debug("Skipping update for logger {} since its level is already {}", logger.getName(), level);
log.debug("Skipping update for logger {} since its level is already {}", loggerName, level);
return;
}
log.debug("Setting level of logger {} (excluding children) to {}", logger.getName(), level);
logger.setLevel(level);
lastModifiedTimes.put(logger.getName(), time.milliseconds());
log.debug("Setting level of logger {} (excluding children) to {}", loggerName, level);
Configurator.setLevel(loggerName, level);
lastModifiedTimes.put(loggerName, time.milliseconds());
}
private LoggerLevel loggerLevel(org.apache.log4j.Logger logger) {
Level level = logger.getLevel();
if (level == null)
level = logger.getEffectiveLevel();
// Builds the REST-facing LoggerLevel for the given logger: its configured
// level plus the timestamp of the last dynamic modification (null if this
// worker never changed it).
private LoggerLevel loggerLevel(org.apache.logging.log4j.Logger logger) {
    LoggerContext context = (LoggerContext) LogManager.getContext(false);
    // getLoggerConfig() resolves to the nearest ancestor config when there is
    // no config for this exact name, so the effective level is returned.
    LoggerConfig loggerConfig = context.getConfiguration().getLoggerConfig(logger.getName());
    Level level = loggerConfig.getLevel();
    Long lastModified = lastModifiedTimes.get(logger.getName());
    return new LoggerLevel(Objects.toString(level), lastModified);
}
}

View File

@ -21,7 +21,7 @@ import org.apache.kafka.connect.runtime.Herder;
import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel;
import org.apache.kafka.connect.runtime.rest.errors.BadRequestException;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.slf4j.LoggerFactory;
import java.util.List;

View File

@ -20,9 +20,13 @@ import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel;
import org.apache.log4j.Hierarchy;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.logging.log4j.core.config.Configuration;
import org.apache.logging.log4j.core.config.Configurator;
import org.apache.logging.log4j.core.config.LoggerConfig;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@ -30,13 +34,12 @@ import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Vector;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@ -59,12 +62,15 @@ public class LoggersTest {
@Test
public void testGetLoggersIgnoresNullLevels() {
Logger root = logger("root");
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
Logger root = loggerContext.getRootLogger();
Configurator.setLevel(root, Level.OFF);
Logger a = logger("a");
a.setLevel(null);
Logger b = logger("b");
b.setLevel(Level.INFO);
Logger a = loggerContext.getLogger("a");
Configurator.setLevel(a, null);
Logger b = loggerContext.getLogger("b");
Configurator.setLevel(b, Level.INFO);
Loggers loggers = new TestLoggers(root, a, b);
@ -78,14 +84,15 @@ public class LoggersTest {
@Test
public void testGetLoggerFallsBackToEffectiveLogLevel() {
Logger root = logger("root");
root.setLevel(Level.ERROR);
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
Logger root = loggerContext.getRootLogger();
Configurator.setLevel(root, Level.ERROR);
Hierarchy hierarchy = new Hierarchy(root);
Logger a = hierarchy.getLogger("a");
a.setLevel(null);
Logger b = hierarchy.getLogger("b");
b.setLevel(Level.INFO);
Logger a = loggerContext.getLogger("a");
Configurator.setLevel(a, null);
Logger b = loggerContext.getLogger("b");
Configurator.setLevel(b, Level.INFO);
Loggers loggers = new TestLoggers(root, a, b);
@ -96,14 +103,15 @@ public class LoggersTest {
@Test
public void testGetUnknownLogger() {
Logger root = logger("root");
root.setLevel(Level.ERROR);
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
Logger root = loggerContext.getRootLogger();
Configurator.setLevel(root, Level.ERROR);
Hierarchy hierarchy = new Hierarchy(root);
Logger a = hierarchy.getLogger("a");
a.setLevel(null);
Logger b = hierarchy.getLogger("b");
b.setLevel(Level.INFO);
Logger a = loggerContext.getLogger("a");
Configurator.setLevel(a, null);
Logger b = loggerContext.getLogger("b");
Configurator.setLevel(b, Level.INFO);
Loggers loggers = new TestLoggers(root, a, b);
@ -113,17 +121,18 @@ public class LoggersTest {
@Test
public void testSetLevel() {
Logger root = logger("root");
root.setLevel(Level.ERROR);
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
Logger root = loggerContext.getRootLogger();
Configurator.setLevel(root, Level.ERROR);
Logger x = logger("a.b.c.p.X");
Logger y = logger("a.b.c.p.Y");
Logger z = logger("a.b.c.p.Z");
Logger w = logger("a.b.c.s.W");
x.setLevel(Level.INFO);
y.setLevel(Level.INFO);
z.setLevel(Level.INFO);
w.setLevel(Level.INFO);
Logger x = loggerContext.getLogger("a.b.c.p.X");
Logger y = loggerContext.getLogger("a.b.c.p.Y");
Logger z = loggerContext.getLogger("a.b.c.p.Z");
Logger w = loggerContext.getLogger("a.b.c.s.W");
Configurator.setLevel(x, Level.INFO);
Configurator.setLevel(y, Level.INFO);
Configurator.setLevel(z, Level.INFO);
Configurator.setLevel(w, Level.INFO);
// We don't explicitly register a logger for a.b.c.p, so it won't appear in the list of current loggers;
// one should be created by the Loggers instance when we set the level
@ -166,25 +175,37 @@ public class LoggersTest {
@Test
public void testSetRootLevel() {
Logger root = logger("root");
root.setLevel(Level.ERROR);
// In this test case, we focus on setting the level for the root logger.
// Ideally, we want to start with a "clean" configuration to conduct this test case.
// By programmatically creating a new configuration at the beginning, we can ensure
// that this test case is not affected by existing Log4j configurations.
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
Configuration config = loggerContext.getConfiguration();
String rootLoggerName = "root";
LoggerConfig rootConfig = new LoggerConfig(rootLoggerName, Level.ERROR, false);
config.addLogger(rootLoggerName, rootConfig);
loggerContext.updateLoggers();
Logger p = logger("a.b.c.p");
Logger x = logger("a.b.c.p.X");
Logger y = logger("a.b.c.p.Y");
Logger z = logger("a.b.c.p.Z");
Logger w = logger("a.b.c.s.W");
x.setLevel(Level.INFO);
y.setLevel(Level.INFO);
z.setLevel(Level.INFO);
w.setLevel(Level.INFO);
Logger root = LogManager.getLogger(rootLoggerName);
Configurator.setLevel(root, Level.ERROR);
Logger p = loggerContext.getLogger("a.b.c.p");
Logger x = loggerContext.getLogger("a.b.c.p.X");
Logger y = loggerContext.getLogger("a.b.c.p.Y");
Logger z = loggerContext.getLogger("a.b.c.p.Z");
Logger w = loggerContext.getLogger("a.b.c.s.W");
Configurator.setLevel(p, Level.INFO);
Configurator.setLevel(x, Level.INFO);
Configurator.setLevel(y, Level.INFO);
Configurator.setLevel(z, Level.INFO);
Configurator.setLevel(w, Level.INFO);
Loggers loggers = new TestLoggers(root, x, y, z, w);
List<String> modified = loggers.setLevel("root", Level.DEBUG);
assertEquals(Arrays.asList("a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W", "root"), modified);
List<String> modified = loggers.setLevel(rootLoggerName, Level.DEBUG);
assertEquals(Arrays.asList("a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W", rootLoggerName), modified);
assertNull(p.getLevel());
assertEquals(p.getLevel(), Level.INFO);
assertEquals(root.getLevel(), Level.DEBUG);
@ -194,7 +215,7 @@ public class LoggersTest {
assertEquals(z.getLevel(), Level.DEBUG);
Map<String, LoggerLevel> expectedLevels = new HashMap<>();
expectedLevels.put("root", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
expectedLevels.put(rootLoggerName, new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
expectedLevels.put("a.b.c.p.X", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
expectedLevels.put("a.b.c.p.Y", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
expectedLevels.put("a.b.c.p.Z", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
@ -206,7 +227,8 @@ public class LoggersTest {
@Test
public void testSetLevelNullArguments() {
Logger root = logger("root");
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
Logger root = loggerContext.getRootLogger();
Loggers loggers = new TestLoggers(root);
assertThrows(NullPointerException.class, () -> loggers.setLevel(null, Level.INFO));
assertThrows(NullPointerException.class, () -> loggers.setLevel("root", null));
@ -229,12 +251,12 @@ public class LoggersTest {
@Override
Logger lookupLogger(String logger) {
return currentLoggers.computeIfAbsent(logger, l -> new Logger(logger) { });
return currentLoggers.computeIfAbsent(logger, LogManager::getLogger);
}
@Override
Enumeration<Logger> currentLoggers() {
return new Vector<>(currentLoggers.values()).elements();
List<Logger> currentLoggers() {
return new ArrayList<>(currentLoggers.values());
}
@Override
@ -242,9 +264,4 @@ public class LoggersTest {
return rootLogger;
}
}
private Logger logger(String name) {
return new Logger(name) { };
}
}

View File

@ -22,7 +22,7 @@ import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.util.ConnectorTaskId;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

View File

@ -54,7 +54,7 @@ import org.apache.kafka.connect.util.ConnectorTaskId;
import org.apache.kafka.connect.util.TopicAdmin;
import org.apache.kafka.connect.util.TopicCreationGroup;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;

View File

@ -353,7 +353,6 @@ public class ConnectRestServerTest {
server.stop();
Collection<String> logMessages = restServerAppender.getMessages();
LogCaptureAppender.unregister(restServerAppender);
restServerAppender.close();
String expectedlogContent = "\"GET / HTTP/1.1\" " + response.getStatusLine().getStatusCode();
assertTrue(logMessages.stream().anyMatch(logMessage -> logMessage.contains(expectedlogContent)));

View File

@ -1,37 +0,0 @@
##
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
#
# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a
# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information.
#
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n
#
# The following line includes no MDC context parameters:
#log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n (%t)
log4j.logger.kafka=WARN
log4j.logger.state.change.logger=OFF
log4j.logger.org.apache.kafka.connect=DEBUG
# Troubleshooting KAFKA-17493.
log4j.logger.org.apache.kafka.consumer=DEBUG
log4j.logger.org.apache.kafka.coordinator.group=DEBUG

View File

@ -0,0 +1,48 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
Properties:
Property:
- name: "logPattern"
value: "[%d] %p %X{connector.context}%m (%c:%L)%n"
Appenders:
Console:
name: STDOUT
PatternLayout:
pattern: "${logPattern}"
Loggers:
Root:
level: INFO
AppenderRef:
- ref: STDOUT
Logger:
- name: kafka
level: WARN
- name: state.change.logger
level: "OFF"
- name: org.apache.kafka.connect
level: DEBUG
# Troubleshooting KAFKA-17493.
- name: org.apache.kafka.consumer
level: DEBUG
- name: org.apache.kafka.coordinator.group
level: DEBUG

View File

@ -17,83 +17,90 @@
package kafka.utils
import org.apache.kafka.common.utils.Utils
import org.apache.logging.log4j.core.LoggerContext
import org.apache.logging.log4j.core.config.Configurator
import org.apache.logging.log4j.{Level, LogManager}
import java.util
import java.util.Locale
import org.apache.kafka.common.utils.Utils
import org.apache.log4j.{Level, LogManager, Logger}
import scala.collection.mutable
import scala.jdk.CollectionConverters._
object Log4jController {
/**
* Note: In log4j 1.x, the root logger's name was "root", and Kafka followed that name for its dynamic logging control feature.
*
* In log4j2 the root logger's name changed to the empty string (see: [[LogManager.ROOT_LOGGER_NAME]]), but for backward
* compatibility Kafka keeps its original root logger name. That is why a dedicated definition for the root logger name exists here.
*/
val ROOT_LOGGER = "root"
private def resolveLevel(logger: Logger): String = {
var name = logger.getName
var level = logger.getLevel
while (level == null) {
val index = name.lastIndexOf(".")
if (index > 0) {
name = name.substring(0, index)
val ancestor = existingLogger(name)
if (ancestor != null) {
level = ancestor.getLevel
}
} else {
level = existingLogger(ROOT_LOGGER).getLevel
}
}
level.toString
/**
* Returns a map of the log4j loggers and their assigned log level.
* If a logger does not have a log level assigned, we return the log level of the first ancestor with a level configured.
*/
def loggers: Map[String, String] = {
val logContext = LogManager.getContext(false).asInstanceOf[LoggerContext]
val rootLoggerLevel = logContext.getRootLogger.getLevel.toString
// Loggers defined in the configuration
val configured = logContext.getConfiguration.getLoggers.asScala
.values
.filterNot(_.getName.equals(LogManager.ROOT_LOGGER_NAME))
.map { logger =>
logger.getName -> logger.getLevel.toString
}.toMap
// Loggers actually running
val actual = logContext.getLoggers.asScala
.filterNot(_.getName.equals(LogManager.ROOT_LOGGER_NAME))
.map { logger =>
logger.getName -> logger.getLevel.toString
}.toMap
(configured ++ actual) + (ROOT_LOGGER -> rootLoggerLevel)
}
/**
* Returns a map of the log4j loggers and their assigned log level.
* If a logger does not have a log level assigned, we return the root logger's log level
*/
def loggers: mutable.Map[String, String] = {
val logs = new mutable.HashMap[String, String]()
val rootLoggerLvl = existingLogger(ROOT_LOGGER).getLevel.toString
logs.put(ROOT_LOGGER, rootLoggerLvl)
val loggers = LogManager.getCurrentLoggers
while (loggers.hasMoreElements) {
val logger = loggers.nextElement().asInstanceOf[Logger]
if (logger != null) {
logs.put(logger.getName, resolveLevel(logger))
}
}
logs
}
/**
* Sets the log level of a particular logger
*/
* Sets the log level of a particular logger. If the given logLevel is not an available log4j level
* (i.e., one of OFF, FATAL, ERROR, WARN, INFO, DEBUG, TRACE, ALL) it falls back to DEBUG.
*
* @see [[Level.toLevel]]
*/
def logLevel(loggerName: String, logLevel: String): Boolean = {
val log = existingLogger(loggerName)
if (!Utils.isBlank(loggerName) && !Utils.isBlank(logLevel) && log != null) {
log.setLevel(Level.toLevel(logLevel.toUpperCase(Locale.ROOT)))
if (Utils.isBlank(loggerName) || Utils.isBlank(logLevel))
return false
val level = Level.toLevel(logLevel.toUpperCase(Locale.ROOT))
if (loggerName == ROOT_LOGGER) {
Configurator.setAllLevels(LogManager.ROOT_LOGGER_NAME, level)
true
} else {
if (loggerExists(loggerName) && level != null) {
Configurator.setAllLevels(loggerName, level)
true
}
else false
}
else false
}
def unsetLogLevel(loggerName: String): Boolean = {
val log = existingLogger(loggerName)
if (!Utils.isBlank(loggerName) && log != null) {
log.setLevel(null)
if (loggerName == ROOT_LOGGER) {
Configurator.setAllLevels(LogManager.ROOT_LOGGER_NAME, null)
true
} else {
if (loggerExists(loggerName)) {
Configurator.setAllLevels(loggerName, null)
true
}
else false
}
else false
}
def loggerExists(loggerName: String): Boolean = existingLogger(loggerName) != null
private def existingLogger(loggerName: String) =
if (loggerName == ROOT_LOGGER)
LogManager.getRootLogger
else LogManager.exists(loggerName)
def loggerExists(loggerName: String): Boolean = loggers.contains(loggerName)
}
/**
@ -113,15 +120,7 @@ class Log4jController extends Log4jControllerMBean {
def getLogLevel(loggerName: String): String = {
val log = Log4jController.existingLogger(loggerName)
if (log != null) {
val level = log.getLevel
if (level != null)
log.getLevel.toString
else
Log4jController.resolveLevel(log)
}
else "No such logger."
Log4jController.loggers.getOrElse(loggerName, "No such logger.")
}
def setLogLevel(loggerName: String, level: String): Boolean = Log4jController.logLevel(loggerName, level)

View File

@ -40,7 +40,7 @@ import org.apache.kafka.common.utils.SecurityUtils;
import org.apache.kafka.metadata.authorizer.StandardAuthorizer;
import org.apache.kafka.test.TestUtils;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

View File

@ -0,0 +1,38 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
Properties:
Property:
- name: "logPattern"
value: "[%d] %p %m (%c:%L)%n"
Appenders:
Console:
name: STDOUT
PatternLayout:
pattern: "${logPattern}"
Loggers:
Root:
level: OFF
AppenderRef:
- ref: STDOUT
Logger:
- name: kafka
level: WARN
- name: org.apache.kafka
level: WARN

View File

@ -57,9 +57,9 @@ import org.apache.kafka.security.authorizer.AclEntry
import org.apache.kafka.server.config.{QuotaConfig, ServerConfigs, ServerLogConfigs, ZkConfigs}
import org.apache.kafka.storage.internals.log.{CleanerConfig, LogConfig, LogFileUtils}
import org.apache.kafka.test.TestUtils.{DEFAULT_MAX_WAIT_MS, assertFutureThrows}
import org.apache.log4j.PropertyConfigurator
import org.apache.logging.log4j.core.config.Configurator
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api.{AfterEach, BeforeEach, TestInfo, Timeout}
import org.junit.jupiter.api.{BeforeEach, TestInfo, Timeout}
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.{MethodSource, ValueSource}
import org.slf4j.LoggerFactory
@ -89,18 +89,11 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest {
@BeforeEach
override def setUp(testInfo: TestInfo): Unit = {
super.setUp(testInfo)
Configurator.reconfigure();
brokerLoggerConfigResource = new ConfigResource(
ConfigResource.Type.BROKER_LOGGER, brokers.head.config.brokerId.toString)
}
@AfterEach
override def tearDown(): Unit = {
// Due to the fact that log4j is not re-initialized across tests, changing a logger's log level persists
// across test classes. We need to clean up the changes done after testing.
resetLogging()
super.tearDown()
}
@ParameterizedTest
@Timeout(30)
@ValueSource(strings = Array("kraft"))
@ -3766,6 +3759,27 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest {
assertEquals(newAncestorLogLevel, newAncestorLoggerConfig.get("kafka.server.ControllerServer").value())
}
@ParameterizedTest
@ValueSource(strings = Array("kraft"))
def testIncrementalAlterConfigsForLog4jLogLevelsCanSetToRootLogger(quorum: String): Unit = {
client = createAdminClient
val initialLoggerConfig = describeBrokerLoggers()
val initialRootLogLevel = initialLoggerConfig.get(Log4jController.ROOT_LOGGER).value()
val newRootLogLevel = LogLevelConfig.DEBUG_LOG_LEVEL
val alterRootLoggerEntry = Seq(
new AlterConfigOp(new ConfigEntry(Log4jController.ROOT_LOGGER, newRootLogLevel), AlterConfigOp.OpType.SET)
).asJavaCollection
alterBrokerLoggers(alterRootLoggerEntry, validateOnly = true)
val validatedRootLoggerConfig = describeBrokerLoggers()
assertEquals(initialRootLogLevel, validatedRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value())
alterBrokerLoggers(alterRootLoggerEntry)
val changedRootLoggerConfig = describeBrokerLoggers()
assertEquals(newRootLogLevel, changedRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value())
}
@ParameterizedTest
@ValueSource(strings = Array("kraft"))
def testIncrementalAlterConfigsForLog4jLogLevelsCannotResetRootLogger(quorum: String): Unit = {
@ -4108,17 +4122,4 @@ object PlaintextAdminIntegrationTest {
assertEquals(LogConfig.DEFAULT_COMPRESSION_TYPE, configs.get(brokerResource).get(ServerConfigs.COMPRESSION_TYPE_CONFIG).value)
}
/**
* Resets the logging configuration after the test.
*/
def resetLogging(): Unit = {
org.apache.log4j.LogManager.resetConfiguration()
val stream = this.getClass.getResourceAsStream("/log4j.properties")
try {
PropertyConfigurator.configure(stream)
} finally {
stream.close()
}
}
}

View File

@ -1,22 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, KAFKA
log4j.appender.KAFKA=kafka.log4j.KafkaAppender
log4j.appender.KAFKA.Port=9092
log4j.appender.KAFKA.Host=localhost
log4j.appender.KAFKA.Topic=test-logger
log4j.appender.KAFKA.Serializer=kafka.AppenderStringSerializer

View File

@ -34,13 +34,14 @@ import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.kafka.clients.admin.{Admin, AdminClientConfig, AlterConfigOp, AlterConfigsResult, ConfigEntry}
import org.apache.kafka.server.config.ReplicationConfigs
import org.apache.kafka.server.metrics.KafkaYammerMetrics
import org.apache.log4j.{Level, Logger}
import org.apache.logging.log4j.{Level, LogManager}
import org.junit.jupiter.api.{AfterEach, BeforeEach, TestInfo}
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource
import com.yammer.metrics.core.Meter
import org.apache.kafka.metadata.LeaderConstants
import org.apache.logging.log4j.core.config.Configurator
class UncleanLeaderElectionTest extends QuorumTestHarness {
val brokerId1 = 0
@ -63,8 +64,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness {
val partitionId = 0
val topicPartition = new TopicPartition(topic, partitionId)
val kafkaApisLogger = Logger.getLogger(classOf[kafka.server.KafkaApis])
val networkProcessorLogger = Logger.getLogger(classOf[kafka.network.Processor])
val kafkaApisLogger = LogManager.getLogger(classOf[kafka.server.KafkaApis])
val networkProcessorLogger = LogManager.getLogger(classOf[kafka.network.Processor])
@BeforeEach
override def setUp(testInfo: TestInfo): Unit = {
@ -80,8 +81,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness {
}
// temporarily set loggers to a higher level so that tests run quietly
kafkaApisLogger.setLevel(Level.FATAL)
networkProcessorLogger.setLevel(Level.FATAL)
Configurator.setLevel(kafkaApisLogger.getName, Level.FATAL)
Configurator.setLevel(networkProcessorLogger.getName, Level.FATAL)
}
@AfterEach
@ -90,8 +91,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness {
brokers.foreach(broker => CoreUtils.delete(broker.config.logDirs))
// restore log levels
kafkaApisLogger.setLevel(Level.ERROR)
networkProcessorLogger.setLevel(Level.ERROR)
Configurator.setLevel(kafkaApisLogger.getName, Level.ERROR)
Configurator.setLevel(networkProcessorLogger.getName, Level.ERROR)
admin.close()

View File

@ -45,7 +45,8 @@ import org.apache.kafka.server.metrics.KafkaYammerMetrics
import org.apache.kafka.server.network.ConnectionDisconnectListener
import org.apache.kafka.server.quota.{ThrottleCallback, ThrottledChannel}
import org.apache.kafka.test.{TestSslUtils, TestUtils => JTestUtils}
import org.apache.log4j.Level
import org.apache.logging.log4j.{Level, LogManager}
import org.apache.logging.log4j.core.config.Configurator
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api._
@ -88,7 +89,7 @@ class SocketServerTest {
var server: SocketServer = _
val sockets = new ArrayBuffer[Socket]
private val kafkaLogger = org.apache.log4j.LogManager.getLogger("kafka")
private val kafkaLogger = LogManager.getLogger("kafka")
private var logLevelToRestore: Level = _
def endpoint: EndPoint = {
KafkaConfig.fromProps(props, doLog = false).dataPlaneListeners.head
@ -102,7 +103,7 @@ class SocketServerTest {
server.enableRequestProcessing(Map.empty).get(1, TimeUnit.MINUTES)
// Run the tests with TRACE logging to exercise request logging path
logLevelToRestore = kafkaLogger.getLevel
kafkaLogger.setLevel(Level.TRACE)
Configurator.setLevel(kafkaLogger.getName, Level.TRACE)
assertTrue(server.controlPlaneRequestChannelOpt.isEmpty)
}
@ -112,7 +113,7 @@ class SocketServerTest {
shutdownServerAndMetrics(server)
sockets.foreach(_.close())
sockets.clear()
kafkaLogger.setLevel(logLevelToRestore)
Configurator.setLevel(kafkaLogger.getName, logLevelToRestore)
TestUtils.clearYammerMetrics()
}

View File

@ -54,10 +54,11 @@ versions += [
apacheds: "2.0.0-M24",
argparse4j: "0.7.0",
bcpkix: "1.78.1",
// Version >=3.1.2 includes an improvement to prevent hash DOS attacks,
// but currently, tests are failing in >=3.1.2. Therefore, we are temporarily using version 3.1.1.
// Version >=3.1.2 includes an improvement to prevent hash DOS attacks,
// but currently, tests are failing in >=3.1.2. Therefore, we are temporarily using version 3.1.1.
// The failing tests should be fixed under KAFKA-18089, allowing us to upgrade to >=3.1.2.
caffeine: "3.1.1",
bndlib: "7.0.0",
checkstyle: project.hasProperty('checkstyleVersion') ? checkstyleVersion : "10.20.2",
commonsCli: "1.4",
commonsIo: "2.14.0", // ZooKeeper dependency. Do not use, this is going away.
@ -106,6 +107,7 @@ versions += [
kafka_37: "3.7.1",
kafka_38: "3.8.1",
kafka_39: "3.9.0",
log4j2: "2.24.1",
// When updating lz4 make sure the compression levels in org.apache.kafka.common.record.CompressionType are still valid
lz4: "1.8.0",
mavenArtifact: "3.9.6",
@ -115,7 +117,6 @@ versions += [
opentelemetryProto: "1.0.0-alpha",
protobuf: "3.25.5", // a dependency of opentelemetryProto
pcollections: "4.0.1",
reload4j: "1.2.25",
re2j: "1.7",
rocksDB: "7.9.2",
// When updating the scalafmt version please also update the version field in checkstyle/.scalafmt.conf. scalafmt now
@ -148,6 +149,7 @@ libs += [
apachedsJdbmPartition: "org.apache.directory.server:apacheds-jdbm-partition:$versions.apacheds",
argparse4j: "net.sourceforge.argparse4j:argparse4j:$versions.argparse4j",
bcpkix: "org.bouncycastle:bcpkix-jdk18on:$versions.bcpkix",
bndlib:"biz.aQute.bnd:biz.aQute.bndlib:$versions.bndlib",
caffeine: "com.github.ben-manes.caffeine:caffeine:$versions.caffeine",
classgraph: "io.github.classgraph:classgraph:$versions.classgraph",
commonsCli: "commons-cli:commons-cli:$versions.commonsCli",
@ -155,6 +157,7 @@ libs += [
commonsValidator: "commons-validator:commons-validator:$versions.commonsValidator",
jacksonAnnotations: "com.fasterxml.jackson.core:jackson-annotations:$versions.jackson",
jacksonDatabind: "com.fasterxml.jackson.core:jackson-databind:$versions.jackson",
jacksonDatabindYaml: "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$versions.jackson",
jacksonDataformatCsv: "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:$versions.jackson",
jacksonModuleScala: "com.fasterxml.jackson.module:jackson-module-scala_$versions.baseScala:$versions.jackson",
jacksonJDK8Datatypes: "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:$versions.jackson",
@ -204,6 +207,10 @@ libs += [
kafkaStreams_37: "org.apache.kafka:kafka-streams:$versions.kafka_37",
kafkaStreams_38: "org.apache.kafka:kafka-streams:$versions.kafka_38",
kafkaStreams_39: "org.apache.kafka:kafka-streams:$versions.kafka_39",
log4j1Bridge2Api: "org.apache.logging.log4j:log4j-1.2-api:$versions.log4j2",
log4j2Api: "org.apache.logging.log4j:log4j-api:$versions.log4j2",
log4j2Core: "org.apache.logging.log4j:log4j-core:$versions.log4j2",
log4j2CoreTest: "org.apache.logging.log4j:log4j-core-test:$versions.log4j2",
lz4: "org.lz4:lz4-java:$versions.lz4",
metrics: "com.yammer.metrics:metrics-core:$versions.metrics",
dropwizardMetrics: "io.dropwizard.metrics:metrics-core:$versions.dropwizardMetrics",
@ -214,15 +221,15 @@ libs += [
pcollections: "org.pcollections:pcollections:$versions.pcollections",
opentelemetryProto: "io.opentelemetry.proto:opentelemetry-proto:$versions.opentelemetryProto",
protobuf: "com.google.protobuf:protobuf-java:$versions.protobuf",
reload4j: "ch.qos.reload4j:reload4j:$versions.reload4j",
re2j: "com.google.re2j:re2j:$versions.re2j",
rocksDBJni: "org.rocksdb:rocksdbjni:$versions.rocksDB",
scalaLibrary: "org.scala-lang:scala-library:$versions.scala",
scalaLogging: "com.typesafe.scala-logging:scala-logging_$versions.baseScala:$versions.scalaLogging",
scalaReflect: "org.scala-lang:scala-reflect:$versions.scala",
slf4jApi: "org.slf4j:slf4j-api:$versions.slf4j",
slf4jReload4j: "org.slf4j:slf4j-reload4j:$versions.slf4j",
slf4jLog4j2: "org.apache.logging.log4j:log4j-slf4j-impl:$versions.log4j2",
snappy: "org.xerial.snappy:snappy-java:$versions.snappy",
spotbugs: "com.github.spotbugs:spotbugs-annotations:$versions.spotbugs",
swaggerAnnotations: "io.swagger.core.v3:swagger-annotations:$swaggerVersion",
swaggerJaxrs2: "io.swagger.core.v3:swagger-jaxrs2:$swaggerVersion",
zookeeper: "org.apache.zookeeper:zookeeper:$versions.zookeeper",

View File

@ -1,6 +1,3 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
@ -12,8 +9,23 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=DEBUG, stdout
Configuration:
Properties:
Property:
- name: "logPattern"
value: "[%d] %p %m (%c:%L)%n"
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
Appenders:
Console:
name: STDOUT
PatternLayout:
pattern: "${logPattern}"
Loggers:
Root:
level: DEBUG
AppenderRef:
- ref: STDOUT
Logger:
- name: org.apache.kafka
level: DEBUG

View File

@ -0,0 +1,36 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration: everything at DEBUG to the console.
Configuration:
  Properties:
    Property:
      # Shared layout: [timestamp] level message (logger:line).
      # NOTE(review): %L forces a location (stack-walk) lookup per event — fine for tests.
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    # Single console appender writing to standard out.
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: DEBUG
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Explicit DEBUG for Kafka classes (same as root; kept for easy per-package overrides).
      - name: org.apache.kafka
        level: DEBUG

View File

@ -17,6 +17,7 @@
# Resolve the directory containing this script so the config path works from any CWD.
base_dir=$(dirname $0)
# If the caller supplied no logging options, warn about the log4j 1.x deprecation
# and fall back to the legacy properties file so existing setups keep working.
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
    echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/kraft-log4j2.yml\"'"
    export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/kraft-log4j.properties"
fi

View File

@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration: INFO-level logging to standard error.
Configuration:
  Properties:
    Property:
      # Layout without %L — no per-event location lookup.
      - name: "logPattern"
        value: "[%d] %p %m (%c)%n"
  Appenders:
    # Console appender targeting stderr so stdout stays clean for tool output.
    Console:
      name: STDERR
      target: SYSTEM_ERR
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDERR
    Logger:
      # Raft/snapshot packages pinned at INFO (same as root; explicit for easy tuning).
      - name: org.apache.kafka.raft
        level: INFO
      - name: org.apache.kafka.snapshot
        level: INFO

View File

@ -0,0 +1,38 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration: root logging disabled; only raft/snapshot errors surface.
Configuration:
  Properties:
    Property:
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      # OFF silences everything not explicitly re-enabled below.
      level: OFF
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Keep only ERROR output from the raft and snapshot packages.
      - name: org.apache.kafka.raft
        level: ERROR
      - name: org.apache.kafka.snapshot
        level: ERROR

View File

@ -1,21 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.logger.org.apache.kafka=INFO

View File

@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@ -12,10 +12,24 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=DEBUG, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
Configuration:
Properties:
Property:
- name: "logPattern"
value: "[%d] %p %m (%c:%L)%n"
log4j.logger.org.apache.kafka=DEBUG
Appenders:
Console:
name: STDOUT
PatternLayout:
pattern: "${logPattern}"
Loggers:
Root:
level: INFO
AppenderRef:
- ref: STDOUT
Logger:
- name: org.apache.kafka
level: INFO

View File

@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@ -12,10 +12,21 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=DEBUG, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
Configuration:
Properties:
Property:
- name: "logPattern"
value: "[%d] %p %m (%c:%L)%n"
log4j.logger.org.apache.kafka=DEBUG
Appenders:
Console:
name: STDOUT
PatternLayout:
pattern: "${logPattern}"
Loggers:
Root:
level: DEBUG
AppenderRef:
- ref: STDOUT

View File

@ -1,28 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=OFF, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.appender.fileAppender=org.apache.log4j.RollingFileAppender
log4j.appender.fileAppender.layout=org.apache.log4j.PatternLayout
log4j.appender.fileAppender.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
log4j.appender.fileAppender.File=storage.log
log4j.logger.org.apache.kafka.server.log.remote.storage=INFO
log4j.logger.org.apache.kafka.server.log.remote.metadata.storage=INFO
log4j.logger.kafka.log.remote=INFO

View File

@ -0,0 +1,57 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration for tiered-storage tests: root is OFF; the remote-storage
# packages log at INFO into a daily-rolling storage.log file.
Configuration:
  Properties:
    Property:
      # Console layout (unused unless root/console logging is re-enabled).
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
      # File layout: timestamp [thread] level logger NDC - message.
      - name: "fileLogPattern"
        value: "%d [%t] %-5p %c %x - %m%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
    RollingFile:
      - name: FileAppender
        fileName: storage.log
        # Rolled files are date-stamped; with interval 1 and a daily date pattern
        # this rolls once per day.
        filePattern: "storage-%d{yyyy-MM-dd}.log"
        PatternLayout:
          pattern: "${fileLogPattern}"
        TimeBasedTriggeringPolicy:
          interval: 1
  Loggers:
    Root:
      # All other logging is suppressed.
      level: OFF
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Remote/tiered storage packages write INFO to the rolling file only.
      - name: org.apache.kafka.server.log.remote.storage
        level: INFO
        AppenderRef:
          - ref: FileAppender
      - name: org.apache.kafka.server.log.remote.metadata.storage
        level: INFO
        AppenderRef:
          - ref: FileAppender
      - name: kafka.log.remote
        level: INFO
        AppenderRef:
          - ref: FileAppender

View File

@ -1,36 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.logger.kafka=ERROR
log4j.logger.state.change.logger=ERROR
log4j.logger.org.apache.kafka=ERROR
log4j.logger.org.apache.kafka.clients=ERROR
# These are the only logs we will likely ever find anything useful in to debug Streams test failures
log4j.logger.org.apache.kafka.clients.consumer=INFO
log4j.logger.org.apache.kafka.clients.producer=INFO
log4j.logger.org.apache.kafka.streams=INFO
# printing out the configs takes up a huge amount of the allotted characters,
# and provides little value as we can always figure out the test configs without the logs
log4j.logger.org.apache.kafka.clients.producer.ProducerConfig=ERROR
log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=ERROR
log4j.logger.org.apache.kafka.clients.admin.AdminClientConfig=ERROR
log4j.logger.org.apache.kafka.streams.StreamsConfig=ERROR

View File

@ -0,0 +1,65 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration for Streams tests: console at INFO, with most Kafka
# packages quieted to ERROR; only the client/streams packages useful for debugging
# test failures stay at INFO, and the verbose *Config dumps are suppressed.
Configuration:
  Properties:
    Property:
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Broker/controller chatter is rarely useful for Streams test failures.
      - name: kafka
        level: ERROR
      - name: state.change.logger
        level: ERROR
      - name: org.apache.kafka
        level: ERROR
      - name: org.apache.kafka.clients
        level: ERROR
      # These are the packages most likely to explain a Streams test failure.
      - name: org.apache.kafka.clients.consumer
        level: INFO
      - name: org.apache.kafka.clients.producer
        level: INFO
      - name: org.apache.kafka.streams
        level: INFO
      # Config dumps consume enormous log space for little diagnostic value.
      - name: org.apache.kafka.clients.producer.ProducerConfig
        level: ERROR
      - name: org.apache.kafka.clients.consumer.ConsumerConfig
        level: ERROR
      - name: org.apache.kafka.clients.admin.AdminClientConfig
        level: ERROR
      - name: org.apache.kafka.streams.StreamsConfig
        level: ERROR

View File

@ -1,19 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n

View File

@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@ -12,11 +12,21 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=OFF, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
Configuration:
Properties:
Property:
- name: "logPattern"
value: "%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n"
log4j.logger.org.apache.kafka.raft=ERROR
log4j.logger.org.apache.kafka.snapshot=ERROR
Appenders:
Console:
name: STDOUT
PatternLayout:
pattern: "${logPattern}"
Loggers:
Root:
level: INFO
AppenderRef:
- ref: STDOUT

View File

@ -44,7 +44,7 @@ import org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor;
import org.apache.kafka.streams.state.BuiltInDslStoreSuppliers;
import org.apache.kafka.streams.utils.TestUtils.RecordingProcessorWrapper;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;

View File

@ -53,7 +53,7 @@ import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.internals.InternalTopicManager.ValidationResult;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

View File

@ -40,7 +40,7 @@ import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.kafka.test.MockSourceNode;
import org.apache.kafka.test.MockTimestampExtractor;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;

View File

@ -59,7 +59,8 @@ import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.filter.ThresholdFilter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@ -1283,7 +1284,7 @@ public class RecordCollectorTest {
try (final LogCaptureAppender logCaptureAppender =
LogCaptureAppender.createAndRegister(RecordCollectorImpl.class)) {
logCaptureAppender.setThreshold(Level.INFO);
logCaptureAppender.addFilter(ThresholdFilter.createFilter(Level.INFO, null, null));
collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, sinkNodeName, context, streamPartitioner);
collector.flush();

View File

@ -45,7 +45,7 @@ import org.apache.kafka.test.MockStandbyUpdateListener;
import org.apache.kafka.test.MockStateRestoreListener;
import org.apache.kafka.test.StreamsTestUtils;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;

View File

@ -55,7 +55,7 @@ import org.apache.kafka.streams.processor.internals.tasks.DefaultTaskManager;
import org.apache.kafka.streams.processor.internals.testutil.DummyStreamsConfig;
import org.apache.kafka.streams.state.internals.OffsetCheckpoint;
import org.apache.log4j.Level;
import org.apache.logging.log4j.Level;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

View File

@ -1,36 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.logger.kafka=ERROR
log4j.logger.state.change.logger=ERROR
log4j.logger.org.apache.kafka=ERROR
log4j.logger.org.apache.kafka.clients=ERROR
# These are the only logs we will likely ever find anything useful in to debug Streams test failures
log4j.logger.org.apache.kafka.clients.consumer=INFO
log4j.logger.org.apache.kafka.clients.producer=INFO
log4j.logger.org.apache.kafka.streams=INFO
# printing out the configs takes up a huge amount of the allotted characters,
# and provides little value as we can always figure out the test configs without the logs
log4j.logger.org.apache.kafka.clients.producer.ProducerConfig=ERROR
log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=ERROR
log4j.logger.org.apache.kafka.clients.admin.AdminClientConfig=ERROR
log4j.logger.org.apache.kafka.streams.StreamsConfig=ERROR

View File

@ -0,0 +1,65 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration for Streams upgrade-system tests: console at INFO,
# broker-side packages quieted to ERROR, client/streams packages kept at INFO,
# and the verbose *Config printouts suppressed.
Configuration:
  Properties:
    Property:
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Broker/controller logging is noise for these tests.
      - name: kafka
        level: ERROR
      - name: state.change.logger
        level: ERROR
      - name: org.apache.kafka
        level: ERROR
      - name: org.apache.kafka.clients
        level: ERROR
      # The logs most likely to contain useful debugging signal.
      - name: org.apache.kafka.clients.consumer
        level: INFO
      - name: org.apache.kafka.clients.producer
        level: INFO
      - name: org.apache.kafka.streams
        level: INFO
      # Config dumps eat the character budget and add little value.
      - name: org.apache.kafka.clients.producer.ProducerConfig
        level: ERROR
      - name: org.apache.kafka.clients.consumer.ConsumerConfig
        level: ERROR
      - name: org.apache.kafka.clients.admin.AdminClientConfig
        level: ERROR
      - name: org.apache.kafka.streams.StreamsConfig
        level: ERROR

View File

@ -1,34 +0,0 @@
# Copyright (C) 2018 Lightbend Inc. <https://www.lightbend.com>
# Copyright (C) 2017-2018 Alexis Seigneurin.
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Set root logger level to DEBUG and its only appender to A1.
log4j.rootLogger=INFO, R
# A1 is set to be a ConsoleAppender.
log4j.appender.A1=org.apache.log4j.ConsoleAppender
log4j.appender.R=org.apache.log4j.RollingFileAppender
log4j.appender.R.File=logs/kafka-streams-scala.log
log4j.appender.R.MaxFileSize=100KB
# Keep one backup file
log4j.appender.R.MaxBackupIndex=1
# A1 uses PatternLayout.
log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n

View File

@ -0,0 +1,40 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration for kafka-streams-scala: INFO logging into a small,
# size-rolled file (one 100KB backup), mirroring the old log4j 1.x setup.
Configuration:
  Properties:
    Property:
      # Layout: relative-ms [thread] level logger NDC - message.
      - name: "logPattern"
        value: "%-4r [%t] %-5p %c %x - %m%n"
  Appenders:
    # NOTE(review): A1 is declared but not referenced by any logger and has no
    # layout — a faithful port of the old file, where A1 was also unused. Confirm
    # whether it can be dropped.
    Console:
      name: A1
    RollingFile:
      - name: R
        fileName: logs/kafka-streams-scala.log
        filePattern: "streams-scala-%d{yyyy-MM-dd}.log"
        PatternLayout:
          pattern: "${logPattern}"
        # Roll when the active file exceeds 100KB; keep a single backup.
        SizeBasedTriggeringPolicy:
          size: "100KB"
        DefaultRolloverStrategy:
          max: 1
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: R

View File

@ -1,21 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.logger.org.apache.kafka=INFO

View File

@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration: INFO-level logging to the console.
Configuration:
  Properties:
    Property:
      # Layout: [timestamp] level message (logger:line).
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Kafka classes at INFO (same as root; explicit for easy per-package overrides).
      - name: org.apache.kafka
        level: INFO

View File

@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML configuration: INFO-level logging to the console.
Configuration:
  Properties:
    Property:
      # Layout: [timestamp] level message (logger:line).
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
    Logger:
      # Kafka classes at INFO (same as root; explicit for easy per-package overrides).
      - name: org.apache.kafka
        level: INFO

View File

@ -25,7 +25,7 @@ from ducktape.services.service import Service
from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.kafka.util import fix_opts_for_new_jvm
from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_connect
class ConnectServiceBase(KafkaPathResolverMixin, Service):
@ -38,7 +38,6 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service):
LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log")
STDOUT_FILE = os.path.join(PERSISTENT_ROOT, "connect.stdout")
STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr")
LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties")
PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid")
EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties")
CONNECT_REST_PORT = 8083
@ -340,7 +339,8 @@ class ConnectStandaloneService(ConnectServiceBase):
return self.nodes[0]
def start_cmd(self, node, connector_configs):
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE
cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % \
(get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)))
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["connect_heap_dump_file"]["path"]
other_kafka_opts = self.security_config.kafka_opts.strip('\"')
@ -364,7 +364,8 @@ class ConnectStandaloneService(ConnectServiceBase):
if self.external_config_template_func:
node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node))
node.account.create_file(self.CONFIG_FILE, self.config_template_func(node))
node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE))
node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)),
self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE))
remote_connector_configs = []
for idx, template in enumerate(self.connector_config_templates):
target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties")
@ -400,7 +401,8 @@ class ConnectDistributedService(ConnectServiceBase):
# connector_configs argument is intentionally ignored in distributed service.
def start_cmd(self, node, connector_configs):
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE
cmd = ("( export KAFKA_LOG4J_OPTS=\"%s%s\"; " %
(get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node))))
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["connect_heap_dump_file"]["path"]
other_kafka_opts = self.security_config.kafka_opts.strip('\"')
@ -421,7 +423,8 @@ class ConnectDistributedService(ConnectServiceBase):
if self.external_config_template_func:
node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node))
node.account.create_file(self.CONFIG_FILE, self.config_template_func(node))
node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE))
node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)),
self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE))
if self.connector_config_templates:
raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API")

View File

@ -21,8 +21,8 @@ from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.monitor.jmx import JmxMixin, JmxTool
from kafkatest.version import DEV_BRANCH, LATEST_3_7
from kafkatest.services.kafka.util import fix_opts_for_new_jvm
from kafkatest.version import DEV_BRANCH, LATEST_3_7, get_version, LATEST_4_0
from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
"""
The console consumer is a tool that reads data from Kafka and outputs it to standard output.
@ -36,7 +36,6 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "console_consumer.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "console_consumer.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "console_consumer.properties")
JMX_TOOL_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.log")
JMX_TOOL_ERROR_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log")
@ -146,7 +145,8 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
args['stdout'] = ConsoleConsumer.STDOUT_CAPTURE
args['stderr'] = ConsoleConsumer.STDERR_CAPTURE
args['log_dir'] = ConsoleConsumer.LOG_DIR
args['log4j_config'] = ConsoleConsumer.LOG4J_CONFIG
args['log4j_param'] = get_log4j_config_param(node)
args['log4j_config'] = get_log4j_config_for_tools(node)
args['config_file'] = ConsoleConsumer.CONFIG_FILE
args['stdout'] = ConsoleConsumer.STDOUT_CAPTURE
args['jmx_port'] = self.jmx_port
@ -160,7 +160,7 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
cmd = fix_opts_for_new_jvm(node)
cmd += "export JMX_PORT=%(jmx_port)s; " \
"export LOG_DIR=%(log_dir)s; " \
"export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j_config)s\"; " \
"export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j_config)s\"; " \
"export KAFKA_OPTS=%(kafka_opts)s; " \
"%(console_consumer)s " \
"--topic %(topic)s " \
@ -226,8 +226,8 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
node.account.create_file(ConsoleConsumer.CONFIG_FILE, prop_file)
# Create and upload log properties
log_config = self.render('tools_log4j.properties', log_file=ConsoleConsumer.LOG_FILE)
node.account.create_file(ConsoleConsumer.LOG4J_CONFIG, log_config)
log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsoleConsumer.LOG_FILE)
node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Run and capture output
cmd = self.start_cmd(node)

View File

@ -33,7 +33,7 @@ from kafkatest.services.security.listener_security_config import ListenerSecurit
from kafkatest.services.security.security_config import SecurityConfig
from kafkatest.version import DEV_BRANCH
from kafkatest.version import KafkaVersion
from kafkatest.services.kafka.util import fix_opts_for_new_jvm
from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config
class KafkaListener:
@ -145,7 +145,6 @@ class KafkaService(KafkaPathResolverMixin, JmxMixin, Service):
"""
PERSISTENT_ROOT = "/mnt/kafka"
STDOUT_STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "server-start-stdout-stderr.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties")
# Logs such as controller.log, server.log, etc all go here
OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs")
OPERATIONAL_LOG_INFO_DIR = os.path.join(OPERATIONAL_LOG_DIR, "info")
@ -805,7 +804,7 @@ class KafkaService(KafkaPathResolverMixin, JmxMixin, Service):
kafka_mode = self.context.globals.get("kafka_mode", "")
cmd = f"export KAFKA_MODE={kafka_mode}; "
cmd += "export JMX_PORT=%d; " % self.jmx_port
cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG
cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config(node)))
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["kafka_heap_dump_file"]["path"]
security_kafka_opts = self.security_config.kafka_opts.strip('\"')
@ -874,7 +873,8 @@ class KafkaService(KafkaPathResolverMixin, JmxMixin, Service):
self.logger.info("kafka.properties:")
self.logger.info(prop_file)
node.account.create_file(KafkaService.CONFIG_FILE, prop_file)
node.account.create_file(self.LOG4J_CONFIG, self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR))
node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config(node)),
self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR))
if self.quorum_info.using_kraft:
# format log directories if necessary

View File

@ -0,0 +1,283 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
Properties:
Property:
- name: "log_dir"
value: {{ log_dir }}
- name: "logPattern"
value: "[%d] %p %m (%c)%n"
Appenders:
Console:
name: STDOUT
PatternLayout:
pattern: "${logPattern}"
RollingFile:
- name: KafkaInfoAppender
fileName: "${log_dir}/info/server.log"
filePattern: "${log_dir}/info/server.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: INFO
onMatch: ACCEPT
- name: StateChangeInfoAppender
fileName: "${log_dir}/info/state-change.log"
filePattern: "${log_dir}/info/state-change.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: INFO
onMatch: ACCEPT
- name: RequestInfoAppender
fileName: "${log_dir}/info/kafka-request.log"
filePattern: "${log_dir}/info/kafka-request.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: INFO
onMatch: ACCEPT
- name: CleanerInfoAppender
fileName: "${log_dir}/info/log-cleaner.log"
filePattern: "${log_dir}/info/log-cleaner.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: INFO
onMatch: ACCEPT
- name: ControllerInfoAppender
fileName: "${log_dir}/info/controller.log"
filePattern: "${log_dir}/info/controller.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: INFO
onMatch: ACCEPT
- name: AuthorizerInfoAppender
fileName: "${log_dir}/info/kafka-authorizer.log"
filePattern: "${log_dir}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: INFO
onMatch: ACCEPT
- name: KafkaDebugAppender
fileName: "${log_dir}/debug/server.log"
filePattern: "${log_dir}/debug/server.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: DEBUG
onMatch: ACCEPT
- name: StateChangeDebugAppender
fileName: "${log_dir}/debug/state-change.log"
filePattern: "${log_dir}/debug/state-change.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: DEBUG
onMatch: ACCEPT
- name: RequestDebugAppender
fileName: "${log_dir}/debug/kafka-request.log"
filePattern: "${log_dir}/debug/kafka-request.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: DEBUG
onMatch: ACCEPT
- name: CleanerDebugAppender
fileName: "${log_dir}/debug/log-cleaner.log"
filePattern: "${log_dir}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: DEBUG
onMatch: ACCEPT
- name: ControllerDebugAppender
fileName: "${log_dir}/debug/controller.log"
filePattern: "${log_dir}/debug/controller.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: DEBUG
onMatch: ACCEPT
- name: AuthorizerDebugAppender
fileName: "${log_dir}/debug/kafka-authorizer.log"
filePattern: "${log_dir}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH}"
PatternLayout:
pattern: "${logPattern}"
TimeBasedTriggeringPolicy:
interval: 1
Filters:
ThresholdFilter:
level: DEBUG
onMatch: ACCEPT
Loggers:
Root:
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: STDOUT
Logger:
- name: kafka.producer.async.DefaultEventHandler
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: KafkaInfoAppender
- ref: KafkaDebugAppender
- name: kafka.client.ClientUtils
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: KafkaInfoAppender
- ref: KafkaDebugAppender
- name: kafka.perf
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: KafkaInfoAppender
- ref: KafkaDebugAppender
- name: kafka.perf.ProducerPerformance$ProducerThread
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: KafkaInfoAppender
- ref: KafkaDebugAppender
- name: kafka
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: KafkaInfoAppender
- ref: KafkaDebugAppender
- name: kafka.network.RequestChannel$
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: RequestInfoAppender
- ref: RequestDebugAppender
- name: kafka.network.Processor
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: RequestInfoAppender
- ref: RequestDebugAppender
- name: kafka.server.KafkaApis
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: RequestInfoAppender
- ref: RequestDebugAppender
- name: kafka.request.logger
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: RequestInfoAppender
- ref: RequestDebugAppender
- name: org.apache.kafka.raft
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: ControllerInfoAppender
- ref: ControllerDebugAppender
- name: org.apache.kafka.controller
level: {{ log_level|default("DEBUG") }}
AppenderRef:
- ref: ControllerInfoAppender
- ref: ControllerDebugAppender
- name: kafka.controller
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: ControllerInfoAppender
- ref: ControllerDebugAppender
- name: kafka.log.LogCleaner
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: CleanerInfoAppender
- ref: CleanerDebugAppender
- name: state.change.logger
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: StateChangeInfoAppender
- ref: StateChangeDebugAppender
- name: kafka.authorizer.logger
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: AuthorizerInfoAppender
- ref: AuthorizerDebugAppender
- name: org.apache.kafka.coordinator.group
level: {{ log_level|default("DEBUG") }}
additivity: false
AppenderRef:
- ref: KafkaInfoAppender
- ref: KafkaDebugAppender

View File

@ -16,6 +16,7 @@
from collections import namedtuple
from kafkatest.utils.remote_account import java_version
from kafkatest.version import LATEST_4_0, get_version
TopicPartition = namedtuple('TopicPartition', ['topic', 'partition'])
@ -30,4 +31,20 @@ def fix_opts_for_new_jvm(node):
return ""
def _log4j_config_name(node, log4j2_name, log4j_name):
    """Select between the Log4j2 and legacy Log4j name for a node.

    Nodes running Kafka 4.0 or newer use Log4j2 with YAML configuration;
    older nodes still use the legacy Log4j properties format. All of the
    get_log4j_* helpers below share this single version gate.

    :param node: ducktape cluster node whose Kafka version decides the format
    :param log4j2_name: value to return for 4.0+ nodes
    :param log4j_name: value to return for pre-4.0 nodes
    """
    return log4j2_name if get_version(node) >= LATEST_4_0 else log4j_name

def get_log4j_config_param(node):
    """Return the JVM system-property prefix (ending in 'file:') used in
    KAFKA_LOG4J_OPTS to point the process at its logging config file."""
    return _log4j_config_name(node, '-Dlog4j2.configurationFile=file:', '-Dlog4j.configuration=file:')

def get_log4j_config(node):
    """Return the broker logging config file name for the node's version."""
    return _log4j_config_name(node, 'log4j2.yaml', 'log4j.properties')

def get_log4j_config_for_connect(node):
    """Return the Kafka Connect logging config file name for the node's version."""
    return _log4j_config_name(node, 'connect_log4j2.yaml', 'connect_log4j.properties')

def get_log4j_config_for_tools(node):
    """Return the CLI-tools logging config file name for the node's version."""
    return _log4j_config_name(node, 'tools_log4j2.yaml', 'tools_log4j.properties')

def get_log4j_config_for_trogdor_coordinator(node):
    """Return the Trogdor coordinator logging config file name for the node's version."""
    return _log4j_config_name(node, 'trogdor-coordinator-log4j2.yaml', 'trogdor-coordinator-log4j.properties')

def get_log4j_config_for_trogdor_agent(node):
    """Return the Trogdor agent logging config file name for the node's version."""
    return _log4j_config_name(node, 'trogdor-agent-log4j2.yaml', 'trogdor-agent-log4j.properties')

View File

@ -16,7 +16,7 @@
import os
from kafkatest.services.kafka.util import fix_opts_for_new_jvm
from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.performance import PerformanceService
from kafkatest.version import V_2_5_0, DEV_BRANCH
@ -49,7 +49,6 @@ class ConsumerPerformanceService(PerformanceService):
STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stdout")
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stderr")
LOG_FILE = os.path.join(LOG_DIR, "consumer_performance.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "consumer.properties")
logs = {
@ -111,7 +110,7 @@ class ConsumerPerformanceService(PerformanceService):
cmd = fix_opts_for_new_jvm(node)
cmd += "export LOG_DIR=%s;" % ConsumerPerformanceService.LOG_DIR
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\";" % ConsumerPerformanceService.LOG4J_CONFIG
cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\";" % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += " %s" % self.path.script("kafka-consumer-perf-test.sh", node)
for key, value in self.args(node.version).items():
cmd += " --%s %s" % (key, value)
@ -128,8 +127,8 @@ class ConsumerPerformanceService(PerformanceService):
def _worker(self, idx, node):
node.account.ssh("mkdir -p %s" % ConsumerPerformanceService.PERSISTENT_ROOT, allow_fail=False)
log_config = self.render('tools_log4j.properties', log_file=ConsumerPerformanceService.LOG_FILE)
node.account.create_file(ConsumerPerformanceService.LOG4J_CONFIG, log_config)
log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsumerPerformanceService.LOG_FILE)
node.account.create_file(get_log4j_config_for_tools(node), log_config)
node.account.create_file(ConsumerPerformanceService.CONFIG_FILE, str(self.security_config))
self.security_config.setup_node(node)

View File

@ -15,9 +15,8 @@
import os
from kafkatest.services.kafka.util import fix_opts_for_new_jvm
from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.performance import PerformanceService
from kafkatest.services.security.security_config import SecurityConfig
from kafkatest.version import get_version, V_3_4_0, DEV_BRANCH
@ -31,7 +30,6 @@ class EndToEndLatencyService(PerformanceService):
STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stdout")
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stderr")
LOG_FILE = os.path.join(LOG_DIR, "end_to_end_latency.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "client.properties")
logs = {
@ -76,7 +74,7 @@ class EndToEndLatencyService(PerformanceService):
})
cmd = fix_opts_for_new_jvm(node)
cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % EndToEndLatencyService.LOG4J_CONFIG
cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += "KAFKA_OPTS=%(kafka_opts)s %(kafka_run_class)s %(java_class_name)s " % args
cmd += "%(bootstrap_servers)s %(topic)s %(num_records)d %(acks)d %(message_bytes)d %(config_file)s" % args
@ -88,9 +86,9 @@ class EndToEndLatencyService(PerformanceService):
def _worker(self, idx, node):
node.account.ssh("mkdir -p %s" % EndToEndLatencyService.PERSISTENT_ROOT, allow_fail=False)
log_config = self.render('tools_log4j.properties', log_file=EndToEndLatencyService.LOG_FILE)
log_config = self.render(get_log4j_config_for_tools(node), log_file=EndToEndLatencyService.LOG_FILE)
node.account.create_file(EndToEndLatencyService.LOG4J_CONFIG, log_config)
node.account.create_file(get_log4j_config_for_tools(node), log_config)
client_config = str(self.security_config)
client_config += "compression_type=%(compression_type)s" % self.args
node.account.create_file(EndToEndLatencyService.CONFIG_FILE, client_config)

View File

@ -19,7 +19,7 @@ from ducktape.utils.util import wait_until
from ducktape.cluster.remoteaccount import RemoteCommandError
from kafkatest.directory_layout.kafka_path import TOOLS_JAR_NAME, TOOLS_DEPENDANT_TEST_LIBS_JAR_NAME
from kafkatest.services.kafka.util import fix_opts_for_new_jvm
from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.monitor.http import HttpMetricsCollector
from kafkatest.services.performance import PerformanceService
from kafkatest.services.security.security_config import SecurityConfig
@ -33,7 +33,6 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService):
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "producer_performance.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "producer_performance.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput, version=DEV_BRANCH, settings=None,
intermediate_stats=False, client_id="producer-performance"):
@ -90,7 +89,7 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService):
cmd += "for file in %s; do CLASSPATH=$CLASSPATH:$file; done; " % jar
cmd += "export CLASSPATH; "
cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % ProducerPerformanceService.LOG4J_CONFIG
cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += "KAFKA_OPTS=%(kafka_opts)s KAFKA_HEAP_OPTS=\"-XX:+HeapDumpOnOutOfMemoryError\" %(kafka_run_class)s org.apache.kafka.tools.ProducerPerformance " \
"--topic %(topic)s --num-records %(num_records)d --record-size %(record_size)d --throughput %(throughput)d --producer-props bootstrap.servers=%(bootstrap_servers)s client.id=%(client_id)s %(metrics_props)s" % args
@ -119,8 +118,8 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService):
node.account.ssh("mkdir -p %s" % ProducerPerformanceService.PERSISTENT_ROOT, allow_fail=False)
# Create and upload log properties
log_config = self.render('tools_log4j.properties', log_file=ProducerPerformanceService.LOG_FILE)
node.account.create_file(ProducerPerformanceService.LOG4J_CONFIG, log_config)
log_config = self.render(get_log4j_config_for_tools(node), log_file=ProducerPerformanceService.LOG_FILE)
node.account.create_file(get_log4j_config_for_tools(node), log_config)
cmd = self.start_cmd(node)
self.logger.debug("Producer performance %d command: %s", idx, cmd)

View File

@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@ -12,11 +12,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=OFF, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
Configuration:
Appenders:
File:
name: FILE
fileName: {{ log_file }}
append: true
immediateFlush: true
PatternLayout:
pattern: "[%d] %p %m (%c)%n"
log4j.logger.kafka=WARN
log4j.logger.org.apache.kafka=WARN
Loggers:
Root:
level: {{ log_level|default("INFO") }}
AppenderRef:
- ref: FILE

View File

@ -22,6 +22,7 @@ from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.kafka import KafkaConfig
from kafkatest.services.monitor.jmx import JmxMixin
from .kafka.util import get_log4j_config_param, get_log4j_config_for_tools
STATE_DIR = "state.dir"
@ -37,7 +38,6 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service):
STDERR_FILE = os.path.join(PERSISTENT_ROOT, "streams.stderr")
JMX_LOG_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.log")
JMX_ERR_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log")
LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
PID_FILE = os.path.join(PERSISTENT_ROOT, "streams.pid")
CLEAN_NODE_ENABLED = True
@ -285,10 +285,11 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
args['log4j'] = self.LOG4J_CONFIG_FILE
args['log4j_param'] = get_log4j_config_param(node)
args['log4j'] = get_log4j_config_for_tools(node)
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \
" %(config_file)s %(user_test_args1)s %(user_test_args2)s %(user_test_args3)s" \
" %(user_test_args4)s & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@ -305,7 +306,7 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service):
node.account.mkdirs(self.PERSISTENT_ROOT)
prop_file = self.prop_file()
node.account.create_file(self.CONFIG_FILE, prop_file)
node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j.properties', log_file=self.LOG_FILE))
node.account.create_file(get_log4j_config_for_tools(node), self.render(get_log4j_config_for_tools(node), log_file=self.LOG_FILE))
self.logger.info("Starting StreamsTest process on " + str(node.account))
with node.account.monitor_log(self.STDOUT_FILE) as monitor:
@ -363,11 +364,12 @@ class StreamsSmokeTestBaseService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
args['log4j'] = self.LOG4J_CONFIG_FILE
args['log4j_param'] = get_log4j_config_param(node)
args['log4j'] = get_log4j_config_for_tools(node)
args['version'] = self.KAFKA_STREAMS_VERSION
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\";" \
cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \
" INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s" \
" %(kafka_run_class)s %(streams_class_name)s" \
" %(config_file)s %(user_test_args1)s" \
@ -419,11 +421,12 @@ class StreamsSmokeTestDriverService(StreamsSmokeTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
args['log4j'] = self.LOG4J_CONFIG_FILE
args['log4j_param'] = get_log4j_config_param(node)
args['log4j'] = get_log4j_config_for_tools(node)
args['disable_auto_terminate'] = self.DISABLE_AUTO_TERMINATE
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \
" %(config_file)s %(user_test_args1)s %(disable_auto_terminate)s" \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@ -496,10 +499,11 @@ class StreamsBrokerDownResilienceService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
args['log4j'] = self.LOG4J_CONFIG_FILE
args['log4j_param'] = get_log4j_config_param(node)
args['log4j'] = get_log4j_config_for_tools(node)
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \
" %(config_file)s %(user_test_args1)s %(user_test_args2)s %(user_test_args3)s" \
" %(user_test_args4)s & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@ -535,12 +539,13 @@ class StreamsResetter(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
args['log4j'] = self.LOG4J_CONFIG_FILE
args['log4j_param'] = get_log4j_config_param(node)
args['log4j'] = get_log4j_config_for_tools(node)
args['application.id'] = self.applicationId
args['input.topics'] = self.topic
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
cmd = "(export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
cmd = "(export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \
"%(kafka_run_class)s %(streams_class_name)s " \
"--bootstrap-server %(bootstrap.servers)s " \
"--force " \
@ -630,11 +635,12 @@ class StreamsUpgradeTestJobRunnerService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
args['log4j'] = self.LOG4J_CONFIG_FILE
args['log4j_param'] = get_log4j_config_param(node)
args['log4j'] = get_log4j_config_for_tools(node)
args['version'] = self.KAFKA_STREAMS_VERSION
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \
" %(kafka_run_class)s %(streams_class_name)s %(config_file)s " \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@ -730,11 +736,12 @@ class CooperativeRebalanceUpgradeService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
args['log4j'] = self.LOG4J_CONFIG_FILE
args['log4j_param'] = get_log4j_config_param(node)
args['log4j'] = get_log4j_config_for_tools(node)
args['version'] = self.KAFKA_STREAMS_VERSION
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \
" %(kafka_run_class)s %(streams_class_name)s %(config_file)s " \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args

View File

@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
Properties:
Property:
- name: "logPattern"
value: "[%d] %p %m (%c)%n"
Appenders:
File:
- name: FILE
fileName: {{ log_file }}
append: true
immediateFlush: true
PatternLayout:
pattern: "${logPattern}"
Loggers:
Root:
level: {{ log_level|default("INFO") }}
AppenderRef:
- ref: FILE

View File

@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
Configuration:
Appenders:
File:
name: FILE
fileName: {{ log_file }}
append: true
immediateFlush: true
PatternLayout:
pattern: "[%d] %p %m (%c)%n"
Loggers:
Root:
level: {{ log_level|default("INFO") }}
AppenderRef:
- ref: FILE
{% if loggers is defined %}
Logger:
# Add additional loggers dynamically if defined
{% for logger, log_level in loggers.items() %}
- name: {{ logger }}
level: {{ log_level }}
{% endfor %}
{% endif %}

View File

@ -22,6 +22,9 @@ from ducktape.services.background_thread import BackgroundThreadService
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from ducktape.cluster.remoteaccount import RemoteCommandError
from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools
class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService):
"""This service wraps org.apache.kafka.tools.TransactionalMessageCopier for
use in system testing.
@ -31,7 +34,6 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "transactional_message_copier.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "transactional_message_copier.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
logs = {
"transactional_message_copier_stdout": {
@ -75,9 +77,9 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService
node.account.ssh("mkdir -p %s" % TransactionalMessageCopier.PERSISTENT_ROOT,
allow_fail=False)
# Create and upload log properties
log_config = self.render('tools_log4j.properties',
log_config = self.render(get_log4j_config_for_tools(node),
log_file=TransactionalMessageCopier.LOG_FILE)
node.account.create_file(TransactionalMessageCopier.LOG4J_CONFIG, log_config)
        node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Configure security
self.security_config = self.kafka.security_config.client_config(node=node)
self.security_config.setup_node(node)
@ -114,7 +116,7 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService
def start_cmd(self, node, idx):
cmd = "export LOG_DIR=%s;" % TransactionalMessageCopier.LOG_DIR
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % TransactionalMessageCopier.LOG4J_CONFIG
cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += self.path.script("kafka-run-class.sh", node) + " org.apache.kafka.tools." + "TransactionalMessageCopier"
cmd += " --broker-list %s" % self.kafka.bootstrap_servers(self.security_config.security_protocol)
cmd += " --transactional-id %s" % self.transactional_id

View File

@ -0,0 +1,42 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML template for Trogdor daemons (coordinator and agent).
# Rendered via ducktape Jinja templating; the single template variable
# {{ log_path }} is the absolute log-file path on the worker node.
Configuration:
  Properties:
    Property:
      # Shared layout: [timestamp] LEVEL message (logger-name)
      - name: "logPattern"
        value: "[%d] %p %m (%c)%n"
  Appenders:
    File:
      # Single file appender; all loggers below route here via the Root ref.
      - name: MyFileLogger
        fileName: {{ log_path }}
        PatternLayout:
          pattern: "${logPattern}"
  Loggers:
    Root:
      # DEBUG at root so system-test failures capture maximum detail.
      level: DEBUG
      AppenderRef:
        - ref: MyFileLogger
    Logger:
      - name: kafka
        level: DEBUG
      - name: org.apache.kafka
        level: DEBUG
      # Jetty (REST server) is noisy at DEBUG; keep it at INFO.
      - name: org.eclipse
        level: INFO

View File

@ -22,6 +22,8 @@ from requests.packages.urllib3 import Retry
from ducktape.services.service import Service
from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config, \
get_log4j_config_for_trogdor_coordinator, get_log4j_config_for_trogdor_agent
class TrogdorService(KafkaPathResolverMixin, Service):
@ -48,8 +50,6 @@ class TrogdorService(KafkaPathResolverMixin, Service):
AGENT_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-agent-stdout-stderr.log")
COORDINATOR_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator.log")
AGENT_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-agent.log")
COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j.properties")
AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j.properties")
CONFIG_PATH = os.path.join(PERSISTENT_ROOT, "trogdor.conf")
DEFAULT_AGENT_PORT=8888
DEFAULT_COORDINATOR_PORT=8889
@ -141,26 +141,26 @@ class TrogdorService(KafkaPathResolverMixin, Service):
self._start_agent_node(node)
def _start_coordinator_node(self, node):
node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
self.render('log4j.properties',
node.account.create_file(get_log4j_config_for_trogdor_coordinator(node),
self.render(get_log4j_config(node),
log_path=TrogdorService.COORDINATOR_LOG))
self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR,
TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
get_log4j_config_for_trogdor_coordinator(node),
TrogdorService.COORDINATOR_LOG, node)
self.logger.info("Started trogdor coordinator on %s." % node.name)
def _start_agent_node(self, node):
node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES,
self.render('log4j.properties',
node.account.create_file(get_log4j_config_for_trogdor_agent(node),
self.render(get_log4j_config(node),
log_path=TrogdorService.AGENT_LOG))
self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR,
TrogdorService.AGENT_LOG4J_PROPERTIES,
get_log4j_config_for_trogdor_agent(node),
TrogdorService.AGENT_LOG, node)
self.logger.info("Started trogdor agent on %s." % node.name)
def _start_trogdor_daemon(self, daemon_name, stdout_stderr_capture_path,
log4j_properties_path, log_path, node):
cmd = "export KAFKA_LOG4J_OPTS='-Dlog4j.configuration=file:%s'; " % log4j_properties_path
cmd = "export KAFKA_LOG4J_OPTS='%s%s'; " % (get_log4j_config_param(node), log4j_properties_path)
cmd += "%s %s --%s.config %s --node-name %s 1>> %s 2>> %s &" % \
(self.path.script("trogdor.sh", node),
daemon_name,

View File

@ -20,6 +20,7 @@ from ducktape.services.background_thread import BackgroundThreadService
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.kafka import TopicPartition, consumer_group
from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.verifiable_client import VerifiableClientMixin
from kafkatest.version import DEV_BRANCH, V_2_3_0, V_2_3_1, V_3_7_0, V_4_0_0
@ -215,7 +216,6 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "verifiable_consumer.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.properties")
logs = {
@ -296,8 +296,8 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
node.account.ssh("mkdir -p %s" % VerifiableConsumer.PERSISTENT_ROOT, allow_fail=False)
# Create and upload log properties
log_config = self.render('tools_log4j.properties', log_file=VerifiableConsumer.LOG_FILE)
node.account.create_file(VerifiableConsumer.LOG4J_CONFIG, log_config)
log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableConsumer.LOG_FILE)
node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Create and upload config file
self.security_config = self.kafka.security_config.client_config(self.prop_file, node,
@ -380,7 +380,7 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
cmd = ""
cmd += "export LOG_DIR=%s;" % VerifiableConsumer.LOG_DIR
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % VerifiableConsumer.LOG4J_CONFIG
cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += self.impl.exec_cmd(node)
if self.on_record_consumed:
cmd += " --verbose"

View File

@ -24,7 +24,7 @@ from kafkatest.services.kafka import TopicPartition
from kafkatest.services.verifiable_client import VerifiableClientMixin
from kafkatest.utils import is_int, is_int_with_prefix
from kafkatest.version import get_version, V_2_5_0, DEV_BRANCH
from kafkatest.services.kafka.util import fix_opts_for_new_jvm
from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, BackgroundThreadService):
@ -41,7 +41,6 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "verifiable_producer.log")
LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.properties")
logs = {
@ -127,8 +126,8 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
node.account.ssh("mkdir -p %s" % VerifiableProducer.PERSISTENT_ROOT, allow_fail=False)
# Create and upload log properties
log_config = self.render('tools_log4j.properties', log_file=VerifiableProducer.LOG_FILE)
node.account.create_file(VerifiableProducer.LOG4J_CONFIG, log_config)
log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableProducer.LOG_FILE)
node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Configure security
self.security_config = self.kafka.security_config.client_config(node=node,
@ -222,7 +221,7 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
cmd += fix_opts_for_new_jvm(node)
cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % VerifiableProducer.LOG4J_CONFIG
cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += self.impl.exec_cmd(node)
version = get_version(node)
if version >= V_2_5_0:

View File

@ -18,8 +18,10 @@ from ducktape.mark import matrix
from ducktape.mark.resource import cluster
from ducktape.utils.util import wait_until
from kafkatest.services.kafka import quorum
from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.streams import StreamsTestBaseService
from kafkatest.tests.kafka_test import KafkaTest
from kafkatest.version import LATEST_4_0
class StreamsRelationalSmokeTestService(StreamsTestBaseService):
@ -33,14 +35,15 @@ class StreamsRelationalSmokeTestService(StreamsTestBaseService):
self.mode = mode
self.nodeId = nodeId
self.processing_guarantee = processing_guarantee
self.log4j_template = 'log4j_template.properties'
self.log4j_template = "log4j2_template.yaml" if (self.node.version >= LATEST_4_0) else "log4j_template.properties"
def start_cmd(self, node):
return "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
return "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s org.apache.kafka.streams.tests.RelationalSmokeTest " \
" %(mode)s %(kafka)s %(nodeId)s %(processing_guarantee)s %(state_dir)s" \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % {
"log4j": self.LOG4J_CONFIG_FILE,
"log4j_param": get_log4j_config_param(node),
"log4j": get_log4j_config_for_tools(node),
"kafka_run_class": self.path.script("kafka-run-class.sh", node),
"mode": self.mode,
"kafka": self.kafka.bootstrap_servers(),
@ -54,8 +57,9 @@ class StreamsRelationalSmokeTestService(StreamsTestBaseService):
def start_node(self, node):
node.account.mkdirs(self.PERSISTENT_ROOT)
node.account.create_file(self.LOG4J_CONFIG_FILE,
self.render("log4j_template.properties", log_file=self.LOG_FILE))
node.account.create_file(get_log4j_config_for_tools(node),
self.render("log4j2_template.yaml" if node.version >= LATEST_4_0 else "log4j_template.properties",
log_file=self.LOG_FILE))
self.logger.info("Starting process on " + str(node.account))
node.account.ssh(self.start_cmd(node))

View File

@ -12,12 +12,28 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=OFF, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
# Define the root logger with appender file
Configuration:
Appenders:
File:
name: FILE
fileName: {{ log_file }}
append: true
immediateFlush: true
PatternLayout:
pattern: "[%d] %p %m (%c)%n"
log4j.logger.org.apache.kafka=ERROR
# We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper
log4j.logger.org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper=INFO
Loggers:
Root:
level: {{ log_level|default("INFO") }}
AppenderRef:
- ref: FILE
Logger:
{% if loggers is defined %}
{% for logger, log_level in loggers.items() %}
- name: {{ logger }}
level: {{ log_level }}
{% endfor %}
{% endif %}

View File

@ -41,7 +41,7 @@ import org.apache.kafka.metadata.PartitionRegistration;
import org.apache.kafka.server.quota.QuotaType;
import org.apache.kafka.tools.reassign.ReassignPartitionsCommand;
import org.apache.log4j.PropertyConfigurator;
import org.apache.logging.log4j.core.config.Configurator;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartFrame;
import org.jfree.chart.JFreeChart;
@ -95,7 +95,7 @@ public class ReplicationQuotasTestRig {
private static final String DIR;
static {
PropertyConfigurator.configure("core/src/test/resources/log4j.properties");
Configurator.reconfigure();
new File("Experiments").mkdir();
DIR = "Experiments/Run" + Long.valueOf(System.currentTimeMillis()).toString().substring(8);

View File

@ -1,22 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.logger.org.apache.kafka=INFO
log4j.logger.org.eclipse.jetty=INFO

View File

@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML console configuration (replaces a log4j 1.x properties file):
# everything goes to STDOUT with an INFO root level.
Configuration:
  Properties:
    Property:
      # Layout: [timestamp] LEVEL message (logger:line)
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: INFO
      AppenderRef:
        - ref: STDOUT
    Logger:
      # NOTE(review): the replaced log4j.properties pinned these loggers to
      # INFO; they are now ERROR — confirm the level change is intentional.
      - name: org.apache.kafka
        level: ERROR
      - name: org.eclipse.jetty
        level: ERROR

View File

@ -1,22 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
log4j.rootLogger=TRACE, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
log4j.logger.org.apache.kafka=INFO
log4j.logger.org.eclipse.jetty=INFO

View File

@ -0,0 +1,39 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Log4j2 YAML console configuration (replaces a log4j 1.x properties file):
# TRACE at the root for maximum verbosity, with noisy library packages
# suppressed via explicit per-logger levels.
Configuration:
  Properties:
    Property:
      # Layout: [timestamp] LEVEL message (logger:line)
      - name: "logPattern"
        value: "[%d] %p %m (%c:%L)%n"
  Appenders:
    Console:
      name: STDOUT
      PatternLayout:
        pattern: "${logPattern}"
  Loggers:
    Root:
      level: TRACE
      AppenderRef:
        - ref: STDOUT
    Logger:
      # NOTE(review): the replaced log4j.properties pinned these loggers to
      # INFO; they are now ERROR — confirm the level change is intentional.
      - name: org.apache.kafka
        level: ERROR
      - name: org.eclipse.jetty
        level: ERROR