diff --git a/LICENSE-binary b/LICENSE-binary
index 08092f6def9..8e5f7c14342 100644
--- a/LICENSE-binary
+++ b/LICENSE-binary
@@ -220,6 +220,7 @@ jackson-annotations-2.16.2
jackson-core-2.16.2
jackson-databind-2.16.2
jackson-dataformat-csv-2.16.2
+jackson-dataformat-yaml-2.16.2
jackson-datatype-jdk8-2.16.2
jackson-jaxrs-base-2.16.2
jackson-jaxrs-json-provider-2.16.2
@@ -239,6 +240,11 @@ jetty-servlets-9.4.56.v20240826
jetty-util-9.4.56.v20240826
jetty-util-ajax-9.4.56.v20240826
jose4j-0.9.4
+log4j-1.2-api-2.24.1
+log4j-api-2.24.1
+log4j-core-2.24.1
+log4j-core-test-2.24.1
+log4j-slf4j-impl-2.24.1
lz4-java-1.8.0
maven-artifact-3.9.6
metrics-core-4.1.12.1
@@ -254,7 +260,6 @@ netty-transport-native-epoll-4.1.115.Final
netty-transport-native-unix-common-4.1.115.Final
opentelemetry-proto-1.0.0-alpha
plexus-utils-3.5.1
-reload4j-1.2.25
rocksdbjni-7.9.2
scala-library-2.13.15
scala-logging_2.13-3.9.5
@@ -312,7 +317,6 @@ argparse4j-0.7.0, see: licenses/argparse-MIT
classgraph-4.8.173, see: licenses/classgraph-MIT
jopt-simple-5.0.4, see: licenses/jopt-simple-MIT
slf4j-api-1.7.36, see: licenses/slf4j-MIT
-slf4j-reload4j-1.7.36, see: licenses/slf4j-MIT
pcollections-4.0.1, see: licenses/pcollections-MIT
---------------------------------------
diff --git a/README.md b/README.md
index b8275004aac..03069291a5e 100644
--- a/README.md
+++ b/README.md
@@ -50,10 +50,10 @@ Follow instructions in https://kafka.apache.org/quickstart
./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testTimeToNextUpdate
### Running a particular unit/integration test with log4j output ###
-By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j.properties` file in the module's `src/test/resources` directory.
+By default, there will be only a small number of logs output while testing. You can adjust it by changing the `log4j2.yml` file in the module's `src/test/resources` directory.
-For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j.properties#L21) in `clients/src/test/resources/log4j.properties`
-to `log4j.logger.org.apache.kafka=INFO` and then run:
+For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.yml#L35) in `clients/src/test/resources/log4j2.yml`
+to `level: INFO` and then run:
./gradlew cleanTest clients:test --tests NetworkClientTest
diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh
index b8088ad9234..a9d185493ef 100755
--- a/bin/connect-distributed.sh
+++ b/bin/connect-distributed.sh
@@ -22,8 +22,12 @@ fi
base_dir=$(dirname $0)
-if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
- export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
+if [ -z "$KAFKA_LOG4J_OPTS" ]; then
+ export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml"
+elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
+ echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
+ echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
+ echo "You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh
index 8e2b2e162da..90305754772 100755
--- a/bin/connect-mirror-maker.sh
+++ b/bin/connect-mirror-maker.sh
@@ -22,8 +22,12 @@ fi
base_dir=$(dirname $0)
-if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
- export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
+if [ -z "$KAFKA_LOG4J_OPTS" ]; then
+ export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml"
+elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
+ echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
+ echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
+ echo "You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh
index bef78d658fd..92e8dc9c8ee 100755
--- a/bin/connect-standalone.sh
+++ b/bin/connect-standalone.sh
@@ -22,8 +22,12 @@ fi
base_dir=$(dirname $0)
-if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
- export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties"
+if [ -z "$KAFKA_LOG4J_OPTS" ]; then
+ export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml"
+elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
+ echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
+ echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
+ echo "You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh
index 5a53126172d..6539746160f 100755
--- a/bin/kafka-server-start.sh
+++ b/bin/kafka-server-start.sh
@@ -21,8 +21,12 @@ then
fi
base_dir=$(dirname $0)
-if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
- export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties"
+if [ -z "$KAFKA_LOG4J_OPTS" ]; then
+ export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.yaml"
+elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then
+ echo "DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended." >&2
+ echo "To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration." >&2
+ echo "You can also use the \$KAFKA_HOME/config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration." >&2
fi
if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat
index 0535085bde5..43c338fc26a 100644
--- a/bin/windows/connect-distributed.bat
+++ b/bin/windows/connect-distributed.bat
@@ -27,7 +27,14 @@ popd
rem Log4j settings
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
- set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties
+ set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
+) ELSE (
+ echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
+ IF NOT ERRORLEVEL 1 (
+ echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2
+ echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2
+ echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2
+ )
)
"%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %*
diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat
index 12ebb21dc9a..bac8bbd1291 100644
--- a/bin/windows/connect-standalone.bat
+++ b/bin/windows/connect-standalone.bat
@@ -27,7 +27,14 @@ popd
rem Log4j settings
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
- set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties
+ set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
+) ELSE (
+ echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
+ IF NOT ERRORLEVEL 1 (
+ echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2
+ echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2
+ echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2
+ )
)
"%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectStandalone %*
diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat
index 8624eda9ff0..ff29321d000 100644
--- a/bin/windows/kafka-server-start.bat
+++ b/bin/windows/kafka-server-start.bat
@@ -21,7 +21,14 @@ IF [%1] EQU [] (
SetLocal
IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
- set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties
+ set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.yaml
+) ELSE (
+ echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
+ IF NOT ERRORLEVEL 1 (
+ echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2
+ echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2
+ echo You can also use the %~dp0../../config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2
+ )
)
IF ["%KAFKA_HEAP_OPTS%"] EQU [""] (
rem detect OS architecture
diff --git a/build.gradle b/build.gradle
index 51a6658f1be..78bf91d799f 100644
--- a/build.gradle
+++ b/build.gradle
@@ -138,7 +138,7 @@ ext {
}
runtimeTestLibs = [
- libs.slf4jReload4j,
+ libs.slf4jLog4j2,
libs.junitPlatformLanucher,
project(":test-common:test-common-runtime")
]
@@ -178,12 +178,14 @@ allprojects {
libs.scalaLibrary,
libs.scalaReflect,
libs.jacksonAnnotations,
+ libs.jacksonDatabindYaml,
// be explicit about the Netty dependency version instead of relying on the version set by
// ZooKeeper (potentially older and containing CVEs)
libs.nettyHandler,
libs.nettyTransportNativeEpoll,
- // be explicit about the reload4j version instead of relying on the transitive versions
- libs.reload4j
+ libs.log4j2Api,
+ libs.log4j2Core,
+ libs.log4j1Bridge2Api
)
}
}
@@ -963,13 +965,15 @@ project(':server') {
implementation libs.slf4jApi
- compileOnly libs.reload4j
+ compileOnly libs.log4j2Api
+ compileOnly libs.log4j2Core
+ compileOnly libs.log4j1Bridge2Api
testImplementation project(':clients').sourceSets.test.output
testImplementation libs.mockitoCore
testImplementation libs.junitJupiter
- testImplementation libs.slf4jReload4j
+ testImplementation libs.slf4jLog4j2
testRuntimeOnly runtimeTestLibs
}
@@ -1028,7 +1032,7 @@ project(':share') {
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
- testImplementation libs.slf4jReload4j
+ testImplementation libs.slf4jLog4j2
testRuntimeOnly runtimeTestLibs
}
@@ -1099,15 +1103,17 @@ project(':core') {
implementation libs.dropwizardMetrics
exclude module: 'slf4j-log4j12'
exclude module: 'log4j'
- // Both Kafka and Zookeeper use slf4j. ZooKeeper moved from log4j to logback in v3.8.0, but Kafka relies on reload4j.
+ // Both Kafka and Zookeeper use slf4j. ZooKeeper moved from log4j to logback in v3.8.0.
// We are removing Zookeeper's dependency on logback so we have a singular logging backend.
exclude module: 'logback-classic'
exclude module: 'logback-core'
}
// ZooKeeperMain depends on commons-cli but declares the dependency as `provided`
implementation libs.commonsCli
-
- compileOnly libs.reload4j
+ implementation libs.log4j2Core
+ implementation libs.log4j2Api
+ implementation libs.log4j1Bridge2Api
+ implementation libs.jacksonDatabindYaml
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':group-coordinator').sourceSets.test.output
@@ -1137,7 +1143,7 @@ project(':core') {
testImplementation libs.apachedsMavibotPartition
testImplementation libs.apachedsJdbmPartition
testImplementation libs.junitJupiter
- testImplementation libs.slf4jReload4j
+ testImplementation libs.slf4jLog4j2
testImplementation libs.caffeine
testRuntimeOnly runtimeTestLibs
@@ -1169,9 +1175,6 @@ project(':core') {
}
tasks.create(name: "copyDependantLibs", type: Copy) {
- from (configurations.compileClasspath) {
- include('reload4j*jar')
- }
from (configurations.runtimeClasspath) {
exclude('kafka-clients*')
}
@@ -1384,11 +1387,14 @@ project(':metadata') {
implementation libs.jacksonDatabind
implementation libs.jacksonJDK8Datatypes
implementation libs.metrics
- compileOnly libs.reload4j
+ compileOnly libs.log4j2Api
+ compileOnly libs.log4j2Core
+ compileOnly libs.log4j1Bridge2Api
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.jqwik
testImplementation libs.mockitoCore
- testImplementation libs.slf4jReload4j
+ testImplementation libs.slf4jLog4j2
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':raft').sourceSets.test.output
testImplementation project(':server-common').sourceSets.test.output
@@ -1513,6 +1519,7 @@ project(':group-coordinator') {
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':server-common').sourceSets.test.output
testImplementation project(':coordinator-common').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
@@ -1575,9 +1582,10 @@ project(':test-common') {
implementation project(':storage')
implementation project(':server-common')
implementation libs.slf4jApi
+ implementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
-
+
testRuntimeOnly runtimeTestLibs
}
@@ -1883,12 +1891,18 @@ project(':clients') {
testImplementation libs.bcpkix
testImplementation libs.jacksonJakartarsJsonProvider
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.jose4j
testImplementation libs.junitJupiter
- testImplementation libs.reload4j
+ testImplementation libs.log4j2Api
+ testImplementation libs.log4j2Core
+ testImplementation libs.log4j1Bridge2Api
+ testImplementation libs.spotbugs
testImplementation libs.mockitoCore
testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
+ testCompileOnly libs.bndlib
+
testRuntimeOnly libs.jacksonDatabind
testRuntimeOnly libs.jacksonJDK8Datatypes
testRuntimeOnly runtimeTestLibs
@@ -2046,11 +2060,13 @@ project(':raft') {
implementation project(':clients')
implementation libs.slf4jApi
implementation libs.jacksonDatabind
+ implementation libs.jacksonDatabindYaml
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
testImplementation project(':clients')
testImplementation project(':clients').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.jqwik
@@ -2146,6 +2162,7 @@ project(':server-common') {
testImplementation project(':clients')
testImplementation project(':clients').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
@@ -2279,6 +2296,7 @@ project(':storage') {
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
testImplementation libs.hamcrest
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.bcpkix
@@ -2441,13 +2459,18 @@ project(':tools') {
implementation libs.jacksonDataformatCsv
implementation libs.jacksonJDK8Datatypes
implementation libs.slf4jApi
- implementation libs.slf4jReload4j
+ implementation libs.slf4jLog4j2
+ implementation libs.log4j2Api
+ implementation libs.log4j2Core
+ implementation libs.log4j1Bridge2Api
implementation libs.joptSimple
implementation libs.re2j
implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation
implementation libs.jacksonJakartarsJsonProvider
+ compileOnly libs.spotbugs
+
testImplementation project(':clients')
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':server')
@@ -2473,7 +2496,6 @@ project(':tools') {
testImplementation(libs.jfreechart) {
exclude group: 'junit', module: 'junit'
}
- testImplementation libs.reload4j
testImplementation libs.apachedsCoreApi
testImplementation libs.apachedsInterceptorKerberos
testImplementation libs.apachedsProtocolShared
@@ -2513,7 +2535,9 @@ project(':trogdor') {
implementation libs.jacksonDatabind
implementation libs.jacksonJDK8Datatypes
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
implementation libs.jacksonJakartarsJsonProvider
implementation libs.jerseyContainerServlet
@@ -2534,13 +2558,16 @@ project(':trogdor') {
implementation project(':group-coordinator:group-coordinator-api')
testImplementation project(':clients')
- testImplementation libs.junitJupiter
testImplementation project(':clients').sourceSets.test.output
+ testImplementation project(':group-coordinator')
+ testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
- testImplementation project(':group-coordinator')
-
testRuntimeOnly runtimeTestLibs
+ testRuntimeOnly libs.log4j2Api
+ testRuntimeOnly libs.log4j2Core
+ testRuntimeOnly libs.log4j1Bridge2Api
+ testRuntimeOnly libs.junitPlatformLanucher
}
javadoc {
@@ -2585,6 +2612,7 @@ project(':shell') {
testImplementation project(':core')
testImplementation project(':server-common')
testImplementation project(':server-common').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testRuntimeOnly runtimeTestLibs
@@ -2629,9 +2657,13 @@ project(':streams') {
// testCompileOnly prevents streams from exporting a dependency on test-utils, which would cause a dependency cycle
testCompileOnly project(':streams:test-utils')
+ testCompileOnly libs.bndlib
testImplementation project(':clients').sourceSets.test.output
- testImplementation libs.reload4j
+ testImplementation libs.log4j2Api
+ testImplementation libs.log4j2Core
+ testImplementation libs.log4j1Bridge2Api
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.bcpkix
testImplementation libs.hamcrest
@@ -2774,6 +2806,7 @@ project(':streams:streams-scala') {
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':streams:test-utils')
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoJunitJupiter // supports MockitoExtension
testRuntimeOnly runtimeTestLibs
@@ -2830,10 +2863,13 @@ project(':streams:integration-tests') {
testImplementation project(':transaction-coordinator')
testImplementation libs.bcpkix
testImplementation libs.hamcrest
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.junitPlatformSuiteEngine // supports suite test
testImplementation libs.mockitoCore
- testImplementation libs.reload4j
+ testImplementation libs.log4j2Api
+ testImplementation libs.log4j2Core
+ testImplementation libs.log4j1Bridge2Api
testImplementation libs.slf4jApi
testImplementation project(':streams:test-utils')
@@ -2873,6 +2909,7 @@ project(':streams:test-utils') {
implementation libs.slf4jApi
testImplementation project(':clients').sourceSets.test.output
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.hamcrest
@@ -2904,7 +2941,7 @@ project(':streams:examples') {
implementation(project(':connect:json'))
implementation project(':streams')
- implementation libs.slf4jReload4j
+ implementation libs.slf4jLog4j2
testImplementation project(':streams:test-utils')
testImplementation project(':clients').sourceSets.test.output // for org.apache.kafka.test.IntegrationTest
@@ -3299,7 +3336,7 @@ project(':jmh-benchmarks') {
implementation libs.jacksonDatabind
implementation libs.metrics
implementation libs.mockitoCore
- implementation libs.slf4jReload4j
+ implementation libs.slf4jLog4j2
implementation libs.scalaLibrary
}
@@ -3343,7 +3380,9 @@ project(':connect:api') {
dependencies {
api project(':clients')
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
implementation libs.jakartaRsApi
testImplementation libs.junitJupiter
@@ -3379,7 +3418,9 @@ project(':connect:transforms') {
api project(':connect:api')
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
testImplementation libs.junitJupiter
@@ -3419,7 +3460,9 @@ project(':connect:json') {
api libs.jacksonBlackbird
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
testImplementation libs.junitJupiter
@@ -3464,8 +3507,10 @@ project(':connect:runtime') {
api project(':connect:transforms')
implementation libs.slf4jApi
- implementation libs.reload4j
- implementation libs.slf4jReload4j
+ implementation libs.slf4jLog4j2
+ implementation libs.log4j2Api
+ implementation libs.log4j2Core
+ implementation libs.log4j1Bridge2Api
implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation
implementation libs.jacksonAnnotations
implementation libs.jacksonJakartarsJsonProvider
@@ -3489,6 +3534,9 @@ project(':connect:runtime') {
implementation libs.mavenArtifact
implementation libs.swaggerAnnotations
+ compileOnly libs.bndlib
+ compileOnly libs.spotbugs
+
// We use this library to generate OpenAPI docs for the REST API, but we don't want or need it at compile
// or run time. So, we add it to a separate configuration, which we use later on during docs generation
swagger libs.swaggerJaxrs2
@@ -3507,11 +3555,14 @@ project(':connect:runtime') {
testImplementation project(':server-common').sourceSets.test.output
testImplementation project(':test-common:test-common-api')
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
testImplementation libs.mockitoJunitJupiter
testImplementation libs.httpclient
+ testCompileOnly libs.bndlib
+
testRuntimeOnly libs.bcpkix
testRuntimeOnly runtimeTestLibs
}
@@ -3606,10 +3657,14 @@ project(':connect:file') {
dependencies {
implementation project(':connect:api')
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.junitJupiter
testImplementation libs.mockitoCore
+
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':connect:runtime')
testImplementation project(':connect:runtime').sourceSets.test.output
@@ -3646,7 +3701,9 @@ project(':connect:basic-auth-extension') {
dependencies {
implementation project(':connect:api')
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
implementation libs.jakartaRsApi
implementation libs.jaxAnnotationApi
@@ -3691,7 +3748,9 @@ project(':connect:mirror') {
implementation libs.argparse4j
implementation libs.jacksonAnnotations
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
implementation libs.jacksonAnnotations
implementation libs.jacksonJakartarsJsonProvider
implementation libs.jerseyContainerServlet
@@ -3713,7 +3772,11 @@ project(':connect:mirror') {
implementation libs.swaggerAnnotations
testImplementation libs.junitJupiter
- testImplementation libs.reload4j
+ testImplementation libs.log4j2Api
+ testImplementation libs.log4j2Core
+ testImplementation libs.log4j1Bridge2Api
+ testImplementation libs.bndlib
+ testImplementation libs.jacksonDatabindYaml
testImplementation libs.mockitoCore
testImplementation project(':clients').sourceSets.test.output
testImplementation project(':connect:runtime').sourceSets.test.output
@@ -3781,7 +3844,9 @@ project(':connect:mirror-client') {
dependencies {
implementation project(':clients')
implementation libs.slf4jApi
- runtimeOnly libs.reload4j
+ runtimeOnly libs.log4j2Api
+ runtimeOnly libs.log4j2Core
+ runtimeOnly libs.log4j1Bridge2Api
testImplementation libs.junitJupiter
testImplementation project(':clients').sourceSets.test.output
diff --git a/checkstyle/import-control-core.xml b/checkstyle/import-control-core.xml
index a8dc78160e3..3cfd0ce663c 100644
--- a/checkstyle/import-control-core.xml
+++ b/checkstyle/import-control-core.xml
@@ -114,7 +114,14 @@
-
+
+
+
+
+
+
+
+
@@ -136,7 +143,7 @@
-
+
@@ -146,6 +153,14 @@
+
+
+
+
+
+
+
+
diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml
index 76aa4a2b175..0b9e7dd717b 100644
--- a/checkstyle/import-control.xml
+++ b/checkstyle/import-control.xml
@@ -201,7 +201,7 @@
-
+
@@ -225,7 +225,7 @@
-
+
@@ -308,6 +308,10 @@
+
+
+
+
@@ -391,7 +395,7 @@
-
+
@@ -553,6 +557,7 @@
+
@@ -568,7 +573,7 @@
-
+
@@ -580,6 +585,7 @@
+
diff --git a/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java b/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java
index 36d15c0fe94..33ca2844305 100644
--- a/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java
+++ b/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java
@@ -104,7 +104,7 @@ import org.apache.kafka.test.MockConsumerInterceptor;
import org.apache.kafka.test.MockMetricsReporter;
import org.apache.kafka.test.TestUtils;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Timeout;
import org.junit.jupiter.params.ParameterizedTest;
diff --git a/clients/src/test/java/org/apache/kafka/clients/consumer/internals/CoordinatorRequestManagerTest.java b/clients/src/test/java/org/apache/kafka/clients/consumer/internals/CoordinatorRequestManagerTest.java
index 4f59db3d863..7e805dc3cd3 100644
--- a/clients/src/test/java/org/apache/kafka/clients/consumer/internals/CoordinatorRequestManagerTest.java
+++ b/clients/src/test/java/org/apache/kafka/clients/consumer/internals/CoordinatorRequestManagerTest.java
@@ -32,7 +32,7 @@ import org.apache.kafka.common.utils.LogCaptureAppender;
import org.apache.kafka.common.utils.LogContext;
import org.apache.kafka.common.utils.MockTime;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
diff --git a/clients/src/test/java/org/apache/kafka/clients/producer/KafkaProducerTest.java b/clients/src/test/java/org/apache/kafka/clients/producer/KafkaProducerTest.java
index d72e576a78d..08d6d994abc 100644
--- a/clients/src/test/java/org/apache/kafka/clients/producer/KafkaProducerTest.java
+++ b/clients/src/test/java/org/apache/kafka/clients/producer/KafkaProducerTest.java
@@ -94,7 +94,7 @@ import org.apache.kafka.test.MockProducerInterceptor;
import org.apache.kafka.test.MockSerializer;
import org.apache.kafka.test.TestUtils;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
diff --git a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java
index 1194b9a5de2..2df74c8681c 100644
--- a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java
+++ b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java
@@ -16,31 +16,37 @@
*/
package org.apache.kafka.common.utils;
-import org.apache.log4j.AppenderSkeleton;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
-import org.apache.log4j.spi.LoggingEvent;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.config.Property;
+import java.io.PrintWriter;
+import java.io.StringWriter;
+import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
-public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseable {
- private final List events = new LinkedList<>();
+public class LogCaptureAppender extends AbstractAppender implements AutoCloseable {
+ private final List events = new LinkedList<>();
private final List logLevelChanges = new LinkedList<>();
+ private final List loggers = new ArrayList<>();
public static class LogLevelChange {
+ private final Level originalLevel;
+ private final Class> clazz;
public LogLevelChange(final Level originalLevel, final Class> clazz) {
this.originalLevel = originalLevel;
this.clazz = clazz;
}
-
- private final Level originalLevel;
-
- private final Class> clazz;
-
}
@SuppressWarnings("OptionalUsedAsFieldOrParameterType")
@@ -74,31 +80,53 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
}
}
+ public LogCaptureAppender(String name) {
+ super(name, null, null, true, Property.EMPTY_ARRAY);
+ }
+
public static LogCaptureAppender createAndRegister() {
- final LogCaptureAppender logCaptureAppender = new LogCaptureAppender();
- Logger.getRootLogger().addAppender(logCaptureAppender);
+ final LogCaptureAppender logCaptureAppender = new LogCaptureAppender("LogCaptureAppender");
+ Logger logger = LogManager.getRootLogger();
+ logCaptureAppender.addToLogger(logger);
return logCaptureAppender;
}
public static LogCaptureAppender createAndRegister(final Class> clazz) {
- final LogCaptureAppender logCaptureAppender = new LogCaptureAppender();
- Logger.getLogger(clazz).addAppender(logCaptureAppender);
+ final LogCaptureAppender logCaptureAppender = new LogCaptureAppender("LogCaptureAppender");
+ Logger logger = LogManager.getLogger(clazz);
+ logCaptureAppender.addToLogger(logger);
return logCaptureAppender;
}
- public void setClassLogger(final Class> clazz, Level level) {
- logLevelChanges.add(new LogLevelChange(Logger.getLogger(clazz).getLevel(), clazz));
- Logger.getLogger(clazz).setLevel(level);
+ public void addToLogger(Logger logger) {
+ org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
+ this.start();
+ coreLogger.addAppender(this);
+ loggers.add(coreLogger);
}
- public static void unregister(final LogCaptureAppender logCaptureAppender) {
- Logger.getRootLogger().removeAppender(logCaptureAppender);
+ public void setClassLogger(final Class> clazz, Level level) {
+ LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ Configuration config = ctx.getConfiguration();
+ String loggerName = clazz.getName();
+ LoggerConfig loggerConfig = config.getLoggerConfig(loggerName);
+
+ Level originalLevel = loggerConfig.getLevel();
+ logLevelChanges.add(new LogLevelChange(originalLevel, clazz));
+
+ if (!loggerConfig.getName().equals(loggerName)) {
+ LoggerConfig newLoggerConfig = new LoggerConfig(loggerName, level, true);
+ config.addLogger(loggerName, newLoggerConfig);
+ } else {
+ loggerConfig.setLevel(level);
+ }
+ ctx.updateLoggers();
}
@Override
- protected void append(final LoggingEvent event) {
+ public void append(final LogEvent event) {
synchronized (events) {
- events.add(event);
+ events.add(event.toImmutable());
}
}
@@ -112,8 +140,8 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
public List<String> getMessages() {
final LinkedList<String> result = new LinkedList<>();
synchronized (events) {
- for (final LoggingEvent event : events) {
- result.add(event.getRenderedMessage());
+ for (final LogEvent event : events) {
+ result.add(event.getMessage().getFormattedMessage());
}
}
return result;
@@ -122,25 +150,26 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
public List<Event> getEvents() {
final LinkedList<Event> result = new LinkedList<>();
synchronized (events) {
- for (final LoggingEvent event : events) {
- final String[] throwableStrRep = event.getThrowableStrRep();
+ for (final LogEvent event : events) {
+ final Throwable throwable = event.getThrown();
final Optional<String> throwableString;
final Optional<String> throwableClassName;
- if (throwableStrRep == null) {
+ if (throwable == null) {
throwableString = Optional.empty();
throwableClassName = Optional.empty();
} else {
- final StringBuilder throwableStringBuilder = new StringBuilder();
-
- for (final String s : throwableStrRep) {
- throwableStringBuilder.append(s);
- }
-
- throwableString = Optional.of(throwableStringBuilder.toString());
- throwableClassName = Optional.of(event.getThrowableInformation().getThrowable().getClass().getName());
+ StringWriter stringWriter = new StringWriter();
+ PrintWriter printWriter = new PrintWriter(stringWriter);
+ throwable.printStackTrace(printWriter);
+ throwableString = Optional.of(stringWriter.toString());
+ throwableClassName = Optional.of(throwable.getClass().getName());
}
- result.add(new Event(event.getLevel().toString(), event.getRenderedMessage(), throwableString, throwableClassName));
+ result.add(new Event(
+ event.getLevel().toString(),
+ event.getMessage().getFormattedMessage(),
+ throwableString,
+ throwableClassName));
}
}
return result;
@@ -148,15 +177,30 @@ public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseabl
@Override
public void close() {
+ LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
+ Configuration config = ctx.getConfiguration();
+
for (final LogLevelChange logLevelChange : logLevelChanges) {
- Logger.getLogger(logLevelChange.clazz).setLevel(logLevelChange.originalLevel);
+ String loggerName = logLevelChange.clazz.getName();
+ LoggerConfig loggerConfig = config.getLoggerConfig(loggerName);
+ if (!loggerConfig.getName().equals(loggerName)) {
+ LoggerConfig newLoggerConfig = new LoggerConfig(loggerName, logLevelChange.originalLevel, true);
+ config.addLogger(loggerName, newLoggerConfig);
+ } else {
+ loggerConfig.setLevel(logLevelChange.originalLevel);
+ }
}
logLevelChanges.clear();
- unregister(this);
+ ctx.updateLoggers();
+
+ unregister();
}
- @Override
- public boolean requiresLayout() {
- return false;
+ public void unregister() {
+ for (org.apache.logging.log4j.core.Logger logger : loggers) {
+ logger.removeAppender(this);
+ }
+ loggers.clear();
+ this.stop();
}
}
diff --git a/clients/src/test/resources/log4j2.yaml b/clients/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..bfe8b3835a0
--- /dev/null
+++ b/clients/src/test/resources/log4j2.yaml
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
level: "OFF"  # quoted: unquoted OFF resolves to a YAML 1.1 boolean, not the level name
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka
+ level: ERROR
+ # We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper
+ - name: org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper
+ level: INFO
diff --git a/config/connect-log4j.properties b/config/connect-log4j.properties
deleted file mode 100644
index 979cb3869f9..00000000000
--- a/config/connect-log4j.properties
+++ /dev/null
@@ -1,39 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-log4j.rootLogger=INFO, stdout, connectAppender
-
-# Send the logs to the console.
-#
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-
-# Send the logs to a file, rolling the file at midnight local time. For example, the `File` option specifies the
-# location of the log files (e.g. ${kafka.logs.dir}/connect.log), and at midnight local time the file is closed
-# and copied in the same directory but with a filename that ends in the `DatePattern` option.
-#
-log4j.appender.connectAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.connectAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.connectAppender.File=${kafka.logs.dir}/connect.log
-log4j.appender.connectAppender.layout=org.apache.log4j.PatternLayout
-
-# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
-# in the log messages, where appropriate. This makes it easier to identify those log messages that apply to a
-# specific connector.
-#
-connect.log.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n
-
-log4j.appender.stdout.layout.ConversionPattern=${connect.log.pattern}
-log4j.appender.connectAppender.layout.ConversionPattern=${connect.log.pattern}
diff --git a/config/connect-log4j2.yaml b/config/connect-log4j2.yaml
new file mode 100644
index 00000000000..89a9a967365
--- /dev/null
+++ b/config/connect-log4j2.yaml
@@ -0,0 +1,44 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "kafka.logs.dir"
+ value: "."
+ - name: "logPattern"
+ value: "[%d] %p %X{connector.context}%m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ RollingFile:
+ - name: ConnectAppender
+ fileName: "${sys:kafka.logs.dir}/connect.log"
+ filePattern: "${sys:kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ modulate: true
+ interval: 1
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ - ref: ConnectAppender
diff --git a/config/log4j.properties b/config/log4j.properties
deleted file mode 100644
index bcf2b9daa4e..00000000000
--- a/config/log4j.properties
+++ /dev/null
@@ -1,93 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Unspecified loggers and loggers with additivity=true output to server.log and stdout
-# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
-log4j.rootLogger=INFO, stdout, kafkaAppender
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
-log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
-log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
-log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
-log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
-log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
-log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
-log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log
-log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
-
-# Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
-log4j.logger.kafka=INFO
-log4j.logger.org.apache.kafka=INFO
-
-# Change to DEBUG or TRACE to enable request logging
-log4j.logger.kafka.request.logger=WARN, requestAppender
-log4j.additivity.kafka.request.logger=false
-
-# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
-# related to the handling of requests
-#log4j.logger.kafka.network.Processor=TRACE, requestAppender
-#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
-#log4j.additivity.kafka.server.KafkaApis=false
-log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender
-log4j.additivity.kafka.network.RequestChannel$=false
-
-# Change the line below to adjust KRaft mode controller logging
-log4j.logger.org.apache.kafka.controller=INFO, controllerAppender
-log4j.additivity.org.apache.kafka.controller=false
-
-# Change the line below to adjust ZK mode controller logging
-log4j.logger.kafka.controller=TRACE, controllerAppender
-log4j.additivity.kafka.controller=false
-
-log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
-log4j.additivity.kafka.log.LogCleaner=false
-
-log4j.logger.state.change.logger=INFO, stateChangeAppender
-log4j.additivity.state.change.logger=false
-
-# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses
-log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
-log4j.additivity.kafka.authorizer.logger=false
-
diff --git a/config/log4j2.yaml b/config/log4j2.yaml
new file mode 100644
index 00000000000..2b000d407e2
--- /dev/null
+++ b/config/log4j2.yaml
@@ -0,0 +1,158 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Unspecified loggers and loggers with additivity=true output to server.log and stdout
+# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
+Configuration:
+ Properties:
+ Property:
+ # Fallback if the system property is not set
+ - name: "kafka.logs.dir"
+ value: "."
+ - name: "logPattern"
+ value: "[%d] %p %m (%c)%n"
+
+ # Appenders configuration
+ # See: https://logging.apache.org/log4j/2.x/manual/appenders.html
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ RollingFile:
+ - name: KafkaAppender
+ fileName: "${sys:kafka.logs.dir}/server.log"
+ filePattern: "${sys:kafka.logs.dir}/server.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ modulate: true
+ interval: 1
+ # State Change appender
+ - name: StateChangeAppender
+ fileName: "${sys:kafka.logs.dir}/state-change.log"
+ filePattern: "${sys:kafka.logs.dir}/state-change.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ modulate: true
+ interval: 1
+ # Request appender
+ - name: RequestAppender
+ fileName: "${sys:kafka.logs.dir}/kafka-request.log"
+ filePattern: "${sys:kafka.logs.dir}/kafka-request.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ modulate: true
+ interval: 1
+ # Cleaner appender
+ - name: CleanerAppender
+ fileName: "${sys:kafka.logs.dir}/log-cleaner.log"
+ filePattern: "${sys:kafka.logs.dir}/log-cleaner.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ modulate: true
+ interval: 1
+ # Controller appender
+ - name: ControllerAppender
+ fileName: "${sys:kafka.logs.dir}/controller.log"
+ filePattern: "${sys:kafka.logs.dir}/controller.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ modulate: true
+ interval: 1
+ # Authorizer appender
+ - name: AuthorizerAppender
+ fileName: "${sys:kafka.logs.dir}/kafka-authorizer.log"
+ filePattern: "${sys:kafka.logs.dir}/kafka-authorizer.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ modulate: true
+ interval: 1
+
+ # Loggers configuration
+ # See: https://logging.apache.org/log4j/2.x/manual/configuration.html#configuring-loggers
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ - ref: KafkaAppender
+ Logger:
+ # Kafka logger
+ - name: kafka
+ level: INFO
+ # Kafka org.apache logger
+ - name: org.apache.kafka
+ level: INFO
+ # Kafka request logger
+ - name: kafka.request.logger
+ level: WARN
+ additivity: false
+ AppenderRef:
+ ref: RequestAppender
+ # Uncomment the loggers below and change the level of kafka.network.RequestChannel$ to TRACE
+ # for additional output related to the handling of requests
+# - name: kafka.network.Processor
+# level: TRACE
+# additivity: false
+# AppenderRef:
+# ref: RequestAppender
+# - name: kafka.server.KafkaApis
+# level: TRACE
+# additivity: false
+# AppenderRef:
+# ref: RequestAppender
+ # Kafka network RequestChannel$ logger
+ - name: kafka.network.RequestChannel$
+ level: WARN
+ additivity: false
+ AppenderRef:
+ ref: RequestAppender
+ # KRaft mode controller logger
+ - name: org.apache.kafka.controller
+ level: INFO
+ additivity: false
+ AppenderRef:
+ ref: ControllerAppender
+ # ZK mode controller logger
+ - name: kafka.controller
+ level: TRACE
+ additivity: false
+ AppenderRef:
+ ref: ControllerAppender
+ # LogCleaner logger
+ - name: kafka.log.LogCleaner
+ level: INFO
+ additivity: false
+ AppenderRef:
+ ref: CleanerAppender
+ # State change logger
+ - name: state.change.logger
+ level: INFO
+ additivity: false
+ AppenderRef:
+ ref: StateChangeAppender
+ # Authorizer logger
+ - name: kafka.authorizer.logger
+ level: INFO
+ additivity: false
+ AppenderRef:
+ ref: AuthorizerAppender
\ No newline at end of file
diff --git a/connect/file/src/test/resources/log4j.properties b/connect/file/src/test/resources/log4j.properties
deleted file mode 100644
index 548e8c33cfb..00000000000
--- a/connect/file/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,28 +0,0 @@
-##
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-#
-# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
-# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a
-# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information.
-#
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n
-
-log4j.logger.kafka=WARN
diff --git a/connect/file/src/test/resources/log4j2.yaml b/connect/file/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..1e9f550fa6d
--- /dev/null
+++ b/connect/file/src/test/resources/log4j2.yaml
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %X{connector.context}%m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: kafka
+ level: WARN
diff --git a/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java b/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java
index 588baf8c090..21bcc7cbad5 100644
--- a/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java
+++ b/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java
@@ -40,7 +40,7 @@ import org.apache.kafka.connect.connector.ConnectorContext;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.source.ExactlyOnceSupport;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
diff --git a/connect/mirror/src/test/resources/log4j.properties b/connect/mirror/src/test/resources/log4j.properties
deleted file mode 100644
index c4ca6a2388f..00000000000
--- a/connect/mirror/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,33 +0,0 @@
-##
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-#
-# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
-# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a
-# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information.
-#
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n
-#
-# The following line includes no MDC context parameters:
-#log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n (%t)
-
-log4j.logger.kafka=WARN
-log4j.logger.state.change.logger=OFF
-log4j.logger.org.apache.kafka.connect=DEBUG
diff --git a/connect/mirror/src/test/resources/log4j2.yaml b/connect/mirror/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..b63606d0ba5
--- /dev/null
+++ b/connect/mirror/src/test/resources/log4j2.yaml
@@ -0,0 +1,41 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %X{connector.context}%m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: kafka
+ level: WARN
+
+ - name: state.change.logger
+ level: "OFF"
+
+ - name: org.apache.kafka.connect
+ level: DEBUG
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java
index 43772802070..c4f53ad4137 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java
@@ -65,7 +65,7 @@ import org.apache.kafka.connect.util.ConnectorTaskId;
import org.apache.kafka.connect.util.Stage;
import org.apache.kafka.connect.util.TemporaryStage;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.apache.maven.artifact.versioning.InvalidVersionSpecificationException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.slf4j.Logger;
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java
index 9e59b13d34a..0c16d0d6f01 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java
@@ -19,19 +19,23 @@ package org.apache.kafka.connect.runtime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel;
-import org.apache.log4j.Level;
-import org.apache.log4j.LogManager;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configurator;
+import org.apache.logging.log4j.core.config.LoggerConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Collections;
-import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.TreeMap;
+import java.util.stream.Collectors;
/**
* Manages logging levels on a single worker. Supports dynamic adjustment and querying
@@ -46,6 +50,10 @@ public class Loggers {
/**
* Log4j uses "root" (case-insensitive) as name of the root logger.
+ * Note: In log4j, the root logger's name was "root", and Kafka also used that name for the dynamic logging control feature.
+ *
+ * The root logger's name was changed in log4j2 to the empty string (see: [[LogManager.ROOT_LOGGER_NAME]]), but for
+ * backward compatibility Kafka keeps its original root logger name. That is why a dedicated definition of the root
+ * logger name is kept here.
*/
private static final String ROOT_LOGGER_NAME = "root";
@@ -66,18 +74,17 @@ public class Loggers {
public synchronized LoggerLevel level(String logger) {
Objects.requireNonNull(logger, "Logger may not be null");
- org.apache.log4j.Logger foundLogger = null;
+ org.apache.logging.log4j.Logger foundLogger = null;
if (ROOT_LOGGER_NAME.equalsIgnoreCase(logger)) {
foundLogger = rootLogger();
} else {
- Enumeration<org.apache.log4j.Logger> en = currentLoggers();
+ List<org.apache.logging.log4j.Logger> currentLoggers = currentLoggers();
// search within existing loggers for the given name.
// using LogManger.getLogger() will create a logger if it doesn't exist
// (potential leak since these don't get cleaned up).
- while (en.hasMoreElements()) {
- org.apache.log4j.Logger l = en.nextElement();
- if (logger.equals(l.getName())) {
- foundLogger = l;
+ for (org.apache.logging.log4j.Logger currentLogger : currentLoggers) {
+ if (logger.equals(currentLogger.getName())) {
+ foundLogger = currentLogger;
break;
}
}
@@ -98,14 +105,12 @@ public class Loggers {
public synchronized Map<String, LoggerLevel> allLevels() {
Map<String, LoggerLevel> result = new TreeMap<>();
- Enumeration<org.apache.log4j.Logger> enumeration = currentLoggers();
- Collections.list(enumeration)
- .stream()
- .filter(logger -> logger.getLevel() != null)
+ currentLoggers().stream()
+ .filter(logger -> !logger.getLevel().equals(Level.OFF))
.forEach(logger -> result.put(logger.getName(), loggerLevel(logger)));
- org.apache.log4j.Logger root = rootLogger();
- if (root.getLevel() != null) {
+ org.apache.logging.log4j.Logger root = rootLogger();
+ if (!root.getLevel().equals(Level.OFF)) {
result.put(ROOT_LOGGER_NAME, loggerLevel(root));
}
@@ -124,10 +129,10 @@ public class Loggers {
Objects.requireNonNull(level, "Level may not be null");
log.info("Setting level of namespace {} and children to {}", namespace, level);
- List<org.apache.log4j.Logger> childLoggers = loggers(namespace);
+ List<org.apache.logging.log4j.Logger> childLoggers = loggers(namespace);
List<String> result = new ArrayList<>();
- for (org.apache.log4j.Logger logger: childLoggers) {
+ for (org.apache.logging.log4j.Logger logger: childLoggers) {
setLevel(logger, level);
result.add(logger.getName());
}
@@ -143,25 +148,24 @@ public class Loggers {
* @return all loggers that fall under the given namespace; never null, and will always contain
* at least one logger (the ancestor logger for the namespace)
*/
- private synchronized List<org.apache.log4j.Logger> loggers(String namespace) {
+ private synchronized List<org.apache.logging.log4j.Logger> loggers(String namespace) {
Objects.requireNonNull(namespace, "Logging namespace may not be null");
if (ROOT_LOGGER_NAME.equalsIgnoreCase(namespace)) {
- List<org.apache.log4j.Logger> result = Collections.list(currentLoggers());
+ List<org.apache.logging.log4j.Logger> result = currentLoggers();
result.add(rootLogger());
return result;
}
- List<org.apache.log4j.Logger> result = new ArrayList<>();
- org.apache.log4j.Logger ancestorLogger = lookupLogger(namespace);
- Enumeration<org.apache.log4j.Logger> en = currentLoggers();
+ List<org.apache.logging.log4j.Logger> result = new ArrayList<>();
+ org.apache.logging.log4j.Logger ancestorLogger = lookupLogger(namespace);
+ List<org.apache.logging.log4j.Logger> currentLoggers = currentLoggers();
boolean present = false;
- while (en.hasMoreElements()) {
- org.apache.log4j.Logger current = en.nextElement();
- if (current.getName().startsWith(namespace)) {
- result.add(current);
+ for (org.apache.logging.log4j.Logger currentLogger : currentLoggers) {
+ if (currentLogger.getName().startsWith(namespace)) {
+ result.add(currentLogger);
}
- if (namespace.equals(current.getName())) {
+ if (namespace.equals(currentLogger.getName())) {
present = true;
}
}
@@ -174,43 +178,46 @@ public class Loggers {
}
// visible for testing
- org.apache.log4j.Logger lookupLogger(String logger) {
+ org.apache.logging.log4j.Logger lookupLogger(String logger) {
return LogManager.getLogger(logger);
}
- @SuppressWarnings("unchecked")
- // visible for testing
- Enumeration<org.apache.log4j.Logger> currentLoggers() {
- return LogManager.getCurrentLoggers();
+ List<org.apache.logging.log4j.Logger> currentLoggers() {
+ LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ Collection<LoggerConfig> loggerConfigs = context.getConfiguration().getLoggers().values();
+ return loggerConfigs.stream()
+ .map(LoggerConfig::getName)
+ .distinct()
+ .map(LogManager::getLogger)
+ .collect(Collectors.toCollection(ArrayList::new));
}
// visible for testing
- org.apache.log4j.Logger rootLogger() {
+ org.apache.logging.log4j.Logger rootLogger() {
return LogManager.getRootLogger();
}
- private void setLevel(org.apache.log4j.Logger logger, Level level) {
- Level currentLevel = logger.getLevel();
- if (currentLevel == null)
- currentLevel = logger.getEffectiveLevel();
+ private void setLevel(org.apache.logging.log4j.Logger logger, Level level) {
+ String loggerName = logger.getName();
+ LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ LoggerConfig loggerConfig = context.getConfiguration().getLoggerConfig(loggerName);
+ Level currentLevel = loggerConfig.getLevel();
if (level.equals(currentLevel)) {
- log.debug("Skipping update for logger {} since its level is already {}", logger.getName(), level);
+ log.debug("Skipping update for logger {} since its level is already {}", loggerName, level);
return;
}
- log.debug("Setting level of logger {} (excluding children) to {}", logger.getName(), level);
- logger.setLevel(level);
- lastModifiedTimes.put(logger.getName(), time.milliseconds());
+ log.debug("Setting level of logger {} (excluding children) to {}", loggerName, level);
+ Configurator.setLevel(loggerName, level);
+ lastModifiedTimes.put(loggerName, time.milliseconds());
}
- private LoggerLevel loggerLevel(org.apache.log4j.Logger logger) {
- Level level = logger.getLevel();
- if (level == null)
- level = logger.getEffectiveLevel();
-
+ private LoggerLevel loggerLevel(org.apache.logging.log4j.Logger logger) {
+ LoggerContext context = (LoggerContext) LogManager.getContext(false);
+ LoggerConfig loggerConfig = context.getConfiguration().getLoggerConfig(logger.getName());
+ Level level = loggerConfig.getLevel();
Long lastModified = lastModifiedTimes.get(logger.getName());
return new LoggerLevel(Objects.toString(level), lastModified);
}
-
}
diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java
index 85be83061eb..dbbfb46375d 100644
--- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java
+++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java
@@ -21,7 +21,7 @@ import org.apache.kafka.connect.runtime.Herder;
import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel;
import org.apache.kafka.connect.runtime.rest.errors.BadRequestException;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.slf4j.LoggerFactory;
import java.util.List;
diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java
index 3dbe688a076..23e5f753cde 100644
--- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java
+++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java
@@ -20,9 +20,13 @@ import org.apache.kafka.common.utils.MockTime;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel;
-import org.apache.log4j.Hierarchy;
-import org.apache.log4j.Level;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.Configurator;
+import org.apache.logging.log4j.core.config.LoggerConfig;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
@@ -30,13 +34,12 @@ import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
-import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
-import java.util.Vector;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
@@ -59,12 +62,15 @@ public class LoggersTest {
@Test
public void testGetLoggersIgnoresNullLevels() {
- Logger root = logger("root");
+ LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
+ Logger root = loggerContext.getRootLogger();
+ Configurator.setLevel(root, Level.OFF);
- Logger a = logger("a");
- a.setLevel(null);
- Logger b = logger("b");
- b.setLevel(Level.INFO);
+ Logger a = loggerContext.getLogger("a");
+ Configurator.setLevel(a, null);
+
+ Logger b = loggerContext.getLogger("b");
+ Configurator.setLevel(b, Level.INFO);
Loggers loggers = new TestLoggers(root, a, b);
@@ -78,14 +84,15 @@ public class LoggersTest {
@Test
public void testGetLoggerFallsBackToEffectiveLogLevel() {
- Logger root = logger("root");
- root.setLevel(Level.ERROR);
+ LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
+ Logger root = loggerContext.getRootLogger();
+ Configurator.setLevel(root, Level.ERROR);
- Hierarchy hierarchy = new Hierarchy(root);
- Logger a = hierarchy.getLogger("a");
- a.setLevel(null);
- Logger b = hierarchy.getLogger("b");
- b.setLevel(Level.INFO);
+ Logger a = loggerContext.getLogger("a");
+ Configurator.setLevel(a, null);
+
+ Logger b = loggerContext.getLogger("b");
+ Configurator.setLevel(b, Level.INFO);
Loggers loggers = new TestLoggers(root, a, b);
@@ -96,14 +103,15 @@ public class LoggersTest {
@Test
public void testGetUnknownLogger() {
- Logger root = logger("root");
- root.setLevel(Level.ERROR);
+ LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
+ Logger root = loggerContext.getRootLogger();
+ Configurator.setLevel(root, Level.ERROR);
- Hierarchy hierarchy = new Hierarchy(root);
- Logger a = hierarchy.getLogger("a");
- a.setLevel(null);
- Logger b = hierarchy.getLogger("b");
- b.setLevel(Level.INFO);
+ Logger a = loggerContext.getLogger("a");
+ Configurator.setLevel(a, null);
+
+ Logger b = loggerContext.getLogger("b");
+ Configurator.setLevel(b, Level.INFO);
Loggers loggers = new TestLoggers(root, a, b);
@@ -113,17 +121,18 @@ public class LoggersTest {
@Test
public void testSetLevel() {
- Logger root = logger("root");
- root.setLevel(Level.ERROR);
+ LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
+ Logger root = loggerContext.getRootLogger();
+ Configurator.setLevel(root, Level.ERROR);
- Logger x = logger("a.b.c.p.X");
- Logger y = logger("a.b.c.p.Y");
- Logger z = logger("a.b.c.p.Z");
- Logger w = logger("a.b.c.s.W");
- x.setLevel(Level.INFO);
- y.setLevel(Level.INFO);
- z.setLevel(Level.INFO);
- w.setLevel(Level.INFO);
+ Logger x = loggerContext.getLogger("a.b.c.p.X");
+ Logger y = loggerContext.getLogger("a.b.c.p.Y");
+ Logger z = loggerContext.getLogger("a.b.c.p.Z");
+ Logger w = loggerContext.getLogger("a.b.c.s.W");
+ Configurator.setLevel(x, Level.INFO);
+ Configurator.setLevel(y, Level.INFO);
+ Configurator.setLevel(z, Level.INFO);
+ Configurator.setLevel(w, Level.INFO);
// We don't explicitly register a logger for a.b.c.p, so it won't appear in the list of current loggers;
// one should be created by the Loggers instance when we set the level
@@ -166,25 +175,37 @@ public class LoggersTest {
@Test
public void testSetRootLevel() {
- Logger root = logger("root");
- root.setLevel(Level.ERROR);
+ // In this test case, we focus on setting the level for the root logger.
+ // Ideally, we want to start with a "clean" configuration to conduct this test case.
+ // By programmatically creating a new configuration at the beginning, we can ensure
+ // that this test case is not affected by existing Log4j configurations.
+ LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
+ Configuration config = loggerContext.getConfiguration();
+ String rootLoggerName = "root";
+ LoggerConfig rootConfig = new LoggerConfig(rootLoggerName, Level.ERROR, false);
+ config.addLogger(rootLoggerName, rootConfig);
+ loggerContext.updateLoggers();
- Logger p = logger("a.b.c.p");
- Logger x = logger("a.b.c.p.X");
- Logger y = logger("a.b.c.p.Y");
- Logger z = logger("a.b.c.p.Z");
- Logger w = logger("a.b.c.s.W");
- x.setLevel(Level.INFO);
- y.setLevel(Level.INFO);
- z.setLevel(Level.INFO);
- w.setLevel(Level.INFO);
+ Logger root = LogManager.getLogger(rootLoggerName);
+ Configurator.setLevel(root, Level.ERROR);
+
+ Logger p = loggerContext.getLogger("a.b.c.p");
+ Logger x = loggerContext.getLogger("a.b.c.p.X");
+ Logger y = loggerContext.getLogger("a.b.c.p.Y");
+ Logger z = loggerContext.getLogger("a.b.c.p.Z");
+ Logger w = loggerContext.getLogger("a.b.c.s.W");
+ Configurator.setLevel(p, Level.INFO);
+ Configurator.setLevel(x, Level.INFO);
+ Configurator.setLevel(y, Level.INFO);
+ Configurator.setLevel(z, Level.INFO);
+ Configurator.setLevel(w, Level.INFO);
Loggers loggers = new TestLoggers(root, x, y, z, w);
- List modified = loggers.setLevel("root", Level.DEBUG);
- assertEquals(Arrays.asList("a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W", "root"), modified);
+ List modified = loggers.setLevel(rootLoggerName, Level.DEBUG);
+ assertEquals(Arrays.asList("a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W", rootLoggerName), modified);
- assertNull(p.getLevel());
+ assertEquals(p.getLevel(), Level.INFO);
assertEquals(root.getLevel(), Level.DEBUG);
@@ -194,7 +215,7 @@ public class LoggersTest {
assertEquals(z.getLevel(), Level.DEBUG);
Map expectedLevels = new HashMap<>();
- expectedLevels.put("root", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
+ expectedLevels.put(rootLoggerName, new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
expectedLevels.put("a.b.c.p.X", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
expectedLevels.put("a.b.c.p.Y", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
expectedLevels.put("a.b.c.p.Z", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME));
@@ -206,7 +227,8 @@ public class LoggersTest {
@Test
public void testSetLevelNullArguments() {
- Logger root = logger("root");
+ LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false);
+ Logger root = loggerContext.getRootLogger();
Loggers loggers = new TestLoggers(root);
assertThrows(NullPointerException.class, () -> loggers.setLevel(null, Level.INFO));
assertThrows(NullPointerException.class, () -> loggers.setLevel("root", null));
@@ -229,12 +251,12 @@ public class LoggersTest {
@Override
Logger lookupLogger(String logger) {
- return currentLoggers.computeIfAbsent(logger, l -> new Logger(logger) { });
+ return currentLoggers.computeIfAbsent(logger, LogManager::getLogger);
}
@Override
- Enumeration currentLoggers() {
- return new Vector<>(currentLoggers.values()).elements();
+ List currentLoggers() {
+ return new ArrayList<>(currentLoggers.values());
}
@Override
@@ -242,9 +264,4 @@ public class LoggersTest {
return rootLogger;
}
}
-
- private Logger logger(String name) {
- return new Logger(name) { };
- }
-
}
diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java
index bd70ed357c6..106659d0f8f 100644
--- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java
+++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java
@@ -22,7 +22,7 @@ import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.util.ConnectorTaskId;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java
index 77d56a207d7..3ddbf164494 100644
--- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java
+++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java
@@ -54,7 +54,7 @@ import org.apache.kafka.connect.util.ConnectorTaskId;
import org.apache.kafka.connect.util.TopicAdmin;
import org.apache.kafka.connect.util.TopicCreationGroup;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java
index 58ca25ce0f0..13727c5b438 100644
--- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java
+++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java
@@ -353,7 +353,6 @@ public class ConnectRestServerTest {
server.stop();
Collection logMessages = restServerAppender.getMessages();
- LogCaptureAppender.unregister(restServerAppender);
restServerAppender.close();
String expectedlogContent = "\"GET / HTTP/1.1\" " + response.getStatusLine().getStatusCode();
assertTrue(logMessages.stream().anyMatch(logMessage -> logMessage.contains(expectedlogContent)));
diff --git a/connect/runtime/src/test/resources/log4j.properties b/connect/runtime/src/test/resources/log4j.properties
deleted file mode 100644
index de7180c282a..00000000000
--- a/connect/runtime/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,37 +0,0 @@
-##
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-##
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-#
-# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information
-# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a
-# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information.
-#
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n
-#
-# The following line includes no MDC context parameters:
-#log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n (%t)
-
-log4j.logger.kafka=WARN
-log4j.logger.state.change.logger=OFF
-log4j.logger.org.apache.kafka.connect=DEBUG
-
-# Troubleshooting KAFKA-17493.
-log4j.logger.org.apache.kafka.consumer=DEBUG
-log4j.logger.org.apache.kafka.coordinator.group=DEBUG
\ No newline at end of file
diff --git a/connect/runtime/src/test/resources/log4j2.yaml b/connect/runtime/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..45faa635378
--- /dev/null
+++ b/connect/runtime/src/test/resources/log4j2.yaml
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %X{connector.context}%m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: kafka
+ level: WARN
+
+ - name: state.change.logger
+ level: "OFF"
+
+ - name: org.apache.kafka.connect
+ level: DEBUG
+
+ # Troubleshooting KAFKA-17493.
+ - name: org.apache.kafka.consumer
+ level: DEBUG
+
+ - name: org.apache.kafka.coordinator.group
+ level: DEBUG
diff --git a/core/src/main/scala/kafka/utils/Log4jController.scala b/core/src/main/scala/kafka/utils/Log4jController.scala
index 0d54c74e075..4bc022dadfe 100755
--- a/core/src/main/scala/kafka/utils/Log4jController.scala
+++ b/core/src/main/scala/kafka/utils/Log4jController.scala
@@ -17,83 +17,90 @@
package kafka.utils
+import org.apache.kafka.common.utils.Utils
+import org.apache.logging.log4j.core.LoggerContext
+import org.apache.logging.log4j.core.config.Configurator
+import org.apache.logging.log4j.{Level, LogManager}
+
import java.util
import java.util.Locale
-
-import org.apache.kafka.common.utils.Utils
-import org.apache.log4j.{Level, LogManager, Logger}
-
-import scala.collection.mutable
import scala.jdk.CollectionConverters._
object Log4jController {
+
+ /**
+   * Note: In log4j, the root logger's name was "root" and Kafka also followed that name for its dynamic logging control feature.
+   *
+   * The root logger's name was changed in log4j2 to the empty string (see: [[LogManager.ROOT_LOGGER_NAME]]), but for backward
+   * compatibility Kafka keeps its original root logger name. That is why there is a dedicated definition for the root logger name here.
+ */
val ROOT_LOGGER = "root"
- private def resolveLevel(logger: Logger): String = {
- var name = logger.getName
- var level = logger.getLevel
- while (level == null) {
- val index = name.lastIndexOf(".")
- if (index > 0) {
- name = name.substring(0, index)
- val ancestor = existingLogger(name)
- if (ancestor != null) {
- level = ancestor.getLevel
- }
- } else {
- level = existingLogger(ROOT_LOGGER).getLevel
- }
- }
- level.toString
+ /**
+ * Returns a map of the log4j loggers and their assigned log level.
+ * If a logger does not have a log level assigned, we return the log level of the first ancestor with a level configured.
+ */
+ def loggers: Map[String, String] = {
+ val logContext = LogManager.getContext(false).asInstanceOf[LoggerContext]
+ val rootLoggerLevel = logContext.getRootLogger.getLevel.toString
+
+ // Loggers defined in the configuration
+ val configured = logContext.getConfiguration.getLoggers.asScala
+ .values
+ .filterNot(_.getName.equals(LogManager.ROOT_LOGGER_NAME))
+ .map { logger =>
+ logger.getName -> logger.getLevel.toString
+ }.toMap
+
+ // Loggers actually running
+ val actual = logContext.getLoggers.asScala
+ .filterNot(_.getName.equals(LogManager.ROOT_LOGGER_NAME))
+ .map { logger =>
+ logger.getName -> logger.getLevel.toString
+ }.toMap
+
+ (configured ++ actual) + (ROOT_LOGGER -> rootLoggerLevel)
}
/**
- * Returns a map of the log4j loggers and their assigned log level.
- * If a logger does not have a log level assigned, we return the root logger's log level
- */
- def loggers: mutable.Map[String, String] = {
- val logs = new mutable.HashMap[String, String]()
- val rootLoggerLvl = existingLogger(ROOT_LOGGER).getLevel.toString
- logs.put(ROOT_LOGGER, rootLoggerLvl)
-
- val loggers = LogManager.getCurrentLoggers
- while (loggers.hasMoreElements) {
- val logger = loggers.nextElement().asInstanceOf[Logger]
- if (logger != null) {
- logs.put(logger.getName, resolveLevel(logger))
- }
- }
- logs
- }
-
- /**
- * Sets the log level of a particular logger
- */
+ * Sets the log level of a particular logger. If the given logLevel is not an available log4j level
+ * (i.e., one of OFF, FATAL, ERROR, WARN, INFO, DEBUG, TRACE, ALL) it falls back to DEBUG.
+ *
+ * @see [[Level.toLevel]]
+ */
def logLevel(loggerName: String, logLevel: String): Boolean = {
- val log = existingLogger(loggerName)
- if (!Utils.isBlank(loggerName) && !Utils.isBlank(logLevel) && log != null) {
- log.setLevel(Level.toLevel(logLevel.toUpperCase(Locale.ROOT)))
+ if (Utils.isBlank(loggerName) || Utils.isBlank(logLevel))
+ return false
+
+ val level = Level.toLevel(logLevel.toUpperCase(Locale.ROOT))
+
+ if (loggerName == ROOT_LOGGER) {
+ Configurator.setAllLevels(LogManager.ROOT_LOGGER_NAME, level)
true
+ } else {
+ if (loggerExists(loggerName) && level != null) {
+ Configurator.setAllLevels(loggerName, level)
+ true
+ }
+ else false
}
- else false
}
def unsetLogLevel(loggerName: String): Boolean = {
- val log = existingLogger(loggerName)
- if (!Utils.isBlank(loggerName) && log != null) {
- log.setLevel(null)
+ if (loggerName == ROOT_LOGGER) {
+ Configurator.setAllLevels(LogManager.ROOT_LOGGER_NAME, null)
true
+ } else {
+ if (loggerExists(loggerName)) {
+ Configurator.setAllLevels(loggerName, null)
+ true
+ }
+ else false
}
- else false
}
- def loggerExists(loggerName: String): Boolean = existingLogger(loggerName) != null
-
- private def existingLogger(loggerName: String) =
- if (loggerName == ROOT_LOGGER)
- LogManager.getRootLogger
- else LogManager.exists(loggerName)
+ def loggerExists(loggerName: String): Boolean = loggers.contains(loggerName)
}
/**
@@ -113,15 +120,7 @@ class Log4jController extends Log4jControllerMBean {
def getLogLevel(loggerName: String): String = {
- val log = Log4jController.existingLogger(loggerName)
- if (log != null) {
- val level = log.getLevel
- if (level != null)
- log.getLevel.toString
- else
- Log4jController.resolveLevel(log)
- }
- else "No such logger."
+ Log4jController.loggers.getOrElse(loggerName, "No such logger.")
}
def setLogLevel(loggerName: String, level: String): Boolean = Log4jController.logLevel(loggerName, level)
diff --git a/core/src/test/java/kafka/admin/AclCommandTest.java b/core/src/test/java/kafka/admin/AclCommandTest.java
index 4f9bfff0f9d..e71c348c272 100644
--- a/core/src/test/java/kafka/admin/AclCommandTest.java
+++ b/core/src/test/java/kafka/admin/AclCommandTest.java
@@ -40,7 +40,7 @@ import org.apache.kafka.common.utils.SecurityUtils;
import org.apache.kafka.metadata.authorizer.StandardAuthorizer;
import org.apache.kafka.test.TestUtils;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
diff --git a/core/src/test/resources/log4j2.yaml b/core/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..016a542689b
--- /dev/null
+++ b/core/src/test/resources/log4j2.yaml
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: OFF
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: kafka
+ level: WARN
+
+ - name: org.apache.kafka
+ level: WARN
diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala
index cff801b2b50..890aff6da1e 100644
--- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala
+++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala
@@ -57,9 +57,9 @@ import org.apache.kafka.security.authorizer.AclEntry
import org.apache.kafka.server.config.{QuotaConfig, ServerConfigs, ServerLogConfigs, ZkConfigs}
import org.apache.kafka.storage.internals.log.{CleanerConfig, LogConfig, LogFileUtils}
import org.apache.kafka.test.TestUtils.{DEFAULT_MAX_WAIT_MS, assertFutureThrows}
-import org.apache.log4j.PropertyConfigurator
+import org.apache.logging.log4j.core.config.Configurator
import org.junit.jupiter.api.Assertions._
-import org.junit.jupiter.api.{AfterEach, BeforeEach, TestInfo, Timeout}
+import org.junit.jupiter.api.{BeforeEach, TestInfo, Timeout}
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.{MethodSource, ValueSource}
import org.slf4j.LoggerFactory
@@ -89,18 +89,11 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest {
@BeforeEach
override def setUp(testInfo: TestInfo): Unit = {
super.setUp(testInfo)
+ Configurator.reconfigure();
brokerLoggerConfigResource = new ConfigResource(
ConfigResource.Type.BROKER_LOGGER, brokers.head.config.brokerId.toString)
}
- @AfterEach
- override def tearDown(): Unit = {
- // Due to the fact that log4j is not re-initialized across tests, changing a logger's log level persists
- // across test classes. We need to clean up the changes done after testing.
- resetLogging()
- super.tearDown()
- }
-
@ParameterizedTest
@Timeout(30)
@ValueSource(strings = Array("kraft"))
@@ -3766,6 +3759,27 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest {
assertEquals(newAncestorLogLevel, newAncestorLoggerConfig.get("kafka.server.ControllerServer").value())
}
+ @ParameterizedTest
+ @ValueSource(strings = Array("kraft"))
+ def testIncrementalAlterConfigsForLog4jLogLevelsCanSetToRootLogger(quorum: String): Unit = {
+ client = createAdminClient
+ val initialLoggerConfig = describeBrokerLoggers()
+ val initialRootLogLevel = initialLoggerConfig.get(Log4jController.ROOT_LOGGER).value()
+ val newRootLogLevel = LogLevelConfig.DEBUG_LOG_LEVEL
+
+ val alterRootLoggerEntry = Seq(
+ new AlterConfigOp(new ConfigEntry(Log4jController.ROOT_LOGGER, newRootLogLevel), AlterConfigOp.OpType.SET)
+ ).asJavaCollection
+
+ alterBrokerLoggers(alterRootLoggerEntry, validateOnly = true)
+ val validatedRootLoggerConfig = describeBrokerLoggers()
+ assertEquals(initialRootLogLevel, validatedRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value())
+
+ alterBrokerLoggers(alterRootLoggerEntry)
+ val changedRootLoggerConfig = describeBrokerLoggers()
+ assertEquals(newRootLogLevel, changedRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value())
+ }
+
@ParameterizedTest
@ValueSource(strings = Array("kraft"))
def testIncrementalAlterConfigsForLog4jLogLevelsCannotResetRootLogger(quorum: String): Unit = {
@@ -4108,17 +4122,4 @@ object PlaintextAdminIntegrationTest {
assertEquals(LogConfig.DEFAULT_COMPRESSION_TYPE, configs.get(brokerResource).get(ServerConfigs.COMPRESSION_TYPE_CONFIG).value)
}
-
- /**
- * Resets the logging configuration after the test.
- */
- def resetLogging(): Unit = {
- org.apache.log4j.LogManager.resetConfiguration()
- val stream = this.getClass.getResourceAsStream("/log4j.properties")
- try {
- PropertyConfigurator.configure(stream)
- } finally {
- stream.close()
- }
- }
}
diff --git a/core/src/test/scala/other/kafka.log4j.properties b/core/src/test/scala/other/kafka.log4j.properties
deleted file mode 100644
index 1a53fd5d286..00000000000
--- a/core/src/test/scala/other/kafka.log4j.properties
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, KAFKA
-
-log4j.appender.KAFKA=kafka.log4j.KafkaAppender
-
-log4j.appender.KAFKA.Port=9092
-log4j.appender.KAFKA.Host=localhost
-log4j.appender.KAFKA.Topic=test-logger
-log4j.appender.KAFKA.Serializer=kafka.AppenderStringSerializer
diff --git a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala
index b12e60980fc..3d78bf27aff 100755
--- a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala
+++ b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala
@@ -34,13 +34,14 @@ import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.kafka.clients.admin.{Admin, AdminClientConfig, AlterConfigOp, AlterConfigsResult, ConfigEntry}
import org.apache.kafka.server.config.ReplicationConfigs
import org.apache.kafka.server.metrics.KafkaYammerMetrics
-import org.apache.log4j.{Level, Logger}
+import org.apache.logging.log4j.{Level, LogManager}
import org.junit.jupiter.api.{AfterEach, BeforeEach, TestInfo}
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.MethodSource
import com.yammer.metrics.core.Meter
import org.apache.kafka.metadata.LeaderConstants
+import org.apache.logging.log4j.core.config.Configurator
class UncleanLeaderElectionTest extends QuorumTestHarness {
val brokerId1 = 0
@@ -63,8 +64,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness {
val partitionId = 0
val topicPartition = new TopicPartition(topic, partitionId)
- val kafkaApisLogger = Logger.getLogger(classOf[kafka.server.KafkaApis])
- val networkProcessorLogger = Logger.getLogger(classOf[kafka.network.Processor])
+ val kafkaApisLogger = LogManager.getLogger(classOf[kafka.server.KafkaApis])
+ val networkProcessorLogger = LogManager.getLogger(classOf[kafka.network.Processor])
@BeforeEach
override def setUp(testInfo: TestInfo): Unit = {
@@ -80,8 +81,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness {
}
// temporarily set loggers to a higher level so that tests run quietly
- kafkaApisLogger.setLevel(Level.FATAL)
- networkProcessorLogger.setLevel(Level.FATAL)
+ Configurator.setLevel(kafkaApisLogger.getName, Level.FATAL)
+ Configurator.setLevel(networkProcessorLogger.getName, Level.FATAL)
}
@AfterEach
@@ -90,8 +91,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness {
brokers.foreach(broker => CoreUtils.delete(broker.config.logDirs))
// restore log levels
- kafkaApisLogger.setLevel(Level.ERROR)
- networkProcessorLogger.setLevel(Level.ERROR)
+ Configurator.setLevel(kafkaApisLogger.getName, Level.ERROR)
+ Configurator.setLevel(networkProcessorLogger.getName, Level.ERROR)
admin.close()
diff --git a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala
index ca4156eacd2..362796381c2 100644
--- a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala
+++ b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala
@@ -45,7 +45,8 @@ import org.apache.kafka.server.metrics.KafkaYammerMetrics
import org.apache.kafka.server.network.ConnectionDisconnectListener
import org.apache.kafka.server.quota.{ThrottleCallback, ThrottledChannel}
import org.apache.kafka.test.{TestSslUtils, TestUtils => JTestUtils}
-import org.apache.log4j.Level
+import org.apache.logging.log4j.{Level, LogManager}
+import org.apache.logging.log4j.core.config.Configurator
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api._
@@ -88,7 +89,7 @@ class SocketServerTest {
var server: SocketServer = _
val sockets = new ArrayBuffer[Socket]
- private val kafkaLogger = org.apache.log4j.LogManager.getLogger("kafka")
+ private val kafkaLogger = LogManager.getLogger("kafka")
private var logLevelToRestore: Level = _
def endpoint: EndPoint = {
KafkaConfig.fromProps(props, doLog = false).dataPlaneListeners.head
@@ -102,7 +103,7 @@ class SocketServerTest {
server.enableRequestProcessing(Map.empty).get(1, TimeUnit.MINUTES)
// Run the tests with TRACE logging to exercise request logging path
logLevelToRestore = kafkaLogger.getLevel
- kafkaLogger.setLevel(Level.TRACE)
+ Configurator.setLevel(kafkaLogger.getName, Level.TRACE)
assertTrue(server.controlPlaneRequestChannelOpt.isEmpty)
}
@@ -112,7 +113,7 @@ class SocketServerTest {
shutdownServerAndMetrics(server)
sockets.foreach(_.close())
sockets.clear()
- kafkaLogger.setLevel(logLevelToRestore)
+ Configurator.setLevel(kafkaLogger.getName, logLevelToRestore)
TestUtils.clearYammerMetrics()
}
diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle
index 1dd01d5e0eb..37677869afb 100644
--- a/gradle/dependencies.gradle
+++ b/gradle/dependencies.gradle
@@ -54,10 +54,11 @@ versions += [
apacheds: "2.0.0-M24",
argparse4j: "0.7.0",
bcpkix: "1.78.1",
- // Version >=3.1.2 includes an improvement to prevent hash DOS attacks,
- // but currently, tests are failing in >=3.1.2. Therefore, we are temporarily using version 3.1.1.
+ // Version >=3.1.2 includes an improvement to prevent hash DOS attacks,
+ // but currently, tests are failing in >=3.1.2. Therefore, we are temporarily using version 3.1.1.
// The failing tests should be fixed under KAFKA-18089, allowing us to upgrade to >=3.1.2.
caffeine: "3.1.1",
+ bndlib: "7.0.0",
checkstyle: project.hasProperty('checkstyleVersion') ? checkstyleVersion : "10.20.2",
commonsCli: "1.4",
commonsIo: "2.14.0", // ZooKeeper dependency. Do not use, this is going away.
@@ -106,6 +107,7 @@ versions += [
kafka_37: "3.7.1",
kafka_38: "3.8.1",
kafka_39: "3.9.0",
+ log4j2: "2.24.1",
// When updating lz4 make sure the compression levels in org.apache.kafka.common.record.CompressionType are still valid
lz4: "1.8.0",
mavenArtifact: "3.9.6",
@@ -115,7 +117,6 @@ versions += [
opentelemetryProto: "1.0.0-alpha",
protobuf: "3.25.5", // a dependency of opentelemetryProto
pcollections: "4.0.1",
- reload4j: "1.2.25",
re2j: "1.7",
rocksDB: "7.9.2",
// When updating the scalafmt version please also update the version field in checkstyle/.scalafmt.conf. scalafmt now
@@ -148,6 +149,7 @@ libs += [
apachedsJdbmPartition: "org.apache.directory.server:apacheds-jdbm-partition:$versions.apacheds",
argparse4j: "net.sourceforge.argparse4j:argparse4j:$versions.argparse4j",
bcpkix: "org.bouncycastle:bcpkix-jdk18on:$versions.bcpkix",
+ bndlib:"biz.aQute.bnd:biz.aQute.bndlib:$versions.bndlib",
caffeine: "com.github.ben-manes.caffeine:caffeine:$versions.caffeine",
classgraph: "io.github.classgraph:classgraph:$versions.classgraph",
commonsCli: "commons-cli:commons-cli:$versions.commonsCli",
@@ -155,6 +157,7 @@ libs += [
commonsValidator: "commons-validator:commons-validator:$versions.commonsValidator",
jacksonAnnotations: "com.fasterxml.jackson.core:jackson-annotations:$versions.jackson",
jacksonDatabind: "com.fasterxml.jackson.core:jackson-databind:$versions.jackson",
+ jacksonDatabindYaml: "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$versions.jackson",
jacksonDataformatCsv: "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:$versions.jackson",
jacksonModuleScala: "com.fasterxml.jackson.module:jackson-module-scala_$versions.baseScala:$versions.jackson",
jacksonJDK8Datatypes: "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:$versions.jackson",
@@ -204,6 +207,10 @@ libs += [
kafkaStreams_37: "org.apache.kafka:kafka-streams:$versions.kafka_37",
kafkaStreams_38: "org.apache.kafka:kafka-streams:$versions.kafka_38",
kafkaStreams_39: "org.apache.kafka:kafka-streams:$versions.kafka_39",
+ log4j1Bridge2Api: "org.apache.logging.log4j:log4j-1.2-api:$versions.log4j2",
+ log4j2Api: "org.apache.logging.log4j:log4j-api:$versions.log4j2",
+ log4j2Core: "org.apache.logging.log4j:log4j-core:$versions.log4j2",
+ log4j2CoreTest: "org.apache.logging.log4j:log4j-core-test:$versions.log4j2",
lz4: "org.lz4:lz4-java:$versions.lz4",
metrics: "com.yammer.metrics:metrics-core:$versions.metrics",
dropwizardMetrics: "io.dropwizard.metrics:metrics-core:$versions.dropwizardMetrics",
@@ -214,15 +221,15 @@ libs += [
pcollections: "org.pcollections:pcollections:$versions.pcollections",
opentelemetryProto: "io.opentelemetry.proto:opentelemetry-proto:$versions.opentelemetryProto",
protobuf: "com.google.protobuf:protobuf-java:$versions.protobuf",
- reload4j: "ch.qos.reload4j:reload4j:$versions.reload4j",
re2j: "com.google.re2j:re2j:$versions.re2j",
rocksDBJni: "org.rocksdb:rocksdbjni:$versions.rocksDB",
scalaLibrary: "org.scala-lang:scala-library:$versions.scala",
scalaLogging: "com.typesafe.scala-logging:scala-logging_$versions.baseScala:$versions.scalaLogging",
scalaReflect: "org.scala-lang:scala-reflect:$versions.scala",
slf4jApi: "org.slf4j:slf4j-api:$versions.slf4j",
- slf4jReload4j: "org.slf4j:slf4j-reload4j:$versions.slf4j",
+ slf4jLog4j2: "org.apache.logging.log4j:log4j-slf4j-impl:$versions.log4j2",
snappy: "org.xerial.snappy:snappy-java:$versions.snappy",
+ spotbugs: "com.github.spotbugs:spotbugs-annotations:$versions.spotbugs",
swaggerAnnotations: "io.swagger.core.v3:swagger-annotations:$swaggerVersion",
swaggerJaxrs2: "io.swagger.core.v3:swagger-jaxrs2:$swaggerVersion",
zookeeper: "org.apache.zookeeper:zookeeper:$versions.zookeeper",
diff --git a/shell/src/test/resources/log4j.properties b/group-coordinator/src/test/resources/log4j2.yaml
similarity index 57%
rename from shell/src/test/resources/log4j.properties
rename to group-coordinator/src/test/resources/log4j2.yaml
index a72a9693de2..59b02951909 100644
--- a/shell/src/test/resources/log4j.properties
+++ b/group-coordinator/src/test/resources/log4j2.yaml
@@ -1,6 +1,3 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
@@ -12,8 +9,23 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=DEBUG, stdout
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: DEBUG
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka
+ level: DEBUG
diff --git a/metadata/src/test/resources/log4j2.yaml b/metadata/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..fd94a4974e2
--- /dev/null
+++ b/metadata/src/test/resources/log4j2.yaml
@@ -0,0 +1,36 @@
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: DEBUG
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka
+ level: DEBUG
diff --git a/raft/bin/test-kraft-server-start.sh b/raft/bin/test-kraft-server-start.sh
index 701bc1864a4..ad7d755752f 100755
--- a/raft/bin/test-kraft-server-start.sh
+++ b/raft/bin/test-kraft-server-start.sh
@@ -17,6 +17,7 @@
base_dir=$(dirname $0)
if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then
+    echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/kraft-log4j2.yaml\"'"
export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/kraft-log4j.properties"
fi
diff --git a/raft/config/kraft-log4j2.yaml b/raft/config/kraft-log4j2.yaml
new file mode 100644
index 00000000000..3bfd01ca5cf
--- /dev/null
+++ b/raft/config/kraft-log4j2.yaml
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c)%n"
+
+ Appenders:
+ Console:
+ name: STDERR
+ target: SYSTEM_ERR
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDERR
+ Logger:
+ - name: org.apache.kafka.raft
+ level: INFO
+
+ - name: org.apache.kafka.snapshot
+ level: INFO
diff --git a/raft/src/test/resources/log4j2.yaml b/raft/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..50d9e781b8e
--- /dev/null
+++ b/raft/src/test/resources/log4j2.yaml
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: OFF
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka.raft
+ level: ERROR
+
+ - name: org.apache.kafka.snapshot
+ level: ERROR
diff --git a/server-common/src/test/resources/log4j.properties b/server-common/src/test/resources/log4j.properties
deleted file mode 100644
index be36f90299a..00000000000
--- a/server-common/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.org.apache.kafka=INFO
diff --git a/group-coordinator/src/test/resources/log4j.properties b/server-common/src/test/resources/log4j2.yaml
similarity index 57%
rename from group-coordinator/src/test/resources/log4j.properties
rename to server-common/src/test/resources/log4j2.yaml
index 9c8357947d9..be546a18b55 100644
--- a/group-coordinator/src/test/resources/log4j.properties
+++ b/server-common/src/test/resources/log4j2.yaml
@@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
+# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
+# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@@ -12,10 +12,24 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=DEBUG, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
-log4j.logger.org.apache.kafka=DEBUG
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka
+ level: INFO
diff --git a/metadata/src/test/resources/log4j.properties b/shell/src/test/resources/log4j2.yaml
similarity index 61%
rename from metadata/src/test/resources/log4j.properties
rename to shell/src/test/resources/log4j2.yaml
index 9c8357947d9..c229cbce316 100644
--- a/metadata/src/test/resources/log4j.properties
+++ b/shell/src/test/resources/log4j2.yaml
@@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
+# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
+# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@@ -12,10 +12,21 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=DEBUG, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
-log4j.logger.org.apache.kafka=DEBUG
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: DEBUG
+ AppenderRef:
+ - ref: STDOUT
diff --git a/storage/src/test/resources/log4j.properties b/storage/src/test/resources/log4j.properties
deleted file mode 100644
index 7ee388a407f..00000000000
--- a/storage/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,28 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=OFF, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.appender.fileAppender=org.apache.log4j.RollingFileAppender
-log4j.appender.fileAppender.layout=org.apache.log4j.PatternLayout
-log4j.appender.fileAppender.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n
-log4j.appender.fileAppender.File=storage.log
-
-log4j.logger.org.apache.kafka.server.log.remote.storage=INFO
-log4j.logger.org.apache.kafka.server.log.remote.metadata.storage=INFO
-log4j.logger.kafka.log.remote=INFO
diff --git a/storage/src/test/resources/log4j2.yaml b/storage/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..e2050ad723d
--- /dev/null
+++ b/storage/src/test/resources/log4j2.yaml
@@ -0,0 +1,57 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+ - name: "fileLogPattern"
+ value: "%d [%t] %-5p %c %x - %m%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+ RollingFile:
+ - name: FileAppender
+ fileName: storage.log
+ filePattern: "storage-%d{yyyy-MM-dd}.log"
+ PatternLayout:
+ pattern: "${fileLogPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+
+ Loggers:
+ Root:
+ level: OFF
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka.server.log.remote.storage
+ level: INFO
+ AppenderRef:
+ - ref: FileAppender
+
+ - name: org.apache.kafka.server.log.remote.metadata.storage
+ level: INFO
+ AppenderRef:
+ - ref: FileAppender
+
+ - name: kafka.log.remote
+ level: INFO
+ AppenderRef:
+ - ref: FileAppender
diff --git a/streams/integration-tests/src/test/resources/log4j.properties b/streams/integration-tests/src/test/resources/log4j.properties
deleted file mode 100644
index 104b46df20f..00000000000
--- a/streams/integration-tests/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.kafka=ERROR
-log4j.logger.state.change.logger=ERROR
-log4j.logger.org.apache.kafka=ERROR
-log4j.logger.org.apache.kafka.clients=ERROR
-
-# These are the only logs we will likely ever find anything useful in to debug Streams test failures
-log4j.logger.org.apache.kafka.clients.consumer=INFO
-log4j.logger.org.apache.kafka.clients.producer=INFO
-log4j.logger.org.apache.kafka.streams=INFO
-
-# printing out the configs takes up a huge amount of the allotted characters,
-# and provides little value as we can always figure out the test configs without the logs
-log4j.logger.org.apache.kafka.clients.producer.ProducerConfig=ERROR
-log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=ERROR
-log4j.logger.org.apache.kafka.clients.admin.AdminClientConfig=ERROR
-log4j.logger.org.apache.kafka.streams.StreamsConfig=ERROR
diff --git a/streams/integration-tests/src/test/resources/log4j2.yaml b/streams/integration-tests/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..0942036a33c
--- /dev/null
+++ b/streams/integration-tests/src/test/resources/log4j2.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: kafka
+ level: ERROR
+
+ - name: state.change.logger
+ level: ERROR
+
+ - name: org.apache.kafka
+ level: ERROR
+
+ - name: org.apache.kafka.clients
+ level: ERROR
+
+ - name: org.apache.kafka.clients.consumer
+ level: INFO
+
+ - name: org.apache.kafka.clients.producer
+ level: INFO
+
+ - name: org.apache.kafka.streams
+ level: INFO
+
+ - name: org.apache.kafka.clients.producer.ProducerConfig
+ level: ERROR
+
+ - name: org.apache.kafka.clients.consumer.ConsumerConfig
+ level: ERROR
+
+ - name: org.apache.kafka.clients.admin.AdminClientConfig
+ level: ERROR
+
+ - name: org.apache.kafka.streams.StreamsConfig
+ level: ERROR
diff --git a/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j.properties b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j.properties
deleted file mode 100644
index b620f1bb390..00000000000
--- a/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j.properties
+++ /dev/null
@@ -1,19 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, console
-
-log4j.appender.console=org.apache.log4j.ConsoleAppender
-log4j.appender.console.layout=org.apache.log4j.PatternLayout
-log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n
\ No newline at end of file
diff --git a/raft/src/test/resources/log4j.properties b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.yaml
similarity index 60%
rename from raft/src/test/resources/log4j.properties
rename to streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.yaml
index 6d90f6dd348..0c112dd06d6 100644
--- a/raft/src/test/resources/log4j.properties
+++ b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.yaml
@@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
+# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
+# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@@ -12,11 +12,21 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=OFF, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n"
-log4j.logger.org.apache.kafka.raft=ERROR
-log4j.logger.org.apache.kafka.snapshot=ERROR
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
diff --git a/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java b/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java
index 4467e252b92..5895d3a632a 100644
--- a/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java
@@ -44,7 +44,7 @@ import org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor;
import org.apache.kafka.streams.state.BuiltInDslStoreSuppliers;
import org.apache.kafka.streams.utils.TestUtils.RecordingProcessorWrapper;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java
index b0152da0be4..14470db2efa 100644
--- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java
@@ -53,7 +53,7 @@ import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.StreamsException;
import org.apache.kafka.streams.processor.internals.InternalTopicManager.ValidationResult;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java
index 95a5210ae34..e29af81095b 100644
--- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java
@@ -40,7 +40,7 @@ import org.apache.kafka.test.InternalMockProcessorContext;
import org.apache.kafka.test.MockSourceNode;
import org.apache.kafka.test.MockTimestampExtractor;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;
diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java
index f7ec5784890..66a581fb8f4 100644
--- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java
@@ -59,7 +59,8 @@ import org.apache.kafka.streams.processor.TaskId;
import org.apache.kafka.streams.processor.internals.metrics.StreamsMetricsImpl;
import org.apache.kafka.test.InternalMockProcessorContext;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.filter.ThresholdFilter;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -1283,7 +1284,7 @@ public class RecordCollectorTest {
try (final LogCaptureAppender logCaptureAppender =
LogCaptureAppender.createAndRegister(RecordCollectorImpl.class)) {
- logCaptureAppender.setThreshold(Level.INFO);
+ logCaptureAppender.addFilter(ThresholdFilter.createFilter(Level.INFO, null, null));
collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, sinkNodeName, context, streamPartitioner);
collector.flush();
diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java
index 7d3167bf32e..320494c5348 100644
--- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java
@@ -45,7 +45,7 @@ import org.apache.kafka.test.MockStandbyUpdateListener;
import org.apache.kafka.test.MockStateRestoreListener;
import org.apache.kafka.test.StreamsTestUtils;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java
index b55c9a12fcf..6d812e0119e 100644
--- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java
+++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java
@@ -55,7 +55,7 @@ import org.apache.kafka.streams.processor.internals.tasks.DefaultTaskManager;
import org.apache.kafka.streams.processor.internals.testutil.DummyStreamsConfig;
import org.apache.kafka.streams.state.internals.OffsetCheckpoint;
-import org.apache.log4j.Level;
+import org.apache.logging.log4j.Level;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
diff --git a/streams/src/test/resources/log4j.properties b/streams/src/test/resources/log4j.properties
deleted file mode 100644
index 104b46df20f..00000000000
--- a/streams/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,36 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.kafka=ERROR
-log4j.logger.state.change.logger=ERROR
-log4j.logger.org.apache.kafka=ERROR
-log4j.logger.org.apache.kafka.clients=ERROR
-
-# These are the only logs we will likely ever find anything useful in to debug Streams test failures
-log4j.logger.org.apache.kafka.clients.consumer=INFO
-log4j.logger.org.apache.kafka.clients.producer=INFO
-log4j.logger.org.apache.kafka.streams=INFO
-
-# printing out the configs takes up a huge amount of the allotted characters,
-# and provides little value as we can always figure out the test configs without the logs
-log4j.logger.org.apache.kafka.clients.producer.ProducerConfig=ERROR
-log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=ERROR
-log4j.logger.org.apache.kafka.clients.admin.AdminClientConfig=ERROR
-log4j.logger.org.apache.kafka.streams.StreamsConfig=ERROR
diff --git a/streams/src/test/resources/log4j2.yaml b/streams/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..0942036a33c
--- /dev/null
+++ b/streams/src/test/resources/log4j2.yaml
@@ -0,0 +1,65 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: kafka
+ level: ERROR
+
+ - name: state.change.logger
+ level: ERROR
+
+ - name: org.apache.kafka
+ level: ERROR
+
+ - name: org.apache.kafka.clients
+ level: ERROR
+
+ - name: org.apache.kafka.clients.consumer
+ level: INFO
+
+ - name: org.apache.kafka.clients.producer
+ level: INFO
+
+ - name: org.apache.kafka.streams
+ level: INFO
+
+ - name: org.apache.kafka.clients.producer.ProducerConfig
+ level: ERROR
+
+ - name: org.apache.kafka.clients.consumer.ConsumerConfig
+ level: ERROR
+
+ - name: org.apache.kafka.clients.admin.AdminClientConfig
+ level: ERROR
+
+ - name: org.apache.kafka.streams.StreamsConfig
+ level: ERROR
diff --git a/streams/streams-scala/src/test/resources/log4j.properties b/streams/streams-scala/src/test/resources/log4j.properties
deleted file mode 100644
index 93ffc165654..00000000000
--- a/streams/streams-scala/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (C) 2018 Lightbend Inc.
-# Copyright (C) 2017-2018 Alexis Seigneurin.
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Set root logger level to DEBUG and its only appender to A1.
-log4j.rootLogger=INFO, R
-
-# A1 is set to be a ConsoleAppender.
-log4j.appender.A1=org.apache.log4j.ConsoleAppender
-
-log4j.appender.R=org.apache.log4j.RollingFileAppender
-log4j.appender.R.File=logs/kafka-streams-scala.log
-
-log4j.appender.R.MaxFileSize=100KB
-# Keep one backup file
-log4j.appender.R.MaxBackupIndex=1
-
-# A1 uses PatternLayout.
-log4j.appender.R.layout=org.apache.log4j.PatternLayout
-log4j.appender.R.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n
diff --git a/streams/streams-scala/src/test/resources/log4j2.yaml b/streams/streams-scala/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..6e1d18834be
--- /dev/null
+++ b/streams/streams-scala/src/test/resources/log4j2.yaml
@@ -0,0 +1,40 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "%-4r [%t] %-5p %c %x - %m%n"
+
+ Appenders:
+ Console:
+ name: A1
+ RollingFile:
+ - name: R
+ fileName: logs/kafka-streams-scala.log
+      filePattern: "logs/kafka-streams-scala.log.%i"
+ PatternLayout:
+ pattern: "${logPattern}"
+ SizeBasedTriggeringPolicy:
+ size: "100KB"
+ DefaultRolloverStrategy:
+ max: 1
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: R
diff --git a/streams/test-utils/src/test/resources/log4j.properties b/streams/test-utils/src/test/resources/log4j.properties
deleted file mode 100644
index be36f90299a..00000000000
--- a/streams/test-utils/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,21 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.org.apache.kafka=INFO
diff --git a/streams/test-utils/src/test/resources/log4j2.yaml b/streams/test-utils/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..be546a18b55
--- /dev/null
+++ b/streams/test-utils/src/test/resources/log4j2.yaml
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka
+ level: INFO
diff --git a/test-common/src/main/resources/log4j2.yaml b/test-common/src/main/resources/log4j2.yaml
new file mode 100644
index 00000000000..be546a18b55
--- /dev/null
+++ b/test-common/src/main/resources/log4j2.yaml
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+ Logger:
+ - name: org.apache.kafka
+ level: INFO
diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py
index c84a3ec43c3..e09eba30b3e 100644
--- a/tests/kafkatest/services/connect.py
+++ b/tests/kafkatest/services/connect.py
@@ -25,7 +25,7 @@ from ducktape.services.service import Service
from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
-from kafkatest.services.kafka.util import fix_opts_for_new_jvm
+from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_connect
class ConnectServiceBase(KafkaPathResolverMixin, Service):
@@ -38,7 +38,6 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service):
LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log")
STDOUT_FILE = os.path.join(PERSISTENT_ROOT, "connect.stdout")
STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr")
- LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties")
PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid")
EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties")
CONNECT_REST_PORT = 8083
@@ -340,7 +339,8 @@ class ConnectStandaloneService(ConnectServiceBase):
return self.nodes[0]
def start_cmd(self, node, connector_configs):
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE
+ cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % \
+ (get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)))
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["connect_heap_dump_file"]["path"]
other_kafka_opts = self.security_config.kafka_opts.strip('\"')
@@ -364,7 +364,8 @@ class ConnectStandaloneService(ConnectServiceBase):
if self.external_config_template_func:
node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node))
node.account.create_file(self.CONFIG_FILE, self.config_template_func(node))
- node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE))
+ node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)),
+ self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE))
remote_connector_configs = []
for idx, template in enumerate(self.connector_config_templates):
target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties")
@@ -400,7 +401,8 @@ class ConnectDistributedService(ConnectServiceBase):
# connector_configs argument is intentionally ignored in distributed service.
def start_cmd(self, node, connector_configs):
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE
+ cmd = ("( export KAFKA_LOG4J_OPTS=\"%s%s\"; " %
+ (get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node))))
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["connect_heap_dump_file"]["path"]
other_kafka_opts = self.security_config.kafka_opts.strip('\"')
@@ -421,7 +423,8 @@ class ConnectDistributedService(ConnectServiceBase):
if self.external_config_template_func:
node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node))
node.account.create_file(self.CONFIG_FILE, self.config_template_func(node))
- node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE))
+ node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)),
+ self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE))
if self.connector_config_templates:
raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API")
diff --git a/tests/kafkatest/services/console_consumer.py b/tests/kafkatest/services/console_consumer.py
index 3e65efc3483..9755faa1969 100644
--- a/tests/kafkatest/services/console_consumer.py
+++ b/tests/kafkatest/services/console_consumer.py
@@ -21,8 +21,8 @@ from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.monitor.jmx import JmxMixin, JmxTool
-from kafkatest.version import DEV_BRANCH, LATEST_3_7
-from kafkatest.services.kafka.util import fix_opts_for_new_jvm
+from kafkatest.version import DEV_BRANCH, LATEST_3_7, get_version, LATEST_4_0
+from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
"""
The console consumer is a tool that reads data from Kafka and outputs it to standard output.
@@ -36,7 +36,6 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "console_consumer.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "console_consumer.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "console_consumer.properties")
JMX_TOOL_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.log")
JMX_TOOL_ERROR_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log")
@@ -146,7 +145,8 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
args['stdout'] = ConsoleConsumer.STDOUT_CAPTURE
args['stderr'] = ConsoleConsumer.STDERR_CAPTURE
args['log_dir'] = ConsoleConsumer.LOG_DIR
- args['log4j_config'] = ConsoleConsumer.LOG4J_CONFIG
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j_config'] = get_log4j_config_for_tools(node)
args['config_file'] = ConsoleConsumer.CONFIG_FILE
args['stdout'] = ConsoleConsumer.STDOUT_CAPTURE
args['jmx_port'] = self.jmx_port
@@ -160,7 +160,7 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
cmd = fix_opts_for_new_jvm(node)
cmd += "export JMX_PORT=%(jmx_port)s; " \
"export LOG_DIR=%(log_dir)s; " \
- "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j_config)s\"; " \
+ "export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j_config)s\"; " \
"export KAFKA_OPTS=%(kafka_opts)s; " \
"%(console_consumer)s " \
"--topic %(topic)s " \
@@ -226,8 +226,8 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService)
node.account.create_file(ConsoleConsumer.CONFIG_FILE, prop_file)
# Create and upload log properties
- log_config = self.render('tools_log4j.properties', log_file=ConsoleConsumer.LOG_FILE)
- node.account.create_file(ConsoleConsumer.LOG4J_CONFIG, log_config)
+ log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsoleConsumer.LOG_FILE)
+ node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Run and capture output
cmd = self.start_cmd(node)
diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py
index acfa5c7f6c2..b713aacb04e 100644
--- a/tests/kafkatest/services/kafka/kafka.py
+++ b/tests/kafkatest/services/kafka/kafka.py
@@ -33,7 +33,7 @@ from kafkatest.services.security.listener_security_config import ListenerSecurit
from kafkatest.services.security.security_config import SecurityConfig
from kafkatest.version import DEV_BRANCH
from kafkatest.version import KafkaVersion
-from kafkatest.services.kafka.util import fix_opts_for_new_jvm
+from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config
class KafkaListener:
@@ -145,7 +145,6 @@ class KafkaService(KafkaPathResolverMixin, JmxMixin, Service):
"""
PERSISTENT_ROOT = "/mnt/kafka"
STDOUT_STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "server-start-stdout-stderr.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties")
# Logs such as controller.log, server.log, etc all go here
OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs")
OPERATIONAL_LOG_INFO_DIR = os.path.join(OPERATIONAL_LOG_DIR, "info")
@@ -805,7 +804,7 @@ class KafkaService(KafkaPathResolverMixin, JmxMixin, Service):
kafka_mode = self.context.globals.get("kafka_mode", "")
cmd = f"export KAFKA_MODE={kafka_mode}; "
cmd += "export JMX_PORT=%d; " % self.jmx_port
- cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG
+ cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config(node)))
heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \
self.logs["kafka_heap_dump_file"]["path"]
security_kafka_opts = self.security_config.kafka_opts.strip('\"')
@@ -874,7 +873,8 @@ class KafkaService(KafkaPathResolverMixin, JmxMixin, Service):
self.logger.info("kafka.properties:")
self.logger.info(prop_file)
node.account.create_file(KafkaService.CONFIG_FILE, prop_file)
- node.account.create_file(self.LOG4J_CONFIG, self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR))
+ node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config(node)),
+ self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR))
if self.quorum_info.using_kraft:
# format log directories if necessary
diff --git a/tests/kafkatest/services/kafka/templates/log4j2.yaml b/tests/kafkatest/services/kafka/templates/log4j2.yaml
new file mode 100644
index 00000000000..22e3f118f68
--- /dev/null
+++ b/tests/kafkatest/services/kafka/templates/log4j2.yaml
@@ -0,0 +1,283 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+Configuration:
+ Properties:
+ Property:
+ - name: "log_dir"
+      value: "{{ log_dir }}"
+ - name: "logPattern"
+ value: "[%d] %p %m (%c)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ RollingFile:
+ - name: KafkaInfoAppender
+ fileName: "${log_dir}/info/server.log"
+ filePattern: "${log_dir}/info/server.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: INFO
+ onMatch: ACCEPT
+
+ - name: StateChangeInfoAppender
+ fileName: "${log_dir}/info/state-change.log"
+ filePattern: "${log_dir}/info/state-change.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: INFO
+ onMatch: ACCEPT
+
+ - name: RequestInfoAppender
+ fileName: "${log_dir}/info/kafka-request.log"
+ filePattern: "${log_dir}/info/kafka-request.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: INFO
+ onMatch: ACCEPT
+
+ - name: CleanerInfoAppender
+ fileName: "${log_dir}/info/log-cleaner.log"
+ filePattern: "${log_dir}/info/log-cleaner.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: INFO
+ onMatch: ACCEPT
+
+ - name: ControllerInfoAppender
+ fileName: "${log_dir}/info/controller.log"
+ filePattern: "${log_dir}/info/controller.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: INFO
+ onMatch: ACCEPT
+
+ - name: AuthorizerInfoAppender
+ fileName: "${log_dir}/info/kafka-authorizer.log"
+ filePattern: "${log_dir}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: INFO
+ onMatch: ACCEPT
+
+ - name: KafkaDebugAppender
+ fileName: "${log_dir}/debug/server.log"
+ filePattern: "${log_dir}/debug/server.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: DEBUG
+ onMatch: ACCEPT
+
+ - name: StateChangeDebugAppender
+ fileName: "${log_dir}/debug/state-change.log"
+ filePattern: "${log_dir}/debug/state-change.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: DEBUG
+ onMatch: ACCEPT
+
+ - name: RequestDebugAppender
+ fileName: "${log_dir}/debug/kafka-request.log"
+ filePattern: "${log_dir}/debug/kafka-request.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: DEBUG
+ onMatch: ACCEPT
+
+ - name: CleanerDebugAppender
+ fileName: "${log_dir}/debug/log-cleaner.log"
+ filePattern: "${log_dir}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: DEBUG
+ onMatch: ACCEPT
+
+ - name: ControllerDebugAppender
+ fileName: "${log_dir}/debug/controller.log"
+ filePattern: "${log_dir}/debug/controller.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: DEBUG
+ onMatch: ACCEPT
+
+ - name: AuthorizerDebugAppender
+ fileName: "${log_dir}/debug/kafka-authorizer.log"
+ filePattern: "${log_dir}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH}"
+ PatternLayout:
+ pattern: "${logPattern}"
+ TimeBasedTriggeringPolicy:
+ interval: 1
+ Filters:
+ ThresholdFilter:
+ level: DEBUG
+ onMatch: ACCEPT
+
+ Loggers:
+ Root:
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: STDOUT
+
+ Logger:
+ - name: kafka.producer.async.DefaultEventHandler
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: KafkaInfoAppender
+ - ref: KafkaDebugAppender
+
+ - name: kafka.client.ClientUtils
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: KafkaInfoAppender
+ - ref: KafkaDebugAppender
+
+ - name: kafka.perf
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: KafkaInfoAppender
+ - ref: KafkaDebugAppender
+
+ - name: kafka.perf.ProducerPerformance$ProducerThread
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: KafkaInfoAppender
+ - ref: KafkaDebugAppender
+
+ - name: kafka
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: KafkaInfoAppender
+ - ref: KafkaDebugAppender
+
+ - name: kafka.network.RequestChannel$
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: RequestInfoAppender
+ - ref: RequestDebugAppender
+
+ - name: kafka.network.Processor
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: RequestInfoAppender
+ - ref: RequestDebugAppender
+
+ - name: kafka.server.KafkaApis
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: RequestInfoAppender
+ - ref: RequestDebugAppender
+
+ - name: kafka.request.logger
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: RequestInfoAppender
+ - ref: RequestDebugAppender
+
+ - name: org.apache.kafka.raft
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: ControllerInfoAppender
+ - ref: ControllerDebugAppender
+
+ - name: org.apache.kafka.controller
+ level: {{ log_level|default("DEBUG") }}
+ AppenderRef:
+ - ref: ControllerInfoAppender
+ - ref: ControllerDebugAppender
+
+ - name: kafka.controller
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: ControllerInfoAppender
+ - ref: ControllerDebugAppender
+
+ - name: kafka.log.LogCleaner
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: CleanerInfoAppender
+ - ref: CleanerDebugAppender
+
+ - name: state.change.logger
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: StateChangeInfoAppender
+ - ref: StateChangeDebugAppender
+
+ - name: kafka.authorizer.logger
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: AuthorizerInfoAppender
+ - ref: AuthorizerDebugAppender
+
+ - name: org.apache.kafka.coordinator.group
+ level: {{ log_level|default("DEBUG") }}
+ additivity: false
+ AppenderRef:
+ - ref: KafkaInfoAppender
+ - ref: KafkaDebugAppender
diff --git a/tests/kafkatest/services/kafka/util.py b/tests/kafkatest/services/kafka/util.py
index 0965fd9d4e4..a2e22ac32b7 100644
--- a/tests/kafkatest/services/kafka/util.py
+++ b/tests/kafkatest/services/kafka/util.py
@@ -16,6 +16,7 @@
from collections import namedtuple
from kafkatest.utils.remote_account import java_version
+from kafkatest.version import LATEST_4_0, get_version
TopicPartition = namedtuple('TopicPartition', ['topic', 'partition'])
@@ -30,4 +31,20 @@ def fix_opts_for_new_jvm(node):
return ""
+def get_log4j_config_param(node):
+    return '-Dlog4j2.configurationFile=file:' if get_version(node) >= LATEST_4_0 else '-Dlog4j.configuration=file:'
+
+def get_log4j_config(node):
+ return 'log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'log4j.properties'
+
+def get_log4j_config_for_connect(node):
+ return 'connect_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'connect_log4j.properties'
+
+def get_log4j_config_for_tools(node):
+ return 'tools_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'tools_log4j.properties'
+
+def get_log4j_config_for_trogdor_coordinator(node):
+ return 'trogdor-coordinator-log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'trogdor-coordinator-log4j.properties'
+
+def get_log4j_config_for_trogdor_agent(node):
+ return 'trogdor-agent-log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'trogdor-agent-log4j.properties'
diff --git a/tests/kafkatest/services/performance/consumer_performance.py b/tests/kafkatest/services/performance/consumer_performance.py
index eea91cbfd90..28086e82818 100644
--- a/tests/kafkatest/services/performance/consumer_performance.py
+++ b/tests/kafkatest/services/performance/consumer_performance.py
@@ -16,7 +16,7 @@
import os
-from kafkatest.services.kafka.util import fix_opts_for_new_jvm
+from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.performance import PerformanceService
from kafkatest.version import V_2_5_0, DEV_BRANCH
@@ -49,7 +49,6 @@ class ConsumerPerformanceService(PerformanceService):
STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stdout")
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stderr")
LOG_FILE = os.path.join(LOG_DIR, "consumer_performance.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "consumer.properties")
logs = {
@@ -111,7 +110,7 @@ class ConsumerPerformanceService(PerformanceService):
cmd = fix_opts_for_new_jvm(node)
cmd += "export LOG_DIR=%s;" % ConsumerPerformanceService.LOG_DIR
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
- cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\";" % ConsumerPerformanceService.LOG4J_CONFIG
+ cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\";" % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += " %s" % self.path.script("kafka-consumer-perf-test.sh", node)
for key, value in self.args(node.version).items():
cmd += " --%s %s" % (key, value)
@@ -128,8 +127,8 @@ class ConsumerPerformanceService(PerformanceService):
def _worker(self, idx, node):
node.account.ssh("mkdir -p %s" % ConsumerPerformanceService.PERSISTENT_ROOT, allow_fail=False)
- log_config = self.render('tools_log4j.properties', log_file=ConsumerPerformanceService.LOG_FILE)
- node.account.create_file(ConsumerPerformanceService.LOG4J_CONFIG, log_config)
+ log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsumerPerformanceService.LOG_FILE)
+ node.account.create_file(get_log4j_config_for_tools(node), log_config)
node.account.create_file(ConsumerPerformanceService.CONFIG_FILE, str(self.security_config))
self.security_config.setup_node(node)
diff --git a/tests/kafkatest/services/performance/end_to_end_latency.py b/tests/kafkatest/services/performance/end_to_end_latency.py
index e7e0100e511..15915557705 100644
--- a/tests/kafkatest/services/performance/end_to_end_latency.py
+++ b/tests/kafkatest/services/performance/end_to_end_latency.py
@@ -15,9 +15,8 @@
import os
-from kafkatest.services.kafka.util import fix_opts_for_new_jvm
+from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.performance import PerformanceService
-from kafkatest.services.security.security_config import SecurityConfig
from kafkatest.version import get_version, V_3_4_0, DEV_BRANCH
@@ -31,7 +30,6 @@ class EndToEndLatencyService(PerformanceService):
STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stdout")
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stderr")
LOG_FILE = os.path.join(LOG_DIR, "end_to_end_latency.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "client.properties")
logs = {
@@ -76,7 +74,7 @@ class EndToEndLatencyService(PerformanceService):
})
cmd = fix_opts_for_new_jvm(node)
- cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % EndToEndLatencyService.LOG4J_CONFIG
+ cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += "KAFKA_OPTS=%(kafka_opts)s %(kafka_run_class)s %(java_class_name)s " % args
cmd += "%(bootstrap_servers)s %(topic)s %(num_records)d %(acks)d %(message_bytes)d %(config_file)s" % args
@@ -88,9 +86,9 @@ class EndToEndLatencyService(PerformanceService):
def _worker(self, idx, node):
node.account.ssh("mkdir -p %s" % EndToEndLatencyService.PERSISTENT_ROOT, allow_fail=False)
- log_config = self.render('tools_log4j.properties', log_file=EndToEndLatencyService.LOG_FILE)
+ log_config = self.render(get_log4j_config_for_tools(node), log_file=EndToEndLatencyService.LOG_FILE)
- node.account.create_file(EndToEndLatencyService.LOG4J_CONFIG, log_config)
+ node.account.create_file(get_log4j_config_for_tools(node), log_config)
client_config = str(self.security_config)
client_config += "compression_type=%(compression_type)s" % self.args
node.account.create_file(EndToEndLatencyService.CONFIG_FILE, client_config)
diff --git a/tests/kafkatest/services/performance/producer_performance.py b/tests/kafkatest/services/performance/producer_performance.py
index acb0aec8650..acfe4790d73 100644
--- a/tests/kafkatest/services/performance/producer_performance.py
+++ b/tests/kafkatest/services/performance/producer_performance.py
@@ -19,7 +19,7 @@ from ducktape.utils.util import wait_until
from ducktape.cluster.remoteaccount import RemoteCommandError
from kafkatest.directory_layout.kafka_path import TOOLS_JAR_NAME, TOOLS_DEPENDANT_TEST_LIBS_JAR_NAME
-from kafkatest.services.kafka.util import fix_opts_for_new_jvm
+from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.monitor.http import HttpMetricsCollector
from kafkatest.services.performance import PerformanceService
from kafkatest.services.security.security_config import SecurityConfig
@@ -33,7 +33,6 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService):
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "producer_performance.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "producer_performance.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput, version=DEV_BRANCH, settings=None,
intermediate_stats=False, client_id="producer-performance"):
@@ -90,7 +89,7 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService):
cmd += "for file in %s; do CLASSPATH=$CLASSPATH:$file; done; " % jar
cmd += "export CLASSPATH; "
- cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % ProducerPerformanceService.LOG4J_CONFIG
+ cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += "KAFKA_OPTS=%(kafka_opts)s KAFKA_HEAP_OPTS=\"-XX:+HeapDumpOnOutOfMemoryError\" %(kafka_run_class)s org.apache.kafka.tools.ProducerPerformance " \
"--topic %(topic)s --num-records %(num_records)d --record-size %(record_size)d --throughput %(throughput)d --producer-props bootstrap.servers=%(bootstrap_servers)s client.id=%(client_id)s %(metrics_props)s" % args
@@ -119,8 +118,8 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService):
node.account.ssh("mkdir -p %s" % ProducerPerformanceService.PERSISTENT_ROOT, allow_fail=False)
# Create and upload log properties
- log_config = self.render('tools_log4j.properties', log_file=ProducerPerformanceService.LOG_FILE)
- node.account.create_file(ProducerPerformanceService.LOG4J_CONFIG, log_config)
+ log_config = self.render(get_log4j_config_for_tools(node), log_file=ProducerPerformanceService.LOG_FILE)
+ node.account.create_file(get_log4j_config_for_tools(node), log_config)
cmd = self.start_cmd(node)
self.logger.debug("Producer performance %d command: %s", idx, cmd)
diff --git a/core/src/test/resources/log4j.properties b/tests/kafkatest/services/performance/templates/tools_log4j2.yaml
similarity index 60%
rename from core/src/test/resources/log4j.properties
rename to tests/kafkatest/services/performance/templates/tools_log4j2.yaml
index 833d63e1e8e..5c5e1099f94 100644
--- a/core/src/test/resources/log4j.properties
+++ b/tests/kafkatest/services/performance/templates/tools_log4j2.yaml
@@ -1,9 +1,9 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
+# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
+# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
@@ -12,11 +12,19 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=OFF, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+Configuration:
+ Appenders:
+ File:
+ name: FILE
+ fileName: {{ log_file }}
+ append: true
+ immediateFlush: true
+ PatternLayout:
+ pattern: "[%d] %p %m (%c)%n"
-log4j.logger.kafka=WARN
-log4j.logger.org.apache.kafka=WARN
+ Loggers:
+ Root:
+ level: {{ log_level|default("INFO") }}
+ AppenderRef:
+ - ref: FILE
diff --git a/tests/kafkatest/services/streams.py b/tests/kafkatest/services/streams.py
index 3848fea686d..df8a0b39230 100644
--- a/tests/kafkatest/services/streams.py
+++ b/tests/kafkatest/services/streams.py
@@ -22,6 +22,7 @@ from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.kafka import KafkaConfig
from kafkatest.services.monitor.jmx import JmxMixin
+from .kafka.util import get_log4j_config_param, get_log4j_config_for_tools
STATE_DIR = "state.dir"
@@ -37,7 +38,6 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service):
STDERR_FILE = os.path.join(PERSISTENT_ROOT, "streams.stderr")
JMX_LOG_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.log")
JMX_ERR_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log")
- LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
PID_FILE = os.path.join(PERSISTENT_ROOT, "streams.pid")
CLEAN_NODE_ENABLED = True
@@ -285,10 +285,11 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
- args['log4j'] = self.LOG4J_CONFIG_FILE
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j'] = get_log4j_config_for_tools(node)
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+ cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \
" %(config_file)s %(user_test_args1)s %(user_test_args2)s %(user_test_args3)s" \
" %(user_test_args4)s & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@@ -305,7 +306,7 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service):
node.account.mkdirs(self.PERSISTENT_ROOT)
prop_file = self.prop_file()
node.account.create_file(self.CONFIG_FILE, prop_file)
- node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j.properties', log_file=self.LOG_FILE))
+ node.account.create_file(get_log4j_config_for_tools(node), self.render(get_log4j_config_for_tools(node), log_file=self.LOG_FILE))
self.logger.info("Starting StreamsTest process on " + str(node.account))
with node.account.monitor_log(self.STDOUT_FILE) as monitor:
@@ -363,11 +364,12 @@ class StreamsSmokeTestBaseService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
- args['log4j'] = self.LOG4J_CONFIG_FILE
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j'] = get_log4j_config_for_tools(node)
args['version'] = self.KAFKA_STREAMS_VERSION
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\";" \
+ cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \
" INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s" \
" %(kafka_run_class)s %(streams_class_name)s" \
" %(config_file)s %(user_test_args1)s" \
@@ -419,11 +421,12 @@ class StreamsSmokeTestDriverService(StreamsSmokeTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
- args['log4j'] = self.LOG4J_CONFIG_FILE
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j'] = get_log4j_config_for_tools(node)
args['disable_auto_terminate'] = self.DISABLE_AUTO_TERMINATE
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+ cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \
" %(config_file)s %(user_test_args1)s %(disable_auto_terminate)s" \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@@ -496,10 +499,11 @@ class StreamsBrokerDownResilienceService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
- args['log4j'] = self.LOG4J_CONFIG_FILE
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j'] = get_log4j_config_for_tools(node)
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+ cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \
" %(config_file)s %(user_test_args1)s %(user_test_args2)s %(user_test_args3)s" \
" %(user_test_args4)s & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@@ -535,12 +539,13 @@ class StreamsResetter(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
- args['log4j'] = self.LOG4J_CONFIG_FILE
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j'] = get_log4j_config_for_tools(node)
args['application.id'] = self.applicationId
args['input.topics'] = self.topic
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "(export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+        cmd = "(export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"%(kafka_run_class)s %(streams_class_name)s " \
"--bootstrap-server %(bootstrap.servers)s " \
"--force " \
@@ -630,11 +635,12 @@ class StreamsUpgradeTestJobRunnerService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
- args['log4j'] = self.LOG4J_CONFIG_FILE
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j'] = get_log4j_config_for_tools(node)
args['version'] = self.KAFKA_STREAMS_VERSION
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+ cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \
" %(kafka_run_class)s %(streams_class_name)s %(config_file)s " \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
@@ -730,11 +736,12 @@ class CooperativeRebalanceUpgradeService(StreamsTestBaseService):
args['stdout'] = self.STDOUT_FILE
args['stderr'] = self.STDERR_FILE
args['pidfile'] = self.PID_FILE
- args['log4j'] = self.LOG4J_CONFIG_FILE
+ args['log4j_param'] = get_log4j_config_param(node)
+ args['log4j'] = get_log4j_config_for_tools(node)
args['version'] = self.KAFKA_STREAMS_VERSION
args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)
- cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+ cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \
" %(kafka_run_class)s %(streams_class_name)s %(config_file)s " \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args
diff --git a/tests/kafkatest/services/templates/connect_log4j2.yaml b/tests/kafkatest/services/templates/connect_log4j2.yaml
new file mode 100644
index 00000000000..71f9f0f39bd
--- /dev/null
+++ b/tests/kafkatest/services/templates/connect_log4j2.yaml
@@ -0,0 +1,35 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c)%n"
+
+ Appenders:
+ File:
+ - name: FILE
+ fileName: {{ log_file }}
+ append: true
+ immediateFlush: true
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: {{ log_level|default("INFO") }}
+ AppenderRef:
+ - ref: FILE
diff --git a/tests/kafkatest/services/templates/tools_log4j2.yaml b/tests/kafkatest/services/templates/tools_log4j2.yaml
new file mode 100644
index 00000000000..2f41025d485
--- /dev/null
+++ b/tests/kafkatest/services/templates/tools_log4j2.yaml
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Appenders:
+ File:
+ name: FILE
+ fileName: {{ log_file }}
+ append: true
+ immediateFlush: true
+ PatternLayout:
+ pattern: "[%d] %p %m (%c)%n"
+
+ Loggers:
+ Root:
+ level: {{ log_level|default("INFO") }}
+ AppenderRef:
+ - ref: FILE
+
+ {% if loggers is defined %}
+ Logger:
+ # Add additional loggers dynamically if defined
+ {% for logger, log_level in loggers.items() %}
+ - name: {{ logger }}
+ level: {{ log_level }}
+ {% endfor %}
+ {% endif %}
\ No newline at end of file
diff --git a/tests/kafkatest/services/transactional_message_copier.py b/tests/kafkatest/services/transactional_message_copier.py
index 564a23fdcc3..d1f918cd8e1 100644
--- a/tests/kafkatest/services/transactional_message_copier.py
+++ b/tests/kafkatest/services/transactional_message_copier.py
@@ -22,6 +22,9 @@ from ducktape.services.background_thread import BackgroundThreadService
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from ducktape.cluster.remoteaccount import RemoteCommandError
+from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools
+
+
class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService):
"""This service wraps org.apache.kafka.tools.TransactionalMessageCopier for
use in system testing.
@@ -31,7 +34,6 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "transactional_message_copier.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "transactional_message_copier.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
logs = {
"transactional_message_copier_stdout": {
@@ -75,9 +77,9 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService
node.account.ssh("mkdir -p %s" % TransactionalMessageCopier.PERSISTENT_ROOT,
allow_fail=False)
# Create and upload log properties
- log_config = self.render('tools_log4j.properties',
+ log_config = self.render(get_log4j_config_for_tools(node),
log_file=TransactionalMessageCopier.LOG_FILE)
- node.account.create_file(TransactionalMessageCopier.LOG4J_CONFIG, log_config)
+            node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Configure security
self.security_config = self.kafka.security_config.client_config(node=node)
self.security_config.setup_node(node)
@@ -114,7 +116,7 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService
def start_cmd(self, node, idx):
cmd = "export LOG_DIR=%s;" % TransactionalMessageCopier.LOG_DIR
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
- cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % TransactionalMessageCopier.LOG4J_CONFIG
+ cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += self.path.script("kafka-run-class.sh", node) + " org.apache.kafka.tools." + "TransactionalMessageCopier"
cmd += " --broker-list %s" % self.kafka.bootstrap_servers(self.security_config.security_protocol)
cmd += " --transactional-id %s" % self.transactional_id
diff --git a/tests/kafkatest/services/trogdor/templates/log4j2.yaml b/tests/kafkatest/services/trogdor/templates/log4j2.yaml
new file mode 100644
index 00000000000..42c1aa281e7
--- /dev/null
+++ b/tests/kafkatest/services/trogdor/templates/log4j2.yaml
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c)%n"
+
+ Appenders:
+ File:
+ - name: MyFileLogger
+ fileName: {{ log_path }}
+ PatternLayout:
+ pattern: "${logPattern}"
+ Loggers:
+ Root:
+ level: DEBUG
+ AppenderRef:
+ - ref: MyFileLogger
+
+ Logger:
+ - name: kafka
+ level: DEBUG
+
+ - name: org.apache.kafka
+ level: DEBUG
+
+ - name: org.eclipse
+ level: INFO
diff --git a/tests/kafkatest/services/trogdor/trogdor.py b/tests/kafkatest/services/trogdor/trogdor.py
index 3b941fe9059..618c68d7851 100644
--- a/tests/kafkatest/services/trogdor/trogdor.py
+++ b/tests/kafkatest/services/trogdor/trogdor.py
@@ -22,6 +22,8 @@ from requests.packages.urllib3 import Retry
from ducktape.services.service import Service
from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
+from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config, \
+ get_log4j_config_for_trogdor_coordinator, get_log4j_config_for_trogdor_agent
class TrogdorService(KafkaPathResolverMixin, Service):
@@ -48,8 +50,6 @@ class TrogdorService(KafkaPathResolverMixin, Service):
AGENT_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-agent-stdout-stderr.log")
COORDINATOR_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator.log")
AGENT_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-agent.log")
- COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j.properties")
- AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j.properties")
CONFIG_PATH = os.path.join(PERSISTENT_ROOT, "trogdor.conf")
DEFAULT_AGENT_PORT=8888
DEFAULT_COORDINATOR_PORT=8889
@@ -141,26 +141,26 @@ class TrogdorService(KafkaPathResolverMixin, Service):
self._start_agent_node(node)
def _start_coordinator_node(self, node):
- node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
- self.render('log4j.properties',
+ node.account.create_file(get_log4j_config_for_trogdor_coordinator(node),
+ self.render(get_log4j_config(node),
log_path=TrogdorService.COORDINATOR_LOG))
self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR,
- TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
+ get_log4j_config_for_trogdor_coordinator(node),
TrogdorService.COORDINATOR_LOG, node)
self.logger.info("Started trogdor coordinator on %s." % node.name)
def _start_agent_node(self, node):
- node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES,
- self.render('log4j.properties',
+ node.account.create_file(get_log4j_config_for_trogdor_agent(node),
+ self.render(get_log4j_config(node),
log_path=TrogdorService.AGENT_LOG))
self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR,
- TrogdorService.AGENT_LOG4J_PROPERTIES,
+ get_log4j_config_for_trogdor_agent(node),
TrogdorService.AGENT_LOG, node)
self.logger.info("Started trogdor agent on %s." % node.name)
def _start_trogdor_daemon(self, daemon_name, stdout_stderr_capture_path,
log4j_properties_path, log_path, node):
- cmd = "export KAFKA_LOG4J_OPTS='-Dlog4j.configuration=file:%s'; " % log4j_properties_path
+ cmd = "export KAFKA_LOG4J_OPTS='%s%s'; " % (get_log4j_config_param(node), log4j_properties_path)
cmd += "%s %s --%s.config %s --node-name %s 1>> %s 2>> %s &" % \
(self.path.script("trogdor.sh", node),
daemon_name,
diff --git a/tests/kafkatest/services/verifiable_consumer.py b/tests/kafkatest/services/verifiable_consumer.py
index 4b93a785bfd..8264566f1c2 100644
--- a/tests/kafkatest/services/verifiable_consumer.py
+++ b/tests/kafkatest/services/verifiable_consumer.py
@@ -20,6 +20,7 @@ from ducktape.services.background_thread import BackgroundThreadService
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.kafka import TopicPartition, consumer_group
+from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.verifiable_client import VerifiableClientMixin
from kafkatest.version import DEV_BRANCH, V_2_3_0, V_2_3_1, V_3_7_0, V_4_0_0
@@ -215,7 +216,6 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "verifiable_consumer.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.properties")
logs = {
@@ -296,8 +296,8 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
node.account.ssh("mkdir -p %s" % VerifiableConsumer.PERSISTENT_ROOT, allow_fail=False)
# Create and upload log properties
- log_config = self.render('tools_log4j.properties', log_file=VerifiableConsumer.LOG_FILE)
- node.account.create_file(VerifiableConsumer.LOG4J_CONFIG, log_config)
+ log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableConsumer.LOG_FILE)
+ node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Create and upload config file
self.security_config = self.kafka.security_config.client_config(self.prop_file, node,
@@ -380,7 +380,7 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
cmd = ""
cmd += "export LOG_DIR=%s;" % VerifiableConsumer.LOG_DIR
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
- cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % VerifiableConsumer.LOG4J_CONFIG
+ cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += self.impl.exec_cmd(node)
if self.on_record_consumed:
cmd += " --verbose"
diff --git a/tests/kafkatest/services/verifiable_producer.py b/tests/kafkatest/services/verifiable_producer.py
index ea6292d5772..6f473d8bb13 100644
--- a/tests/kafkatest/services/verifiable_producer.py
+++ b/tests/kafkatest/services/verifiable_producer.py
@@ -24,7 +24,7 @@ from kafkatest.services.kafka import TopicPartition
from kafkatest.services.verifiable_client import VerifiableClientMixin
from kafkatest.utils import is_int, is_int_with_prefix
from kafkatest.version import get_version, V_2_5_0, DEV_BRANCH
-from kafkatest.services.kafka.util import fix_opts_for_new_jvm
+from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools
class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, BackgroundThreadService):
@@ -41,7 +41,6 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.stderr")
LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
LOG_FILE = os.path.join(LOG_DIR, "verifiable_producer.log")
- LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")
CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.properties")
logs = {
@@ -127,8 +126,8 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
node.account.ssh("mkdir -p %s" % VerifiableProducer.PERSISTENT_ROOT, allow_fail=False)
# Create and upload log properties
- log_config = self.render('tools_log4j.properties', log_file=VerifiableProducer.LOG_FILE)
- node.account.create_file(VerifiableProducer.LOG4J_CONFIG, log_config)
+ log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableProducer.LOG_FILE)
+ node.account.create_file(get_log4j_config_for_tools(node), log_config)
# Configure security
self.security_config = self.kafka.security_config.client_config(node=node,
@@ -222,7 +221,7 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou
cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
cmd += fix_opts_for_new_jvm(node)
- cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % VerifiableProducer.LOG4J_CONFIG
+ cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
cmd += self.impl.exec_cmd(node)
version = get_version(node)
if version >= V_2_5_0:
diff --git a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py
index 55737a185c2..a879d95c418 100644
--- a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py
+++ b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py
@@ -18,8 +18,10 @@ from ducktape.mark import matrix
from ducktape.mark.resource import cluster
from ducktape.utils.util import wait_until
from kafkatest.services.kafka import quorum
+from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools
from kafkatest.services.streams import StreamsTestBaseService
from kafkatest.tests.kafka_test import KafkaTest
+from kafkatest.version import LATEST_4_0
class StreamsRelationalSmokeTestService(StreamsTestBaseService):
@@ -33,14 +35,15 @@ class StreamsRelationalSmokeTestService(StreamsTestBaseService):
self.mode = mode
self.nodeId = nodeId
self.processing_guarantee = processing_guarantee
- self.log4j_template = 'log4j_template.properties'
+ self.log4j_template = "log4j2_template.yaml" if (self.node.version >= LATEST_4_0) else "log4j_template.properties"
def start_cmd(self, node):
- return "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \
+ return "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \
"INCLUDE_TEST_JARS=true %(kafka_run_class)s org.apache.kafka.streams.tests.RelationalSmokeTest " \
" %(mode)s %(kafka)s %(nodeId)s %(processing_guarantee)s %(state_dir)s" \
" & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % {
- "log4j": self.LOG4J_CONFIG_FILE,
+ "log4j_param": get_log4j_config_param(node),
+ "log4j": get_log4j_config_for_tools(node),
"kafka_run_class": self.path.script("kafka-run-class.sh", node),
"mode": self.mode,
"kafka": self.kafka.bootstrap_servers(),
@@ -54,8 +57,9 @@ class StreamsRelationalSmokeTestService(StreamsTestBaseService):
def start_node(self, node):
node.account.mkdirs(self.PERSISTENT_ROOT)
- node.account.create_file(self.LOG4J_CONFIG_FILE,
- self.render("log4j_template.properties", log_file=self.LOG_FILE))
+ node.account.create_file(get_log4j_config_for_tools(node),
+ self.render("log4j2_template.yaml" if node.version >= LATEST_4_0 else "log4j_template.properties",
+ log_file=self.LOG_FILE))
self.logger.info("Starting process on " + str(node.account))
node.account.ssh(self.start_cmd(node))
diff --git a/clients/src/test/resources/log4j.properties b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml
similarity index 58%
rename from clients/src/test/resources/log4j.properties
rename to tests/kafkatest/tests/streams/templates/log4j2_template.yaml
index 0992580eca1..f94e7d437a9 100644
--- a/clients/src/test/resources/log4j.properties
+++ b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml
@@ -12,12 +12,28 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-log4j.rootLogger=OFF, stdout
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
+# Define the root logger with appender file
+Configuration:
+ Appenders:
+ File:
+ name: FILE
+ fileName: {{ log_file }}
+ append: true
+ immediateFlush: true
+ PatternLayout:
+ pattern: "[%d] %p %m (%c)%n"
-log4j.logger.org.apache.kafka=ERROR
-# We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper
-log4j.logger.org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper=INFO
+ Loggers:
+ Root:
+ level: {{ log_level|default("INFO") }}
+ AppenderRef:
+ - ref: FILE
+
+ Logger:
+ {% if loggers is defined %}
+ {% for logger, log_level in loggers.items() %}
+ - name: {{ logger }}
+ level: {{ log_level }}
+ {% endfor %}
+ {% endif %}
\ No newline at end of file
diff --git a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java
index a6b2f13e3c4..0dedf567c49 100644
--- a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java
+++ b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java
@@ -41,7 +41,7 @@ import org.apache.kafka.metadata.PartitionRegistration;
import org.apache.kafka.server.quota.QuotaType;
import org.apache.kafka.tools.reassign.ReassignPartitionsCommand;
-import org.apache.log4j.PropertyConfigurator;
+import org.apache.logging.log4j.core.config.Configurator;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartFrame;
import org.jfree.chart.JFreeChart;
@@ -95,7 +95,7 @@ public class ReplicationQuotasTestRig {
private static final String DIR;
static {
- PropertyConfigurator.configure("core/src/test/resources/log4j.properties");
+ Configurator.reconfigure();
new File("Experiments").mkdir();
DIR = "Experiments/Run" + Long.valueOf(System.currentTimeMillis()).toString().substring(8);
diff --git a/tools/src/test/resources/log4j.properties b/tools/src/test/resources/log4j.properties
deleted file mode 100644
index 3aca07dc530..00000000000
--- a/tools/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=INFO, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.org.apache.kafka=INFO
-log4j.logger.org.eclipse.jetty=INFO
diff --git a/tools/src/test/resources/log4j2.yaml b/tools/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..aef7e561628
--- /dev/null
+++ b/tools/src/test/resources/log4j2.yaml
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: INFO
+ AppenderRef:
+ - ref: STDOUT
+
+ Logger:
+ - name: org.apache.kafka
+ level: ERROR
+
+ - name: org.eclipse.jetty
+ level: ERROR
diff --git a/trogdor/src/test/resources/log4j.properties b/trogdor/src/test/resources/log4j.properties
deleted file mode 100644
index 5291604d49a..00000000000
--- a/trogdor/src/test/resources/log4j.properties
+++ /dev/null
@@ -1,22 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements. See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-log4j.rootLogger=TRACE, stdout
-
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n
-
-log4j.logger.org.apache.kafka=INFO
-log4j.logger.org.eclipse.jetty=INFO
diff --git a/trogdor/src/test/resources/log4j2.yaml b/trogdor/src/test/resources/log4j2.yaml
new file mode 100644
index 00000000000..4c3355e307e
--- /dev/null
+++ b/trogdor/src/test/resources/log4j2.yaml
@@ -0,0 +1,39 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Configuration:
+ Properties:
+ Property:
+ - name: "logPattern"
+ value: "[%d] %p %m (%c:%L)%n"
+
+ Appenders:
+ Console:
+ name: STDOUT
+ PatternLayout:
+ pattern: "${logPattern}"
+
+ Loggers:
+ Root:
+ level: TRACE
+ AppenderRef:
+ - ref: STDOUT
+
+ Logger:
+ - name: org.apache.kafka
+ level: ERROR
+
+ - name: org.eclipse.jetty
+ level: ERROR