From 4cc09d9f3d18e052f0be7489ac4597f936f744d2 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 3 Oct 2024 00:08:11 +0800 Subject: [PATCH 01/46] WIP milestone 1 --- README.md | 4 +- bin/connect-distributed.sh | 2 +- bin/connect-mirror-maker.sh | 2 +- bin/connect-standalone.sh | 2 +- bin/kafka-run-class.sh | 2 +- bin/kafka-server-start.sh | 2 +- bin/windows/connect-distributed.bat | 2 +- bin/windows/connect-standalone.bat | 2 +- bin/windows/kafka-run-class.bat | 2 +- bin/windows/kafka-server-start.bat | 2 +- bin/windows/zookeeper-server-start.bat | 2 +- bin/zookeeper-server-start.sh | 2 +- build.gradle | 432 ++++++++++-------- checkstyle/import-control-core.xml | 13 +- checkstyle/import-control.xml | 10 +- .../common/utils/LogCaptureAppender.java | 137 ++++-- clients/src/test/resources/log4j.properties | 23 - clients/src/test/resources/log4j2.properties | 31 ++ config/connect-log4j.properties | 39 -- config/connect-log4j2.properties | 39 ++ config/log4j.properties | 96 ---- config/log4j2.properties | 157 +++++++ .../tools-log4j2.properties | 16 +- .../file/src/test/resources/log4j2.properties | 23 +- .../mirror/MirrorSourceConnectorTest.java | 6 +- .../src/test/resources/log4j.properties | 33 -- .../src/test/resources/log4j2.properties} | 29 +- .../kafka/connect/runtime/AbstractHerder.java | 2 +- .../apache/kafka/connect/runtime/Loggers.java | 90 ++-- .../rest/resources/LoggingResource.java | 2 +- .../kafka/connect/runtime/LoggersTest.java | 110 +++-- .../SourceTaskOffsetCommitterTest.java | 2 +- .../connect/runtime/WorkerSourceTaskTest.java | 2 +- .../runtime/rest/ConnectRestServerTest.java | 3 +- .../src/test/resources/log4j.properties | 37 -- .../src/test/resources/log4j2.properties | 49 ++ .../kafka/docker/KafkaDockerWrapper.scala | 8 +- .../scala/kafka/utils/Log4jController.scala | 127 ++--- .../test/java/kafka/admin/AclCommandTest.java | 6 +- .../src/test/resources/log4j2.properties | 33 +- ...g4j.properties => kafka.log4j2.properties} | 18 +- 
.../kafka/docker/KafkaDockerWrapperTest.scala | 44 +- .../UncleanLeaderElectionTest.scala | 15 +- .../unit/kafka/network/SocketServerTest.scala | 9 +- .../3.7.0/jvm/Dockerfile | 4 +- docker/examples/README.md | 6 +- docker/jvm/Dockerfile | 4 +- docker/native/Dockerfile | 4 +- docker/native/launch | 4 +- docs/ops.html | 2 +- docs/streams/tutorial.html | 2 +- gradle/dependencies.gradle | 12 +- .../src/test/resources/log4j2.properties | 39 ++ metadata/src/test/resources/log4j2.properties | 33 ++ raft/bin/test-kraft-server-start.sh | 2 +- raft/config/kraft-log4j2.properties | 35 ++ raft/src/test/resources/log4j.properties | 22 - raft/src/test/resources/log4j2.properties | 33 ++ .../src/test/resources/test/log4j.properties | 21 - .../src/test/resources/test/log4j2.properties | 20 +- shell/src/test/resources/log4j.properties | 19 - .../src/test/resources/log4j2.properties | 14 +- storage/src/test/resources/log4j2.properties | 53 +++ .../src/main/resources/log4j.properties | 19 - .../src/main/resources/log4j2.properties | 15 +- .../kafka/streams/StreamsConfigTest.java | 2 +- .../internals/InternalTopicManagerTest.java | 2 +- .../internals/PartitionGroupTest.java | 2 +- .../internals/RecordCollectorTest.java | 5 +- .../internals/StoreChangelogReaderTest.java | 2 +- .../processor/internals/TaskManagerTest.java | 2 +- .../internals/AbstractKeyValueStoreTest.java | 2 +- .../AbstractSessionBytesStoreTest.java | 2 +- .../AbstractWindowBytesStoreTest.java | 2 +- streams/src/test/resources/log4j.properties | 37 -- streams/src/test/resources/log4j2.properties | 69 +++ .../{log4j.properties => log4j2.properties} | 33 +- .../src/test/resources/log4j2.properties | 21 +- tests/kafkatest/services/connect.py | 6 +- tests/kafkatest/services/console_consumer.py | 4 +- tests/kafkatest/services/kafka/kafka.py | 4 +- .../services/kafka/templates/log4j.properties | 141 ------ .../kafka/templates/log4j2.properties | 298 ++++++++++++ .../performance/consumer_performance.py | 4 +- 
.../performance/end_to_end_latency.py | 4 +- .../performance/producer_performance.py | 4 +- ...g4j.properties => tools_log4j2.properties} | 22 +- tests/kafkatest/services/streams.py | 4 +- ...j.properties => connect_log4j2.properties} | 21 +- .../templates/tools_log4j2.properties} | 27 +- .../services/transactional_message_copier.py | 4 +- .../trogdor/templates/log4j.properties | 23 - .../trogdor/templates/log4j2.properties | 45 ++ tests/kafkatest/services/trogdor/trogdor.py | 12 +- .../kafkatest/services/verifiable_consumer.py | 4 +- .../kafkatest/services/verifiable_producer.py | 4 +- .../streams/streams_relational_smoke_test.py | 4 +- .../templates/log4j2_template.properties} | 27 +- .../kafka/tools/VerifiableLog4jAppender.java | 21 +- .../tools/other/ReplicationQuotasTestRig.java | 4 +- tools/src/test/resources/log4j.properties | 22 - tools/src/test/resources/log4j2.properties | 33 ++ trogdor/src/test/resources/log4j.properties | 22 - trogdor/src/test/resources/log4j2.properties | 33 ++ 104 files changed, 1837 insertions(+), 1171 deletions(-) delete mode 100644 clients/src/test/resources/log4j.properties create mode 100644 clients/src/test/resources/log4j2.properties delete mode 100644 config/connect-log4j.properties create mode 100644 config/connect-log4j2.properties delete mode 100644 config/log4j.properties create mode 100644 config/log4j2.properties rename group-coordinator/src/test/resources/log4j.properties => config/tools-log4j2.properties (71%) rename config/tools-log4j.properties => connect/file/src/test/resources/log4j2.properties (61%) delete mode 100644 connect/mirror/src/test/resources/log4j.properties rename connect/{file/src/test/resources/log4j.properties => mirror/src/test/resources/log4j2.properties} (52%) delete mode 100644 connect/runtime/src/test/resources/log4j.properties create mode 100644 connect/runtime/src/test/resources/log4j2.properties rename storage/src/test/resources/log4j.properties => core/src/test/resources/log4j2.properties 
(52%) rename core/src/test/scala/other/{kafka.log4j.properties => kafka.log4j2.properties} (61%) create mode 100644 group-coordinator/src/test/resources/log4j2.properties create mode 100644 metadata/src/test/resources/log4j2.properties create mode 100644 raft/config/kraft-log4j2.properties delete mode 100644 raft/src/test/resources/log4j.properties create mode 100644 raft/src/test/resources/log4j2.properties delete mode 100644 server-common/src/test/resources/test/log4j.properties rename raft/config/kraft-log4j.properties => server-common/src/test/resources/test/log4j2.properties (66%) delete mode 100644 shell/src/test/resources/log4j.properties rename streams/test-utils/src/test/resources/log4j.properties => shell/src/test/resources/log4j2.properties (73%) create mode 100644 storage/src/test/resources/log4j2.properties delete mode 100644 streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j.properties rename metadata/src/test/resources/log4j.properties => streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.properties (70%) delete mode 100644 streams/src/test/resources/log4j.properties create mode 100644 streams/src/test/resources/log4j2.properties rename streams/streams-scala/src/test/resources/{log4j.properties => log4j2.properties} (56%) rename core/src/test/resources/log4j.properties => streams/test-utils/src/test/resources/log4j2.properties (66%) delete mode 100644 tests/kafkatest/services/kafka/templates/log4j.properties create mode 100644 tests/kafkatest/services/kafka/templates/log4j2.properties rename tests/kafkatest/services/performance/templates/{tools_log4j.properties => tools_log4j2.properties} (66%) rename tests/kafkatest/services/templates/{connect_log4j.properties => connect_log4j2.properties} (67%) rename tests/kafkatest/{tests/streams/templates/log4j_template.properties => services/templates/tools_log4j2.properties} (65%) delete mode 100644 
tests/kafkatest/services/trogdor/templates/log4j.properties create mode 100644 tests/kafkatest/services/trogdor/templates/log4j2.properties rename tests/kafkatest/{services/templates/tools_log4j.properties => tests/streams/templates/log4j2_template.properties} (65%) delete mode 100644 tools/src/test/resources/log4j.properties create mode 100644 tools/src/test/resources/log4j2.properties delete mode 100644 trogdor/src/test/resources/log4j.properties create mode 100644 trogdor/src/test/resources/log4j2.properties diff --git a/README.md b/README.md index 7bacfee75d825..0bbef9d34a66f 100644 --- a/README.md +++ b/README.md @@ -54,9 +54,9 @@ Follow instructions in https://kafka.apache.org/quickstart ./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testTimeToNextUpdate ### Running a particular unit/integration test with log4j output ### -By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j.properties` file in the module's `src/test/resources` directory. +By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.properties` file in the module's `src/test/resources` directory. 
-For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j.properties#L21) in `clients/src/test/resources/log4j.properties` +For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j.properties#L21) in `clients/src/test/resources/log4j2.properties` to `log4j.logger.org.apache.kafka=INFO` and then run: ./gradlew cleanTest clients:test --tests NetworkClientTest diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh index b8088ad923451..0e3befbb0e01d 100755 --- a/bin/connect-distributed.sh +++ b/bin/connect-distributed.sh @@ -23,7 +23,7 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j2.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh index 8e2b2e162daac..4831d91f30724 100755 --- a/bin/connect-mirror-maker.sh +++ b/bin/connect-mirror-maker.sh @@ -23,7 +23,7 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j2.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh index bef78d658fda9..d574722c228bd 100755 --- a/bin/connect-standalone.sh +++ b/bin/connect-standalone.sh @@ -23,7 +23,7 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" + export 
KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j2.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/kafka-run-class.sh b/bin/kafka-run-class.sh index b3291e461f2ba..94df649729de8 100755 --- a/bin/kafka-run-class.sh +++ b/bin/kafka-run-class.sh @@ -228,7 +228,7 @@ fi # Log4j settings if [ -z "$KAFKA_LOG4J_OPTS" ]; then # Log to console. This is a tool. - LOG4J_DIR="$base_dir/config/tools-log4j.properties" + LOG4J_DIR="$base_dir/config/tools-log4j2.properties" # If Cygwin is detected, LOG4J_DIR is converted to Windows format. (( WINDOWS_OS_FORMAT )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}") KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}" diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh index 5a53126172de9..9505eb62f233e 100755 --- a/bin/kafka-server-start.sh +++ b/bin/kafka-server-start.sh @@ -22,7 +22,7 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j2.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat index 0535085bde507..2b473e4d8095a 100644 --- a/bin/windows/connect-distributed.bat +++ b/bin/windows/connect-distributed.bat @@ -27,7 +27,7 @@ popd rem Log4j settings IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j2.properties ) "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %* diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat index 12ebb21dc9a85..e443f6a1c95fd 100644 --- a/bin/windows/connect-standalone.bat +++ b/bin/windows/connect-standalone.bat @@ -27,7 +27,7 @@ popd rem 
Log4j settings IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j2.properties ) "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectStandalone %* diff --git a/bin/windows/kafka-run-class.bat b/bin/windows/kafka-run-class.bat index a163ccd0a7c08..e16287cd4114f 100755 --- a/bin/windows/kafka-run-class.bat +++ b/bin/windows/kafka-run-class.bat @@ -116,7 +116,7 @@ IF ["%LOG_DIR%"] EQU [""] ( rem Log4j settings IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/tools-log4j.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/tools-log4j2.properties ) ELSE ( rem create logs directory IF not exist "%LOG_DIR%" ( diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat index 8624eda9ff089..e03e6398dacd7 100644 --- a/bin/windows/kafka-server-start.bat +++ b/bin/windows/kafka-server-start.bat @@ -21,7 +21,7 @@ IF [%1] EQU [] ( SetLocal IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j2.properties ) IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( rem detect OS architecture diff --git a/bin/windows/zookeeper-server-start.bat b/bin/windows/zookeeper-server-start.bat index f201a585135d2..4549f240d19ee 100644 --- a/bin/windows/zookeeper-server-start.bat +++ b/bin/windows/zookeeper-server-start.bat @@ -21,7 +21,7 @@ IF [%1] EQU [] ( SetLocal IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j2.properties ) IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M diff --git a/bin/zookeeper-server-start.sh 
b/bin/zookeeper-server-start.sh index bd9c1142817c0..b1d72eb29537c 100755 --- a/bin/zookeeper-server-start.sh +++ b/bin/zookeeper-server-start.sh @@ -22,7 +22,7 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j2.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/build.gradle b/build.gradle index 4b98bf32237d5..e67450379be71 100644 --- a/build.gradle +++ b/build.gradle @@ -64,22 +64,22 @@ ext { // for now and we'll fix it properly (where possible) via KAFKA-13275. if (JavaVersion.current().isCompatibleWith(JavaVersion.VERSION_16)) defaultJvmArgs.addAll( - "--add-opens=java.base/java.io=ALL-UNNAMED", - "--add-opens=java.base/java.lang=ALL-UNNAMED", - "--add-opens=java.base/java.nio=ALL-UNNAMED", - "--add-opens=java.base/java.nio.file=ALL-UNNAMED", - "--add-opens=java.base/java.util=ALL-UNNAMED", - "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", - "--add-opens=java.base/java.util.regex=ALL-UNNAMED", - "--add-opens=java.base/java.util.stream=ALL-UNNAMED", - "--add-opens=java.base/java.text=ALL-UNNAMED", - "--add-opens=java.base/java.time=ALL-UNNAMED", - "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" + "--add-opens=java.base/java.io=ALL-UNNAMED", + "--add-opens=java.base/java.lang=ALL-UNNAMED", + "--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.nio.file=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + "--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + "--add-opens=java.base/java.util.regex=ALL-UNNAMED", + "--add-opens=java.base/java.util.stream=ALL-UNNAMED", + "--add-opens=java.base/java.text=ALL-UNNAMED", + "--add-opens=java.base/java.time=ALL-UNNAMED", + "--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED" ) maxTestForks = project.hasProperty('maxParallelForks') ? 
maxParallelForks.toInteger() : Runtime.runtime.availableProcessors() maxScalacThreads = project.hasProperty('maxScalacThreads') ? maxScalacThreads.toInteger() : - Math.min(Runtime.runtime.availableProcessors(), 8) + Math.min(Runtime.runtime.availableProcessors(), 8) userIgnoreFailures = project.hasProperty('ignoreFailures') ? ignoreFailures.toBoolean() : false userMaxTestRetries = project.hasProperty('maxTestRetries') ? maxTestRetries.toInteger() : 0 @@ -152,18 +152,20 @@ allprojects { if (name != "zinc") { resolutionStrategy { force( - // be explicit about the javassist dependency version instead of relying on the transitive version - libs.javassist, - // ensure we have a single version in the classpath despite transitive dependencies - libs.scalaLibrary, - libs.scalaReflect, - libs.jacksonAnnotations, - // be explicit about the Netty dependency version instead of relying on the version set by - // ZooKeeper (potentially older and containing CVEs) - libs.nettyHandler, - libs.nettyTransportNativeEpoll, - // be explicit about the reload4j version instead of relying on the transitive versions - libs.reload4j + // be explicit about the javassist dependency version instead of relying on the transitive version + libs.javassist, + // ensure we have a single version in the classpath despite transitive dependencies + libs.scalaLibrary, + libs.scalaReflect, + libs.jacksonAnnotations, + // be explicit about the Netty dependency version instead of relying on the version set by + // ZooKeeper (potentially older and containing CVEs) + libs.nettyHandler, + libs.nettyTransportNativeEpoll, + // be explicit about the reload4j version instead of relying on the transitive versions + // libs.reload4j + libs.log4j2Api, + libs.log4j2Core ) } } @@ -226,26 +228,26 @@ if (repo != null) { excludes = new ArrayList(repo.clean(ignore: false, directories: true, dryRun: true)) // And some of the files that we have checked in should also be excluded from this check excludes.addAll([ - 
'**/.git/**', - '**/build/**', - 'CONTRIBUTING.md', - 'PULL_REQUEST_TEMPLATE.md', - 'gradlew', - 'gradlew.bat', - 'gradle/wrapper/gradle-wrapper.properties', - 'trogdor/README.md', - '**/README.md', - '**/id_rsa', - '**/id_rsa.pub', - 'checkstyle/suppressions.xml', - 'streams/quickstart/java/src/test/resources/projects/basic/goal.txt', - 'streams/streams-scala/logs/*', - 'licenses/*', - '**/generated/**', - 'clients/src/test/resources/serializedData/*', - 'docker/test/fixtures/secrets/*', - 'docker/examples/fixtures/secrets/*', - 'docker/docker_official_images/.gitkeep' + '**/.git/**', + '**/build/**', + 'CONTRIBUTING.md', + 'PULL_REQUEST_TEMPLATE.md', + 'gradlew', + 'gradlew.bat', + 'gradle/wrapper/gradle-wrapper.properties', + 'trogdor/README.md', + '**/README.md', + '**/id_rsa', + '**/id_rsa.pub', + 'checkstyle/suppressions.xml', + 'streams/quickstart/java/src/test/resources/projects/basic/goal.txt', + 'streams/streams-scala/logs/*', + 'licenses/*', + '**/generated/**', + 'clients/src/test/resources/serializedData/*', + 'docker/test/fixtures/secrets/*', + 'docker/examples/fixtures/secrets/*', + 'docker/docker_official_images/.gitkeep' ]) } } else { @@ -504,7 +506,7 @@ subprojects { maxFailures = userMaxTestRetryFailures } } - + // As we process results, check if there were any test failures. 
afterSuite { desc, result -> if (result.resultType == TestResult.ResultType.FAILURE) { @@ -671,27 +673,27 @@ subprojects { scalaCompileOptions.keepAliveMode = userKeepAliveMode scalaCompileOptions.additionalParameters = [ - "-deprecation:false", - "-unchecked", - "-encoding", "utf8", - "-Xlog-reflective-calls", - "-feature", - "-language:postfixOps", - "-language:implicitConversions", - "-language:existentials", - "-Ybackend-parallelism", maxScalacThreads.toString(), - "-Xlint:constant", - "-Xlint:delayedinit-select", - "-Xlint:doc-detached", - "-Xlint:missing-interpolator", - "-Xlint:nullary-unit", - "-Xlint:option-implicit", - "-Xlint:package-object-classes", - "-Xlint:poly-implicit-overload", - "-Xlint:private-shadow", - "-Xlint:stars-align", - "-Xlint:type-parameter-shadow", - "-Xlint:unused" + "-deprecation:false", + "-unchecked", + "-encoding", "utf8", + "-Xlog-reflective-calls", + "-feature", + "-language:postfixOps", + "-language:implicitConversions", + "-language:existentials", + "-Ybackend-parallelism", maxScalacThreads.toString(), + "-Xlint:constant", + "-Xlint:delayedinit-select", + "-Xlint:doc-detached", + "-Xlint:missing-interpolator", + "-Xlint:nullary-unit", + "-Xlint:option-implicit", + "-Xlint:package-object-classes", + "-Xlint:poly-implicit-overload", + "-Xlint:private-shadow", + "-Xlint:stars-align", + "-Xlint:type-parameter-shadow", + "-Xlint:unused" ] // See README.md for details on this option and the meaning of each value @@ -718,9 +720,9 @@ subprojects { // Scala 2.13 removes them, see https://github.com/scala/scala/pull/6502 and https://github.com/scala/scala/pull/5969 if (versions.baseScala == '2.12') { scalaCompileOptions.additionalParameters += [ - "-Xlint:by-name-right-associative", - "-Xlint:nullary-override", - "-Xlint:unsound-match" + "-Xlint:by-name-right-associative", + "-Xlint:nullary-override", + "-Xlint:unsound-match" ] } @@ -864,15 +866,15 @@ if (userEnableTestCoverage) { } def connectPkgs = [ - 'connect:api', - 
'connect:basic-auth-extension', - 'connect:file', - 'connect:json', - 'connect:runtime', - 'connect:test-plugins', - 'connect:transforms', - 'connect:mirror', - 'connect:mirror-client' + 'connect:api', + 'connect:basic-auth-extension', + 'connect:file', + 'connect:json', + 'connect:runtime', + 'connect:test-plugins', + 'connect:transforms', + 'connect:mirror', + 'connect:mirror-client' ] tasks.create(name: "jarConnect", dependsOn: connectPkgs.collect { it + ":jar" }) {} @@ -897,13 +899,16 @@ project(':server') { implementation libs.slf4jApi - compileOnly libs.reload4j + // compileOnly libs.reload4j + compileOnly libs.log4j2Api + compileOnly libs.log4j2Core testImplementation project(':clients').sourceSets.test.output testImplementation libs.mockitoCore testImplementation libs.junitJupiter - testImplementation libs.slf4jReload4j +// testImplementation libs.slf4jReload4j + testImplementation libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -917,8 +922,8 @@ project(':server') { doLast { def data = [ - commitId: commitId, - version: version, + commitId: commitId, + version: version, ] receiptFile.parentFile.mkdirs() @@ -958,7 +963,8 @@ project(':share') { implementation libs.slf4jApi testImplementation libs.junitJupiter - testImplementation libs.slf4jReload4j +// testImplementation libs.slf4jReload4j + testImplementation libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -1050,8 +1056,10 @@ project(':core') { } // ZooKeeperMain depends on commons-cli but declares the dependency as `provided` implementation libs.commonsCli + implementation libs.log4j2Core - compileOnly libs.reload4j +// compileOnly libs.reload4j + compileOnly libs.log4j2Api testImplementation project(':clients').sourceSets.test.output testImplementation project(':group-coordinator').sourceSets.test.output @@ -1081,7 +1089,8 @@ project(':core') { testImplementation libs.apachedsMavibotPartition testImplementation libs.apachedsJdbmPartition testImplementation 
libs.junitJupiter - testImplementation libs.slf4jReload4j +// testImplementation libs.slf4jReload4j + testImplementation libs.slf4jLog4j2 testImplementation libs.caffeine testRuntimeOnly libs.junitPlatformLanucher @@ -1270,7 +1279,7 @@ project(':core') { jar.manifest { attributes( - 'Version': "${version}" + 'Version': "${version}" ) } @@ -1331,12 +1340,15 @@ project(':metadata') { implementation libs.jacksonDatabind implementation libs.jacksonJDK8Datatypes implementation libs.metrics - compileOnly libs.reload4j +// compileOnly libs.reload4j + compileOnly libs.log4j2Api + compileOnly libs.log4j2Core testImplementation libs.junitJupiter testImplementation libs.jqwik testImplementation libs.hamcrest testImplementation libs.mockitoCore - testImplementation libs.slf4jReload4j +// testImplementation libs.slf4jReload4j + testImplementation libs.slf4jLog4j2 testImplementation project(':clients').sourceSets.test.output testImplementation project(':raft').sourceSets.test.output testImplementation project(':server-common').sourceSets.test.output @@ -1354,10 +1366,10 @@ project(':metadata') { "-i", "src/main/resources/common/metadata", "-m", "MessageDataGenerator", "JsonConverterGenerator", "-t", "MetadataRecordTypeGenerator", "MetadataJsonConvertersGenerator" - ] + ] inputs.dir("src/main/resources/common/metadata") - .withPropertyName("messages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/common/metadata") } @@ -1476,7 +1488,8 @@ project(':group-coordinator') { testImplementation libs.junitJupiter testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testImplementation libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') @@ -1512,8 +1525,8 @@ project(':group-coordinator') { "-m", 
"MessageDataGenerator", "JsonConverterGenerator" ] inputs.dir("src/main/resources/common/message") - .withPropertyName("messages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/coordinator/group/generated") } @@ -1538,7 +1551,8 @@ project(':test-common') { implementation libs.slf4jApi testImplementation libs.junitJupiter testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -1681,7 +1695,8 @@ project(':coordinator-common') { testImplementation libs.junitJupiter testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -1733,7 +1748,8 @@ project(':share-coordinator') { testImplementation libs.junitJupiter testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') @@ -1844,11 +1860,16 @@ project(':clients') { testImplementation libs.jacksonJaxrsJsonProvider testImplementation libs.jose4j testImplementation libs.junitJupiter - testImplementation libs.reload4j +// testImplementation libs.reload4j + testImplementation libs.log4j2Api + testImplementation libs.log4j2Core + testImplementation libs.bndlib + testImplementation libs.spotbugs testImplementation libs.mockitoCore testImplementation libs.mockitoJunitJupiter // supports MockitoExtension - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.jacksonDatabind testRuntimeOnly libs.jacksonJDK8Datatypes testRuntimeOnly 
libs.junitPlatformLanucher @@ -1865,8 +1886,8 @@ project(':clients') { doLast { def data = [ - commitId: commitId, - version: version, + commitId: commitId, + version: version, ] receiptFile.parentFile.mkdirs() @@ -1922,10 +1943,10 @@ project(':clients') { "-i", "src/main/resources/common/message", "-t", "ApiMessageTypeGenerator", "-m", "MessageDataGenerator", "JsonConverterGenerator" - ] + ] inputs.dir("src/main/resources/common/message") - .withPropertyName("messages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/common/message") } @@ -1937,10 +1958,10 @@ project(':clients') { "-o", "${projectDir}/build/generated/test/java/org/apache/kafka/common/message", "-i", "src/test/resources/common/message", "-m", "MessageDataGenerator", "JsonConverterGenerator" - ] + ] inputs.dir("src/test/resources/common/message") - .withPropertyName("testMessages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("testMessages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/test/java/org/apache/kafka/common/message") } @@ -2016,7 +2037,8 @@ project(':raft') { testImplementation libs.jqwik testImplementation libs.hamcrest - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') @@ -2031,8 +2053,8 @@ project(':raft') { doLast { def data = [ - commitId: commitId, - version: version, + commitId: commitId, + version: version, ] receiptFile.parentFile.mkdirs() @@ -2049,8 +2071,8 @@ project(':raft') { "-i", "src/main/resources/common/message", "-m", "MessageDataGenerator", "JsonConverterGenerator"] inputs.dir("src/main/resources/common/message") - .withPropertyName("messages") - 
.withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/raft/generated") } @@ -2074,7 +2096,7 @@ project(':raft') { jar { dependsOn createVersionFile from("$buildDir") { - include "kafka/$buildVersionFileName" + include "kafka/$buildVersionFileName" } } @@ -2112,7 +2134,8 @@ project(':server-common') { testImplementation libs.mockitoCore testImplementation libs.hamcrest - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -2171,7 +2194,8 @@ project(':storage:storage-api') { testImplementation libs.junitJupiter testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -2261,7 +2285,8 @@ project(':storage') { testImplementation libs.mockitoCore testImplementation libs.bcpkix - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') @@ -2295,8 +2320,8 @@ project(':storage') { "-m", "MessageDataGenerator", "JsonConverterGenerator", "-t", "MetadataRecordTypeGenerator", "MetadataJsonConvertersGenerator" ] inputs.dir("src/main/resources/message") - .withPropertyName("messages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/server/log/remote/metadata/storage/generated") } @@ -2431,7 +2456,10 @@ project(':tools') { implementation libs.jacksonDataformatCsv implementation libs.jacksonJDK8Datatypes implementation libs.slf4jApi - implementation libs.slf4jReload4j +// 
implementation libs.slf4jReload4j + implementation libs.slf4jLog4j2 + implementation libs.log4j2Core + implementation libs.spotbugs implementation libs.joptSimple implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation @@ -2458,7 +2486,8 @@ project(':tools') { testImplementation(libs.jfreechart) { exclude group: 'junit', module: 'junit' } - testImplementation libs.reload4j +// testImplementation libs.reload4j + testImplementation libs.log4j2Api testImplementation libs.apachedsCoreApi testImplementation libs.apachedsInterceptorKerberos testImplementation libs.apachedsProtocolShared @@ -2497,7 +2526,9 @@ project(':trogdor') { implementation libs.jacksonDatabind implementation libs.jacksonJDK8Datatypes implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core implementation libs.jacksonJaxrsJsonProvider implementation libs.jerseyContainerServlet @@ -2513,7 +2544,8 @@ project(':trogdor') { testImplementation project(':clients').sourceSets.test.output testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -2561,7 +2593,8 @@ project(':shell') { testImplementation project(':server-common').sourceSets.test.output testImplementation libs.junitJupiter - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -2601,6 +2634,7 @@ project(':streams') { implementation libs.slf4jApi implementation libs.jacksonAnnotations implementation libs.jacksonDatabind + implementation libs.bndlib // testCompileOnly prevents streams from exporting a dependency on test-utils, which would cause a dependency cycle testCompileOnly project(':streams:test-utils') @@ -2617,7 +2651,9 @@ project(':streams') { testImplementation project(':server-common') 
testImplementation project(':server-common').sourceSets.test.output testImplementation project(':server') - testImplementation libs.reload4j +// testImplementation libs.reload4j + testImplementation libs.log4j2Api + testImplementation libs.log4j2Core testImplementation libs.junitJupiter testImplementation libs.bcpkix testImplementation libs.hamcrest @@ -2627,7 +2663,8 @@ project(':streams') { testImplementation project(':group-coordinator') testRuntimeOnly project(':streams:test-utils') - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') @@ -2640,10 +2677,10 @@ project(':streams') { "-o", "${projectDir}/build/generated/main/java/org/apache/kafka/streams/internals/generated", "-i", "src/main/resources/common/message", "-m", "MessageDataGenerator" - ] + ] inputs.dir("src/main/resources/common/message") - .withPropertyName("messages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/streams/internals/generated") } @@ -2715,36 +2752,36 @@ project(':streams') { } task testAll( - dependsOn: [ - ':streams:test', - ':streams:test-utils:test', - ':streams:streams-scala:test', - ':streams:upgrade-system-tests-0100:test', - ':streams:upgrade-system-tests-0101:test', - ':streams:upgrade-system-tests-0102:test', - ':streams:upgrade-system-tests-0110:test', - ':streams:upgrade-system-tests-10:test', - ':streams:upgrade-system-tests-11:test', - ':streams:upgrade-system-tests-20:test', - ':streams:upgrade-system-tests-21:test', - ':streams:upgrade-system-tests-22:test', - ':streams:upgrade-system-tests-23:test', - ':streams:upgrade-system-tests-24:test', - ':streams:upgrade-system-tests-25:test', - ':streams:upgrade-system-tests-26:test', - 
':streams:upgrade-system-tests-27:test', - ':streams:upgrade-system-tests-28:test', - ':streams:upgrade-system-tests-30:test', - ':streams:upgrade-system-tests-31:test', - ':streams:upgrade-system-tests-32:test', - ':streams:upgrade-system-tests-33:test', - ':streams:upgrade-system-tests-34:test', - ':streams:upgrade-system-tests-35:test', - ':streams:upgrade-system-tests-36:test', - ':streams:upgrade-system-tests-37:test', - ':streams:upgrade-system-tests-38:test', - ':streams:examples:test' - ] + dependsOn: [ + ':streams:test', + ':streams:test-utils:test', + ':streams:streams-scala:test', + ':streams:upgrade-system-tests-0100:test', + ':streams:upgrade-system-tests-0101:test', + ':streams:upgrade-system-tests-0102:test', + ':streams:upgrade-system-tests-0110:test', + ':streams:upgrade-system-tests-10:test', + ':streams:upgrade-system-tests-11:test', + ':streams:upgrade-system-tests-20:test', + ':streams:upgrade-system-tests-21:test', + ':streams:upgrade-system-tests-22:test', + ':streams:upgrade-system-tests-23:test', + ':streams:upgrade-system-tests-24:test', + ':streams:upgrade-system-tests-25:test', + ':streams:upgrade-system-tests-26:test', + ':streams:upgrade-system-tests-27:test', + ':streams:upgrade-system-tests-28:test', + ':streams:upgrade-system-tests-30:test', + ':streams:upgrade-system-tests-31:test', + ':streams:upgrade-system-tests-32:test', + ':streams:upgrade-system-tests-33:test', + ':streams:upgrade-system-tests-34:test', + ':streams:upgrade-system-tests-35:test', + ':streams:upgrade-system-tests-36:test', + ':streams:upgrade-system-tests-37:test', + ':streams:upgrade-system-tests-38:test', + ':streams:examples:test' + ] ) } @@ -2779,7 +2816,8 @@ project(':streams:streams-scala') { testImplementation libs.mockitoCore testImplementation libs.mockitoJunitJupiter // supports MockitoExtension testImplementation libs.hamcrest - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 
testRuntimeOnly libs.junitPlatformLanucher } @@ -2832,7 +2870,8 @@ project(':streams:test-utils') { testImplementation libs.mockitoCore testImplementation libs.hamcrest - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } @@ -2864,7 +2903,8 @@ project(':streams:examples') { implementation project(':streams') - implementation libs.slf4jReload4j +// implementation libs.slf4jReload4j + implementation libs.slf4jLog4j2 testImplementation project(':streams:test-utils') testImplementation project(':clients').sourceSets.test.output // for org.apache.kafka.test.IntegrationTest @@ -3294,7 +3334,8 @@ project(':jmh-benchmarks') { implementation libs.jacksonDatabind implementation libs.metrics implementation libs.mockitoCore - implementation libs.slf4jReload4j +// implementation libs.slf4jReload4j + implementation libs.slf4jLog4j2 implementation libs.scalaLibrary implementation libs.scalaJava8Compat } @@ -3320,14 +3361,14 @@ project(':jmh-benchmarks') { doFirst { if (System.getProperty("jmhArgs")) { - args System.getProperty("jmhArgs").split(' ') + args System.getProperty("jmhArgs").split(' ') } args = [shadowJar.archivePath, *args] } } javadoc { - enabled = false + enabled = false } } @@ -3362,12 +3403,15 @@ project(':connect:api') { dependencies { api project(':clients') implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core implementation libs.jaxrsApi testImplementation libs.junitJupiter testRuntimeOnly libs.junitPlatformLanucher - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testImplementation project(':clients').sourceSets.test.output } @@ -3398,11 +3442,14 @@ project(':connect:transforms') { api project(':connect:api') implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly 
libs.log4j2Api + runtimeOnly libs.log4j2Core testImplementation libs.junitJupiter - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher testImplementation project(':clients').sourceSets.test.output } @@ -3438,11 +3485,14 @@ project(':connect:json') { api libs.jacksonAfterburner implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core testImplementation libs.junitJupiter - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher testImplementation project(':clients').sourceSets.test.output } @@ -3483,7 +3533,11 @@ project(':connect:runtime') { api project(':connect:transforms') implementation libs.slf4jApi - implementation libs.reload4j +// implementation libs.reload4j + implementation libs.log4j2Api + implementation libs.log4j2Core + implementation libs.bndlib + implementation libs.spotbugs implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation implementation libs.jacksonAnnotations implementation libs.jacksonJaxrsJsonProvider @@ -3522,7 +3576,8 @@ project(':connect:runtime') { testImplementation libs.mockitoJunitJupiter testImplementation libs.httpclient - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.bcpkix testRuntimeOnly libs.junitPlatformLanucher } @@ -3617,12 +3672,15 @@ project(':connect:file') { dependencies { implementation project(':connect:api') implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core testImplementation libs.junitJupiter testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 
testRuntimeOnly libs.junitPlatformLanucher testImplementation project(':clients').sourceSets.test.output testImplementation project(':connect:runtime') @@ -3658,7 +3716,9 @@ project(':connect:basic-auth-extension') { dependencies { implementation project(':connect:api') implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core implementation libs.jaxrsApi implementation libs.jaxAnnotationApi @@ -3667,7 +3727,8 @@ project(':connect:basic-auth-extension') { testImplementation libs.junitJupiter testImplementation project(':clients').sourceSets.test.output - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.jerseyContainerServlet testRuntimeOnly libs.junitPlatformLanucher } @@ -3704,7 +3765,9 @@ project(':connect:mirror') { implementation libs.argparse4j implementation libs.jacksonAnnotations implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core implementation libs.jacksonAnnotations implementation libs.jacksonJaxrsJsonProvider implementation libs.jerseyContainerServlet @@ -3718,7 +3781,10 @@ project(':connect:mirror') { implementation libs.swaggerAnnotations testImplementation libs.junitJupiter - testImplementation libs.reload4j +// testImplementation libs.reload4j + testImplementation libs.log4j2Api + testImplementation libs.log4j2Core + testImplementation libs.bndlib testImplementation libs.mockitoCore testImplementation project(':clients').sourceSets.test.output testImplementation project(':connect:runtime').sourceSets.test.output @@ -3728,7 +3794,8 @@ project(':connect:mirror') { testImplementation project(':server-common').sourceSets.test.output testRuntimeOnly project(':connect:runtime') - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 
testRuntimeOnly libs.bcpkix testRuntimeOnly libs.junitPlatformLanucher } @@ -3787,12 +3854,15 @@ project(':connect:mirror-client') { dependencies { implementation project(':clients') implementation libs.slf4jApi - runtimeOnly libs.reload4j +// runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core testImplementation libs.junitJupiter testImplementation project(':clients').sourceSets.test.output - testRuntimeOnly libs.slf4jReload4j +// testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher } diff --git a/checkstyle/import-control-core.xml b/checkstyle/import-control-core.xml index f7caef0cef34f..7caf5599fe6b8 100644 --- a/checkstyle/import-control-core.xml +++ b/checkstyle/import-control-core.xml @@ -113,6 +113,14 @@ + + + + + + + + @@ -130,7 +138,7 @@ - + @@ -140,5 +148,8 @@ + + + diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml index e801c2195d536..b61e8f1091f98 100644 --- a/checkstyle/import-control.xml +++ b/checkstyle/import-control.xml @@ -201,7 +201,7 @@ - + @@ -304,7 +304,7 @@ - + @@ -375,7 +375,7 @@ - + @@ -539,6 +539,7 @@ + @@ -553,7 +554,7 @@ - + @@ -565,6 +566,7 @@ + diff --git a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java index 1194b9a5de212..2cac9573a6d2f 100644 --- a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java +++ b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java @@ -16,31 +16,37 @@ */ package org.apache.kafka.common.utils; -import org.apache.log4j.AppenderSkeleton; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; -import org.apache.log4j.spi.LoggingEvent; - +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LogEvent; +import 
org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.LoggerConfig; +import org.apache.logging.log4j.core.config.Property; + +import java.io.PrintWriter; +import java.io.StringWriter; +import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; -public class LogCaptureAppender extends AppenderSkeleton implements AutoCloseable { - private final List events = new LinkedList<>(); +public class LogCaptureAppender extends AbstractAppender implements AutoCloseable { + private final List events = new LinkedList<>(); private final List logLevelChanges = new LinkedList<>(); + private final List loggers = new ArrayList<>(); public static class LogLevelChange { + private final Level originalLevel; + private final Class clazz; public LogLevelChange(final Level originalLevel, final Class clazz) { this.originalLevel = originalLevel; this.clazz = clazz; } - - private final Level originalLevel; - - private final Class clazz; - } @SuppressWarnings("OptionalUsedAsFieldOrParameterType") @@ -74,46 +80,71 @@ public Optional getThrowableClassName() { } } + public LogCaptureAppender(String name) { + super(name, null, null, true, Property.EMPTY_ARRAY); + } + public static LogCaptureAppender createAndRegister() { - final LogCaptureAppender logCaptureAppender = new LogCaptureAppender(); - Logger.getRootLogger().addAppender(logCaptureAppender); + final LogCaptureAppender logCaptureAppender = new LogCaptureAppender("LogCaptureAppender"); + Logger logger = LogManager.getRootLogger(); + logCaptureAppender.addToLogger(logger); return logCaptureAppender; } public static LogCaptureAppender createAndRegister(final Class clazz) { - final LogCaptureAppender logCaptureAppender = new LogCaptureAppender(); - Logger.getLogger(clazz).addAppender(logCaptureAppender); 
+ final LogCaptureAppender logCaptureAppender = new LogCaptureAppender("LogCaptureAppender"); + Logger logger = LogManager.getLogger(clazz); + logCaptureAppender.addToLogger(logger); return logCaptureAppender; } - public void setClassLogger(final Class clazz, Level level) { - logLevelChanges.add(new LogLevelChange(Logger.getLogger(clazz).getLevel(), clazz)); - Logger.getLogger(clazz).setLevel(level); + public void addToLogger(Logger logger) { + org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger; + this.start(); + coreLogger.addAppender(this); + loggers.add(coreLogger); } - public static void unregister(final LogCaptureAppender logCaptureAppender) { - Logger.getRootLogger().removeAppender(logCaptureAppender); + public void setClassLogger(final Class clazz, Level level) { + LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + Configuration config = ctx.getConfiguration(); + String loggerName = clazz.getName(); + LoggerConfig loggerConfig = config.getLoggerConfig(loggerName); + + // Store original level + Level originalLevel = loggerConfig.getLevel(); + logLevelChanges.add(new LogLevelChange(originalLevel, clazz)); + + if (!loggerConfig.getName().equals(loggerName)) { + // Create new LoggerConfig for the specific logger + LoggerConfig newLoggerConfig = new LoggerConfig(loggerName, level, true); + config.addLogger(loggerName, newLoggerConfig); + } else { + // Update existing LoggerConfig + loggerConfig.setLevel(level); + } + ctx.updateLoggers(); } @Override - protected void append(final LoggingEvent event) { + public void append(final LogEvent event) { synchronized (events) { - events.add(event); + events.add(event.toImmutable()); } } public List getMessages(String level) { return getEvents().stream() - .filter(e -> level.equals(e.getLevel())) - .map(Event::getMessage) - .collect(Collectors.toList()); + .filter(e -> level.equals(e.getLevel())) + .map(Event::getMessage) + .collect(Collectors.toList()); } public 
List getMessages() { final LinkedList result = new LinkedList<>(); synchronized (events) { - for (final LoggingEvent event : events) { - result.add(event.getRenderedMessage()); + for (final LogEvent event : events) { + result.add(event.getMessage().getFormattedMessage()); } } return result; @@ -122,25 +153,26 @@ public List getMessages() { public List getEvents() { final LinkedList result = new LinkedList<>(); synchronized (events) { - for (final LoggingEvent event : events) { - final String[] throwableStrRep = event.getThrowableStrRep(); + for (final LogEvent event : events) { + final Throwable throwable = event.getThrown(); final Optional throwableString; final Optional throwableClassName; - if (throwableStrRep == null) { + if (throwable == null) { throwableString = Optional.empty(); throwableClassName = Optional.empty(); } else { - final StringBuilder throwableStringBuilder = new StringBuilder(); - - for (final String s : throwableStrRep) { - throwableStringBuilder.append(s); - } - - throwableString = Optional.of(throwableStringBuilder.toString()); - throwableClassName = Optional.of(event.getThrowableInformation().getThrowable().getClass().getName()); + StringWriter sw = new StringWriter(); + PrintWriter pw = new PrintWriter(sw); + throwable.printStackTrace(pw); + throwableString = Optional.of(sw.toString()); + throwableClassName = Optional.of(throwable.getClass().getName()); } - result.add(new Event(event.getLevel().toString(), event.getRenderedMessage(), throwableString, throwableClassName)); + result.add(new Event( + event.getLevel().toString(), + event.getMessage().getFormattedMessage(), + throwableString, + throwableClassName)); } } return result; @@ -148,15 +180,30 @@ public List getEvents() { @Override public void close() { + LoggerContext ctx = (LoggerContext) LogManager.getContext(false); + Configuration config = ctx.getConfiguration(); + for (final LogLevelChange logLevelChange : logLevelChanges) { - 
Logger.getLogger(logLevelChange.clazz).setLevel(logLevelChange.originalLevel); + String loggerName = logLevelChange.clazz.getName(); + LoggerConfig loggerConfig = config.getLoggerConfig(loggerName); + if (!loggerConfig.getName().equals(loggerName)) { + LoggerConfig newLoggerConfig = new LoggerConfig(loggerName, logLevelChange.originalLevel, true); + config.addLogger(loggerName, newLoggerConfig); + } else { + loggerConfig.setLevel(logLevelChange.originalLevel); + } } logLevelChanges.clear(); - unregister(this); + ctx.updateLoggers(); + + unregister(); } - @Override - public boolean requiresLayout() { - return false; + public void unregister() { + for (org.apache.logging.log4j.core.Logger logger : loggers) { + logger.removeAppender(this); + } + loggers.clear(); + this.stop(); } } diff --git a/clients/src/test/resources/log4j.properties b/clients/src/test/resources/log4j.properties deleted file mode 100644 index 0992580eca1d8..0000000000000 --- a/clients/src/test/resources/log4j.properties +++ /dev/null @@ -1,23 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-log4j.rootLogger=OFF, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n - -log4j.logger.org.apache.kafka=ERROR -# We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper -log4j.logger.org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper=INFO diff --git a/clients/src/test/resources/log4j2.properties b/clients/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..047595ab34cd5 --- /dev/null +++ b/clients/src/test/resources/log4j2.properties @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+name=TestConfig +appenders=console + +# Root logger configuration +rootLogger.level=off +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n + +loggers=org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper +# CommonNameLoggingTrustManagerFactoryWrapper Logger configuration +logger.org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper.name=org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper +logger.org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper.level=INFO diff --git a/config/connect-log4j.properties b/config/connect-log4j.properties deleted file mode 100644 index 979cb3869f952..0000000000000 --- a/config/connect-log4j.properties +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=INFO, stdout, connectAppender - -# Send the logs to the console. 
-# -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout - -# Send the logs to a file, rolling the file at midnight local time. For example, the `File` option specifies the -# location of the log files (e.g. ${kafka.logs.dir}/connect.log), and at midnight local time the file is closed -# and copied in the same directory but with a filename that ends in the `DatePattern` option. -# -log4j.appender.connectAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.connectAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.connectAppender.File=${kafka.logs.dir}/connect.log -log4j.appender.connectAppender.layout=org.apache.log4j.PatternLayout - -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log messages, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. -# -connect.log.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n - -log4j.appender.stdout.layout.ConversionPattern=${connect.log.pattern} -log4j.appender.connectAppender.layout.ConversionPattern=${connect.log.pattern} diff --git a/config/connect-log4j2.properties b/config/connect-log4j2.properties new file mode 100644 index 0000000000000..024bbabebe153 --- /dev/null +++ b/config/connect-log4j2.properties @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name=ConfigConnectConfig + +appenders=stdout, connectAppender + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n + +appender.connectAppender.type=RollingFile +appender.connectAppender.name=ConnectAppender +appender.connectAppender.fileName=${kafka.logs.dir}/connect.log +appender.connectAppender.filePattern=${kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log +appender.connectAppender.layout.type=PatternLayout +appender.connectAppender.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n +appender.connectAppender.policies.type=Policies +appender.connectAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.connectAppender.policies.time.interval=1 +appender.connectAppender.policies.time.modulate=true + +rootLogger.level=INFO +rootLogger.appenderRefs=stdout,connectAppender +rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRef.connectAppender.ref=ConnectAppender diff --git a/config/log4j.properties b/config/log4j.properties deleted file mode 100644 index 4dbdd83f83b74..0000000000000 --- a/config/log4j.properties +++ /dev/null @@ -1,96 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Unspecified loggers and loggers with additivity=true output to server.log and stdout -# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise -log4j.rootLogger=INFO, stdout, kafkaAppender - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log -log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log -log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log -log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender 
-log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log -log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log -log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log -log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -# Change the line below to adjust ZK client logging -log4j.logger.org.apache.zookeeper=INFO - -# Change the two lines below to adjust the general broker logging level (output to server.log and stdout) -log4j.logger.kafka=INFO -log4j.logger.org.apache.kafka=INFO - -# Change to DEBUG or TRACE to enable request logging -log4j.logger.kafka.request.logger=WARN, requestAppender -log4j.additivity.kafka.request.logger=false - -# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output -# related to the handling of requests -#log4j.logger.kafka.network.Processor=TRACE, requestAppender -#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender -#log4j.additivity.kafka.server.KafkaApis=false -log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender -log4j.additivity.kafka.network.RequestChannel$=false - -# Change the line below to adjust KRaft mode controller logging 
-log4j.logger.org.apache.kafka.controller=INFO, controllerAppender -log4j.additivity.org.apache.kafka.controller=false - -# Change the line below to adjust ZK mode controller logging -log4j.logger.kafka.controller=TRACE, controllerAppender -log4j.additivity.kafka.controller=false - -log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender -log4j.additivity.kafka.log.LogCleaner=false - -log4j.logger.state.change.logger=INFO, stateChangeAppender -log4j.additivity.state.change.logger=false - -# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses -log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender -log4j.additivity.kafka.authorizer.logger=false - diff --git a/config/log4j2.properties b/config/log4j2.properties new file mode 100644 index 0000000000000..fadbcba695f67 --- /dev/null +++ b/config/log4j2.properties @@ -0,0 +1,157 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Unspecified loggers and loggers with additivity=true output to server.log and stdout +# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise +name=KafkaLogConfig +appenders=stdout,kafkaAppender,stateChangeAppender,requestAppender,cleanerAppender,controllerAppender,authorizerAppender + +# Console appender (stdout) +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c)%n + +appender.kafkaAppender.type=RollingFile +appender.kafkaAppender.name=KafkaAppender +appender.kafkaAppender.fileName=${kafka.logs.dir}/server.log +appender.kafkaAppender.filePattern=${kafka.logs.dir}/server.log.%d{yyyy-MM-dd-HH} +appender.kafkaAppender.layout.type=PatternLayout +appender.kafkaAppender.layout.pattern=[%d] %p %m (%c)%n +appender.kafkaAppender.policies.type=TimeBasedTriggeringPolicy +appender.kafkaAppender.policies.interval=1 +appender.kafkaAppender.policies.modulate=true + +# State Change appender +appender.stateChangeAppender.type=RollingFile +appender.stateChangeAppender.name=StateChangeAppender +appender.stateChangeAppender.fileName=${kafka.logs.dir}/state-change.log +appender.stateChangeAppender.filePattern=${kafka.logs.dir}/state-change.log.%d{yyyy-MM-dd-HH} +appender.stateChangeAppender.layout.type=PatternLayout +appender.stateChangeAppender.layout.pattern=[%d] %p %m (%c)%n +appender.stateChangeAppender.policies.type=TimeBasedTriggeringPolicy +appender.stateChangeAppender.policies.interval=1 +appender.stateChangeAppender.policies.modulate=true + +# Request appender +appender.requestAppender.type=RollingFile +appender.requestAppender.name=RequestAppender +appender.requestAppender.fileName=${kafka.logs.dir}/kafka-request.log +appender.requestAppender.filePattern=${kafka.logs.dir}/kafka-request.log.%d{yyyy-MM-dd-HH} +appender.requestAppender.layout.type=PatternLayout +appender.requestAppender.layout.pattern=[%d] %p %m (%c)%n 
+appender.requestAppender.policies.type=TimeBasedTriggeringPolicy +appender.requestAppender.policies.interval=1 +appender.requestAppender.policies.modulate=true + +# Cleaner appender +appender.cleanerAppender.type=RollingFile +appender.cleanerAppender.name=CleanerAppender +appender.cleanerAppender.fileName=${kafka.logs.dir}/log-cleaner.log +appender.cleanerAppender.filePattern=${kafka.logs.dir}/log-cleaner.log.%d{yyyy-MM-dd-HH} +appender.cleanerAppender.layout.type=PatternLayout +appender.cleanerAppender.layout.pattern=[%d] %p %m (%c)%n +appender.cleanerAppender.policies.type=TimeBasedTriggeringPolicy +appender.cleanerAppender.policies.interval=1 +appender.cleanerAppender.policies.modulate=true + +# Controller appender +appender.controllerAppender.type=RollingFile +appender.controllerAppender.name=ControllerAppender +appender.controllerAppender.fileName=${kafka.logs.dir}/controller.log +appender.controllerAppender.filePattern=${kafka.logs.dir}/controller.log.%d{yyyy-MM-dd-HH} +appender.controllerAppender.layout.type=PatternLayout +appender.controllerAppender.layout.pattern=[%d] %p %m (%c)%n +appender.controllerAppender.policies.type=TimeBasedTriggeringPolicy +appender.controllerAppender.policies.interval=1 +appender.controllerAppender.policies.modulate=true + +# Authorizer appender +appender.authorizerAppender.type=RollingFile +appender.authorizerAppender.name=AuthorizerAppender +appender.authorizerAppender.fileName=${kafka.logs.dir}/kafka-authorizer.log +appender.authorizerAppender.filePattern=${kafka.logs.dir}/kafka-authorizer.log.%d{yyyy-MM-dd-HH} +appender.authorizerAppender.layout.type=PatternLayout +appender.authorizerAppender.layout.pattern=[%d] %p %m (%c)%n +appender.authorizerAppender.policies.type=TimeBasedTriggeringPolicy +appender.authorizerAppender.policies.interval=1 +appender.authorizerAppender.policies.modulate=true + +rootLogger.level=INFO +rootLogger.appenderRefs=stdout,kafkaAppender +rootLogger.appenderRef.stdout.ref=STDOUT 
+rootLogger.appenderRef.kafkaAppender.ref=KafkaAppender + +loggers=org.apache.zookeeper,kafka,org.apache.kafka,kafka.request.logger,kafka.network.RequestChannel$,org.apache.kafka.controller,kafka.controller,kafka.log.LogCleaner,state.change.logger,kafka.authorizer.logger + +# Zookeeper logger +logger.org.apache.zookeeper.name=org.apache.zookeeper +logger.org.apache.zookeeper.level=INFO +logger.org.apache.zookeeper.additivity=false +logger.org.apache.zookeeper.appenderRef.kafkaAppender.ref=KafkaAppender + +# Kafka logger +logger.kafka.name=kafka +logger.kafka.level=INFO +logger.kafka.additivity=false +logger.kafka.appenderRef.kafkaAppender.ref=KafkaAppender + +# Kafka org.apache logger +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=INFO +logger.org.apache.kafka.additivity=false +logger.org.apache.kafka.appenderRef.kafkaAppender.ref=KafkaAppender + +# Kafka request logger +logger.kafka.request.logger.name=kafka.request.logger +logger.kafka.request.logger.level=WARN +logger.kafka.request.logger.additivity=false +logger.kafka.request.logger.appenderRef.requestAppender.ref=RequestAppender + +# Kafka network RequestChannel$ logger +logger.kafka.network.RequestChannel$.name=kafka.network.RequestChannel$ +logger.kafka.network.RequestChannel$.level=WARN +logger.kafka.network.RequestChannel$.additivity=false +logger.kafka.network.RequestChannel$.appenderRef.requestAppender.ref=RequestAppender + +# KRaft mode controller logger +logger.org.apache.kafka.controller.name=org.apache.kafka.controller +logger.org.apache.kafka.controller.level=INFO +logger.org.apache.kafka.controller.additivity=false +logger.org.apache.kafka.controller.appenderRef.controllerAppender.ref=ControllerAppender + +# ZK mode controller logger +logger.kafka.controller.name=kafka.controller +logger.kafka.controller.level=TRACE +logger.kafka.controller.additivity=false +logger.kafka.controller.appenderRef.controllerAppender.ref=ControllerAppender + +# LogCleaner logger 
+logger.kafka.log.LogCleaner.name=kafka.log.LogCleaner +logger.kafka.log.LogCleaner.level=INFO +logger.kafka.log.LogCleaner.additivity=false +logger.kafka.log.LogCleaner.appenderRef.cleanerAppender.ref=CleanerAppender + +# State change logger +logger.state.change.logger.name=state.change.logger +logger.state.change.logger.level=INFO +logger.state.change.logger.additivity=false +logger.state.change.logger.appenderRef.stateChangeAppender.ref=StateChangeAppender + +# Authorizer logger +logger.kafka.authorizer.logger.name=kafka.authorizer.logger +logger.kafka.authorizer.logger.level=INFO +logger.kafka.authorizer.logger.additivity=false +logger.kafka.authorizer.logger.appenderRef.authorizerAppender.ref=AuthorizerAppender diff --git a/group-coordinator/src/test/resources/log4j.properties b/config/tools-log4j2.properties similarity index 71% rename from group-coordinator/src/test/resources/log4j.properties rename to config/tools-log4j2.properties index db3879386f10f..6ce22560b3be2 100644 --- a/group-coordinator/src/test/resources/log4j.properties +++ b/config/tools-log4j2.properties @@ -12,11 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-log4j.rootLogger=DEBUG, stdout +name=ToolLog4j2 +appenders=console -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDERR +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c)%n +appender.console.target=SYSTEM_ERR -log4j.logger.org.apache.kafka=DEBUG -log4j.logger.org.apache.zookeeper=WARN +rootLogger.level=WARN +rootLogger.appenderRefs=stderr +rootLogger.appenderRef.stderr.ref=STDERR diff --git a/config/tools-log4j.properties b/connect/file/src/test/resources/log4j2.properties similarity index 61% rename from config/tools-log4j.properties rename to connect/file/src/test/resources/log4j2.properties index b19e343265fc3..d553565800215 100644 --- a/config/tools-log4j.properties +++ b/connect/file/src/test/resources/log4j2.properties @@ -1,3 +1,4 @@ +## # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. @@ -12,10 +13,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +## +name=ConnectTestConfig -log4j.rootLogger=WARN, stderr +appenders=stdout -log4j.appender.stderr=org.apache.log4j.ConsoleAppender -log4j.appender.stderr.layout=org.apache.log4j.PatternLayout -log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.stderr.Target=System.err +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +# +# The `%X{connector.context}` parameter includes connector-specific and task-specific information in the log message. 
+# +appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n + +rootLogger.level=INFO +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +logger.kafka.name=kafka +logger.kafka.level=WARN diff --git a/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java b/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java index fae73092a7f46..6cca2c3ffa1a4 100644 --- a/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java +++ b/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java @@ -40,7 +40,7 @@ import org.apache.kafka.connect.errors.ConnectException; import org.apache.kafka.connect.source.ExactlyOnceSupport; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.Test; import java.util.ArrayList; @@ -197,7 +197,7 @@ public void testNoBrokerAclAuthorizer() throws Exception { when(describeAclsResult.values()).thenReturn(describeAclsFuture); when(sourceAdmin.describeAcls(any())).thenReturn(describeAclsResult); - try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorSourceConnector.class)) { + try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorSourceConnector.class.getName())) { connectorLogs.setClassLogger(MirrorSourceConnector.class, Level.TRACE); connector.syncTopicAcls(); long aclSyncDisableMessages = connectorLogs.getMessages().stream() @@ -245,7 +245,7 @@ public void testMissingDescribeConfigsAcl() throws Exception { when(describeConfigsResult.all()).thenReturn(describeConfigsFuture); when(sourceAdmin.describeConfigs(any())).thenReturn(describeConfigsResult); - try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorUtils.class)) { + try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorUtils.class.getName())) { 
connectorLogs.setClassLogger(MirrorUtils.class, Level.TRACE); Set topics = new HashSet<>(); topics.add("topic1"); diff --git a/connect/mirror/src/test/resources/log4j.properties b/connect/mirror/src/test/resources/log4j.properties deleted file mode 100644 index c4ca6a2388fb1..0000000000000 --- a/connect/mirror/src/test/resources/log4j.properties +++ /dev/null @@ -1,33 +0,0 @@ -## -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -## -log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -# -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. 
-# -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n -# -# The following line includes no MDC context parameters: -#log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n (%t) - -log4j.logger.kafka=WARN -log4j.logger.state.change.logger=OFF -log4j.logger.org.apache.kafka.connect=DEBUG diff --git a/connect/file/src/test/resources/log4j.properties b/connect/mirror/src/test/resources/log4j2.properties similarity index 52% rename from connect/file/src/test/resources/log4j.properties rename to connect/mirror/src/test/resources/log4j2.properties index 548e8c33cfbe9..bc50c138bcb14 100644 --- a/connect/file/src/test/resources/log4j.properties +++ b/connect/mirror/src/test/resources/log4j2.properties @@ -14,15 +14,28 @@ # See the License for the specific language governing permissions and # limitations under the License. ## -log4j.rootLogger=INFO, stdout +name=MirrorTestConfig +appenders=stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout # -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. +# The `%X{connector.context}` parameter includes connector-specific and task-specific information in the log message. 
# -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n +appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n -log4j.logger.kafka=WARN +loggers=kafka,state.change.logger,org.apache.kafka.connect + +rootLogger.level=INFO +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +logger.kafka.name=kafka +logger.kafka.level=WARN + +logger.state.change.logger.name=state.change.logger +logger.state.change.logger.level=OFF + +logger.org.apache.kafka.connect.name=org.apache.kafka.connect +logger.org.apache.kafka.connect.level=DEBUG diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java index 3f72aefdb5fdb..212dcd9b23cd6 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/AbstractHerder.java @@ -63,7 +63,7 @@ import org.apache.kafka.connect.util.Stage; import org.apache.kafka.connect.util.TemporaryStage; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java index 9e59b13d34adf..d688f375dabcf 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java @@ -19,19 +19,22 @@ import org.apache.kafka.common.utils.Time; import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel; -import org.apache.log4j.Level; -import org.apache.log4j.LogManager; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.core.LoggerContext; +import 
org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.core.config.LoggerConfig; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.ArrayList; import java.util.Collections; -import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.TreeMap; +import java.util.stream.Collectors; /** * Manages logging levels on a single worker. Supports dynamic adjustment and querying @@ -66,18 +69,17 @@ public Loggers(Time time) { public synchronized LoggerLevel level(String logger) { Objects.requireNonNull(logger, "Logger may not be null"); - org.apache.log4j.Logger foundLogger = null; + org.apache.logging.log4j.Logger foundLogger = null; if (ROOT_LOGGER_NAME.equalsIgnoreCase(logger)) { foundLogger = rootLogger(); } else { - Enumeration en = currentLoggers(); + List currentLoggers = currentLoggers(); // search within existing loggers for the given name. // using LogManger.getLogger() will create a logger if it doesn't exist // (potential leak since these don't get cleaned up). 
- while (en.hasMoreElements()) { - org.apache.log4j.Logger l = en.nextElement(); - if (logger.equals(l.getName())) { - foundLogger = l; + for (org.apache.logging.log4j.Logger currentLogger : currentLoggers) { + if (logger.equals(currentLogger.getName())) { + foundLogger = currentLogger; break; } } @@ -98,13 +100,11 @@ public synchronized LoggerLevel level(String logger) { public synchronized Map allLevels() { Map result = new TreeMap<>(); - Enumeration enumeration = currentLoggers(); - Collections.list(enumeration) - .stream() + currentLoggers().stream() .filter(logger -> logger.getLevel() != null) .forEach(logger -> result.put(logger.getName(), loggerLevel(logger))); - org.apache.log4j.Logger root = rootLogger(); + org.apache.logging.log4j.Logger root = rootLogger(); if (root.getLevel() != null) { result.put(ROOT_LOGGER_NAME, loggerLevel(root)); } @@ -124,10 +124,10 @@ public synchronized List setLevel(String namespace, Level level) { Objects.requireNonNull(level, "Level may not be null"); log.info("Setting level of namespace {} and children to {}", namespace, level); - List childLoggers = loggers(namespace); + List childLoggers = loggers(namespace); List result = new ArrayList<>(); - for (org.apache.log4j.Logger logger: childLoggers) { + for (org.apache.logging.log4j.Logger logger: childLoggers) { setLevel(logger, level); result.add(logger.getName()); } @@ -143,25 +143,24 @@ public synchronized List setLevel(String namespace, Level level) { * @return all loggers that fall under the given namespace; never null, and will always contain * at least one logger (the ancestor logger for the namespace) */ - private synchronized List loggers(String namespace) { + private synchronized List loggers(String namespace) { Objects.requireNonNull(namespace, "Logging namespace may not be null"); if (ROOT_LOGGER_NAME.equalsIgnoreCase(namespace)) { - List result = Collections.list(currentLoggers()); + List result = currentLoggers(); result.add(rootLogger()); return result; } - List 
result = new ArrayList<>(); - org.apache.log4j.Logger ancestorLogger = lookupLogger(namespace); - Enumeration en = currentLoggers(); + List result = new ArrayList<>(); + org.apache.logging.log4j.Logger ancestorLogger = lookupLogger(namespace); + List currentLoggers = currentLoggers(); boolean present = false; - while (en.hasMoreElements()) { - org.apache.log4j.Logger current = en.nextElement(); - if (current.getName().startsWith(namespace)) { - result.add(current); + for (org.apache.logging.log4j.Logger currentLogger : currentLoggers) { + if (currentLogger.getName().startsWith(namespace)) { + result.add(currentLogger); } - if (namespace.equals(current.getName())) { + if (namespace.equals(currentLogger.getName())) { present = true; } } @@ -174,41 +173,44 @@ private synchronized List loggers(String namespace) { } // visible for testing - org.apache.log4j.Logger lookupLogger(String logger) { + org.apache.logging.log4j.Logger lookupLogger(String logger) { return LogManager.getLogger(logger); } - @SuppressWarnings("unchecked") - // visible for testing - Enumeration currentLoggers() { - return LogManager.getCurrentLoggers(); + List currentLoggers() { + LoggerContext context = (LoggerContext) LogManager.getContext(false); + return context.getLoggers() + .stream() + .filter(logger -> !logger.getName().equals(ROOT_LOGGER_NAME)) + .collect(Collectors.toList()); } // visible for testing - org.apache.log4j.Logger rootLogger() { + org.apache.logging.log4j.Logger rootLogger() { return LogManager.getRootLogger(); } - private void setLevel(org.apache.log4j.Logger logger, Level level) { - Level currentLevel = logger.getLevel(); - if (currentLevel == null) - currentLevel = logger.getEffectiveLevel(); + private void setLevel(org.apache.logging.log4j.Logger logger, Level level) { + String loggerName = logger.getName(); + LoggerContext context = (LoggerContext) LogManager.getContext(false); + LoggerConfig loggerConfig = context.getConfiguration().getLoggerConfig(loggerName); + Level 
currentLevel = loggerConfig.getLevel(); if (level.equals(currentLevel)) { - log.debug("Skipping update for logger {} since its level is already {}", logger.getName(), level); + log.debug("Skipping update for logger {} since its level is already {}", loggerName, level); return; } - log.debug("Setting level of logger {} (excluding children) to {}", logger.getName(), level); - logger.setLevel(level); - lastModifiedTimes.put(logger.getName(), time.milliseconds()); + log.debug("Setting level of logger {} (excluding children) to {}", loggerName, level); + Configurator.setLevel(loggerName, level); + context.updateLoggers(); + lastModifiedTimes.put(loggerName, time.milliseconds()); } - private LoggerLevel loggerLevel(org.apache.log4j.Logger logger) { - Level level = logger.getLevel(); - if (level == null) - level = logger.getEffectiveLevel(); - + private LoggerLevel loggerLevel(org.apache.logging.log4j.Logger logger) { + LoggerContext context = (LoggerContext) LogManager.getContext(false); + LoggerConfig loggerConfig = context.getConfiguration().getLoggerConfig(logger.getName()); + Level level = loggerConfig.getLevel(); Long lastModified = lastModifiedTimes.get(logger.getName()); return new LoggerLevel(Objects.toString(level), lastModified); } diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java index 44aa617bd4bae..11219f4efe39c 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/rest/resources/LoggingResource.java @@ -21,7 +21,7 @@ import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel; import org.apache.kafka.connect.runtime.rest.errors.BadRequestException; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.slf4j.LoggerFactory; import 
java.util.List; diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java index 3dbe688a076a2..3e34b512ff53b 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java @@ -20,9 +20,13 @@ import org.apache.kafka.common.utils.Time; import org.apache.kafka.connect.runtime.rest.entities.LoggerLevel; -import org.apache.log4j.Hierarchy; -import org.apache.log4j.Level; -import org.apache.log4j.Logger; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configuration; +import org.apache.logging.log4j.core.config.Configurator; +import org.apache.logging.log4j.core.config.LoggerConfig; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -30,13 +34,12 @@ import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; -import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Vector; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -62,9 +65,9 @@ public void testGetLoggersIgnoresNullLevels() { Logger root = logger("root"); Logger a = logger("a"); - a.setLevel(null); + Configurator.setLevel(a, null); Logger b = logger("b"); - b.setLevel(Level.INFO); + Configurator.setLevel(b, Level.INFO); Loggers loggers = new TestLoggers(root, a, b); @@ -78,52 +81,65 @@ public void testGetLoggersIgnoresNullLevels() { @Test public void testGetLoggerFallsBackToEffectiveLogLevel() { + 
LoggerContext context = (LoggerContext) LogManager.getContext(false); + Configuration config = context.getConfiguration(); + Logger root = logger("root"); - root.setLevel(Level.ERROR); + Configurator.setLevel(root, Level.ERROR); + LoggerConfig rootLoggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME); - Hierarchy hierarchy = new Hierarchy(root); - Logger a = hierarchy.getLogger("a"); - a.setLevel(null); - Logger b = hierarchy.getLogger("b"); - b.setLevel(Level.INFO); + LoggerConfig aLoggerConfig = config.getLoggerConfig("a"); + aLoggerConfig.setLevel(null); - Loggers loggers = new TestLoggers(root, a, b); + LoggerConfig bLoggerConfig = config.getLoggerConfig("b"); + bLoggerConfig.setLevel(Level.INFO); - LoggerLevel expectedLevel = new LoggerLevel(Level.ERROR.toString(), null); - LoggerLevel actualLevel = loggers.level("a"); + context.updateLoggers(); + + Level expectedLevel = rootLoggerConfig.getLevel(); + Level actualLevel = aLoggerConfig.getLevel() != null ? aLoggerConfig.getLevel() : rootLoggerConfig.getLevel(); assertEquals(expectedLevel, actualLevel); } @Test public void testGetUnknownLogger() { - Logger root = logger("root"); - root.setLevel(Level.ERROR); + LoggerContext context = (LoggerContext) LogManager.getContext(false); + Configuration config = context.getConfiguration(); - Hierarchy hierarchy = new Hierarchy(root); - Logger a = hierarchy.getLogger("a"); - a.setLevel(null); - Logger b = hierarchy.getLogger("b"); - b.setLevel(Level.INFO); + Logger root = LogManager.getRootLogger(); + LoggerConfig rootLoggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME); + rootLoggerConfig.setLevel(Level.ERROR); - Loggers loggers = new TestLoggers(root, a, b); + LoggerConfig aLoggerConfig = config.getLoggerConfig("a"); + aLoggerConfig.setLevel(null); + + LoggerConfig bLoggerConfig = config.getLoggerConfig("b"); + bLoggerConfig.setLevel(Level.INFO); + + context.updateLoggers(); - LoggerLevel level = loggers.level("c"); - assertNull(level); + 
LoggerConfig cLoggerConfig = config.getLoggerConfig("c"); + + if (cLoggerConfig.equals(rootLoggerConfig)) { + cLoggerConfig = null; + } + + assertNull(cLoggerConfig); } @Test public void testSetLevel() { Logger root = logger("root"); - root.setLevel(Level.ERROR); + Configurator.setLevel(root, Level.ERROR); Logger x = logger("a.b.c.p.X"); Logger y = logger("a.b.c.p.Y"); Logger z = logger("a.b.c.p.Z"); Logger w = logger("a.b.c.s.W"); - x.setLevel(Level.INFO); - y.setLevel(Level.INFO); - z.setLevel(Level.INFO); - w.setLevel(Level.INFO); + Configurator.setLevel(x, Level.INFO); + Configurator.setLevel(y, Level.INFO); + Configurator.setLevel(z, Level.INFO); + Configurator.setLevel(w, Level.INFO); // We don't explicitly register a logger for a.b.c.p, so it won't appear in the list of current loggers; // one should be created by the Loggers instance when we set the level @@ -167,31 +183,32 @@ public void testSetLevel() { @Test public void testSetRootLevel() { Logger root = logger("root"); - root.setLevel(Level.ERROR); + Configurator.setLevel(root, Level.ERROR); Logger p = logger("a.b.c.p"); Logger x = logger("a.b.c.p.X"); Logger y = logger("a.b.c.p.Y"); Logger z = logger("a.b.c.p.Z"); Logger w = logger("a.b.c.s.W"); - x.setLevel(Level.INFO); - y.setLevel(Level.INFO); - z.setLevel(Level.INFO); - w.setLevel(Level.INFO); + Configurator.setLevel(x, Level.INFO); + Configurator.setLevel(y, Level.INFO); + Configurator.setLevel(z, Level.INFO); + Configurator.setLevel(w, Level.INFO); Loggers loggers = new TestLoggers(root, x, y, z, w); List modified = loggers.setLevel("root", Level.DEBUG); assertEquals(Arrays.asList("a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W", "root"), modified); - assertNull(p.getLevel()); + // log4j2.properties has defined root logger level as INFO + assertEquals(Level.INFO, p.getLevel()); - assertEquals(root.getLevel(), Level.DEBUG); + assertEquals(Level.DEBUG, root.getLevel()); - assertEquals(w.getLevel(), Level.DEBUG); - assertEquals(x.getLevel(), 
Level.DEBUG); - assertEquals(y.getLevel(), Level.DEBUG); - assertEquals(z.getLevel(), Level.DEBUG); + assertEquals(Level.DEBUG, w.getLevel()); + assertEquals(Level.DEBUG, x.getLevel()); + assertEquals(Level.DEBUG, y.getLevel()); + assertEquals(Level.DEBUG, z.getLevel()); Map expectedLevels = new HashMap<>(); expectedLevels.put("root", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); @@ -229,12 +246,12 @@ public TestLoggers(Logger rootLogger, Logger... knownLoggers) { @Override Logger lookupLogger(String logger) { - return currentLoggers.computeIfAbsent(logger, l -> new Logger(logger) { }); + return currentLoggers.computeIfAbsent(logger, LogManager::getLogger); } @Override - Enumeration currentLoggers() { - return new Vector<>(currentLoggers.values()).elements(); + List currentLoggers() { + return new ArrayList<>(currentLoggers.values()); } @Override @@ -244,7 +261,6 @@ Logger rootLogger() { } private Logger logger(String name) { - return new Logger(name) { }; + return LogManager.getLogger(name); } - } diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java index bd70ed357c6e8..106659d0f8f46 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/SourceTaskOffsetCommitterTest.java @@ -22,7 +22,7 @@ import org.apache.kafka.connect.runtime.standalone.StandaloneConfig; import org.apache.kafka.connect.util.ConnectorTaskId; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java 
b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java index 77d56a207d764..3ddbf164494c4 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/WorkerSourceTaskTest.java @@ -54,7 +54,7 @@ import org.apache.kafka.connect.util.TopicAdmin; import org.apache.kafka.connect.util.TopicCreationGroup; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java index 8b69b1c37b234..68d83de8a46fe 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java @@ -345,7 +345,7 @@ public void testRequestLogs() throws IOException { server.initializeServer(); server.initializeResources(herder); - LogCaptureAppender restServerAppender = LogCaptureAppender.createAndRegister(); + LogCaptureAppender restServerAppender = LogCaptureAppender.createAndRegister(ConnectRestServerTest.class); HttpRequest request = new HttpGet("/"); HttpResponse response = executeRequest(server.advertisedUrl(), request); @@ -353,7 +353,6 @@ public void testRequestLogs() throws IOException { server.stop(); Collection logMessages = restServerAppender.getMessages(); - LogCaptureAppender.unregister(restServerAppender); restServerAppender.close(); String expectedlogContent = "\"GET / HTTP/1.1\" " + response.getStatusLine().getStatusCode(); assertTrue(logMessages.stream().anyMatch(logMessage -> logMessage.contains(expectedlogContent))); diff --git 
a/connect/runtime/src/test/resources/log4j.properties b/connect/runtime/src/test/resources/log4j.properties deleted file mode 100644 index de7180c282a32..0000000000000 --- a/connect/runtime/src/test/resources/log4j.properties +++ /dev/null @@ -1,37 +0,0 @@ -## -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -## -log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -# -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. -# -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %X{connector.context}%m (%c:%L)%n -# -# The following line includes no MDC context parameters: -#log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n (%t) - -log4j.logger.kafka=WARN -log4j.logger.state.change.logger=OFF -log4j.logger.org.apache.kafka.connect=DEBUG - -# Troubleshooting KAFKA-17493. 
-log4j.logger.org.apache.kafka.consumer=DEBUG -log4j.logger.org.apache.kafka.coordinator.group=DEBUG \ No newline at end of file diff --git a/connect/runtime/src/test/resources/log4j2.properties b/connect/runtime/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..f62294c0bef30 --- /dev/null +++ b/connect/runtime/src/test/resources/log4j2.properties @@ -0,0 +1,49 @@ +## +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+## +name=RuntimeTestConfig +appenders=stdout + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +# +# The `%X{connector.context}` parameter includes connector-specific and task-specific information in the log message +# +appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n + +loggers=kafka,state.change.logger,org.apache.kafka.connect,org.apache.kafka.consumer,org.apache.kafka.coordinator.group + +rootLogger.level=INFO +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +logger.kafka.name=kafka +logger.kafka.level=WARN + +logger.state.change.logger.name=state.change.logger +logger.state.change.logger.level=OFF + +logger.org.apache.kafka.connect.name=org.apache.kafka.connect +logger.org.apache.kafka.connect.level=DEBUG + +logger.org.apache.kafka.consumer.name=org.apache.kafka.consumer +logger.org.apache.kafka.consumer.level=DEBUG + +# Troubleshooting KAFKA-17493. +logger.org.apache.kafka.coordinator.group.name=org.apache.kafka.coordinator.group +logger.org.apache.kafka.coordinator.group.level=DEBUG +logger.org.apache.kafka.coordinator.group.additivity=false diff --git a/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala b/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala index 361eb2954a29c..1b90980bc7d77 100644 --- a/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala +++ b/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala @@ -79,7 +79,7 @@ object KafkaDockerWrapper extends Logging { required(true). help( """Directory which holds default properties. It should contain the three file:- - |server.properties, log4j.properties and tools-log4j.properties. + |server.properties, log4j2.properties and tools-log4j2.properties. |""".stripMargin) setupParser.addArgument("--mounted-configs-dir", "-M"). @@ -87,7 +87,7 @@ object KafkaDockerWrapper extends Logging { required(true). help( """Directory which holds user mounted properties. 
It can contain none to all the three files:- - |server.properties, log4j.properties and tools-log4j.properties.""".stripMargin) + |server.properties, log4j2.properties and tools-log4j2.properties.""".stripMargin) setupParser.addArgument("--final-configs-dir", "-F"). action(store()). @@ -238,8 +238,8 @@ object KafkaDockerWrapper extends Logging { private object Constants { val ServerPropsFilename = "server.properties" - val Log4jPropsFilename = "log4j.properties" - val ToolsLog4jFilename = "tools-log4j.properties" + val Log4jPropsFilename = "log4j2.properties" + val ToolsLog4jFilename = "tools-log4j2.properties" val KafkaLog4JLoggersEnv = "KAFKA_LOG4J_LOGGERS" val KafkaLog4jRootLoglevelEnv = "KAFKA_LOG4J_ROOT_LOGLEVEL" val KafkaToolsLog4jLoglevelEnv = "KAFKA_TOOLS_LOG4J_LOGLEVEL" diff --git a/core/src/main/scala/kafka/utils/Log4jController.scala b/core/src/main/scala/kafka/utils/Log4jController.scala index 0d54c74e07542..6d64757f56029 100755 --- a/core/src/main/scala/kafka/utils/Log4jController.scala +++ b/core/src/main/scala/kafka/utils/Log4jController.scala @@ -17,83 +17,92 @@ package kafka.utils +import org.apache.logging.log4j.core.LoggerContext +import org.apache.logging.log4j.core.config.Configurator +import org.apache.logging.log4j.{Level, LogManager} + import java.util import java.util.Locale - -import org.apache.kafka.common.utils.Utils -import org.apache.log4j.{Level, LogManager, Logger} - -import scala.collection.mutable import scala.jdk.CollectionConverters._ object Log4jController { + + /** + * Note: In log4j, the root logger's name was "root" and Kafka also followed that name for dynamic logging control feature. + * + * The root logger's name is changed in log4j2 to empty string (see: [[LogManager.ROOT_LOGGER_NAME]]) but for backward- + * compatibility. Kafka keeps its original root logger name. It is why here is a dedicated definition for the root logger name. 
+ */ val ROOT_LOGGER = "root" - private def resolveLevel(logger: Logger): String = { - var name = logger.getName - var level = logger.getLevel - while (level == null) { - val index = name.lastIndexOf(".") - if (index > 0) { - name = name.substring(0, index) - val ancestor = existingLogger(name) - if (ancestor != null) { - level = ancestor.getLevel - } - } else { - level = existingLogger(ROOT_LOGGER).getLevel - } - } - level.toString - } + /** + * Returns given logger's parent's (or the first ancestor's) name. + * + * @throws IllegalArgumentException loggerName is null or empty. + */ /** - * Returns a map of the log4j loggers and their assigned log level. - * If a logger does not have a log level assigned, we return the root logger's log level - */ - def loggers: mutable.Map[String, String] = { - val logs = new mutable.HashMap[String, String]() - val rootLoggerLvl = existingLogger(ROOT_LOGGER).getLevel.toString - logs.put(ROOT_LOGGER, rootLoggerLvl) - - val loggers = LogManager.getCurrentLoggers - while (loggers.hasMoreElements) { - val logger = loggers.nextElement().asInstanceOf[Logger] - if (logger != null) { - logs.put(logger.getName, resolveLevel(logger)) - } - } - logs + * Returns a map of the log4j loggers and their assigned log level. + * If a logger does not have a log level assigned, we return the log level of the first ancestor with a level configured. 
+ */ + def loggers: Map[String, String] = { + val logContext = LogManager.getContext(false).asInstanceOf[LoggerContext] + val rootLoggerLevel = logContext.getRootLogger.getLevel.toString + + // Loggers defined in the configuration + val configured = logContext.getConfiguration.getLoggers.asScala + .values + .filter(_.getName != LogManager.ROOT_LOGGER_NAME) + .map { logger => + logger.getName -> logger.getLevel.toString + }.toMap + + // Loggers actually running + val actual = logContext.getLoggers.asScala + .filter(_.getName != LogManager.ROOT_LOGGER_NAME) + .map { logger => + logger.getName -> logger.getLevel.toString + }.toMap + + (configured ++ actual) + (ROOT_LOGGER -> rootLoggerLevel) } /** - * Sets the log level of a particular logger - */ + * Sets the log level of a particular logger. If the given logLevel is not an available log4j level + * (i.e., one of OFF, FATAL, ERROR, WARN, INFO, DEBUG, TRACE, ALL) it falls back to DEBUG. + * + * @see [[Level.toLevel]] + */ def logLevel(loggerName: String, logLevel: String): Boolean = { - val log = existingLogger(loggerName) - if (!Utils.isBlank(loggerName) && !Utils.isBlank(logLevel) && log != null) { - log.setLevel(Level.toLevel(logLevel.toUpperCase(Locale.ROOT))) + val level = Level.toLevel(logLevel.toUpperCase(Locale.ROOT)) + + if (loggerName == ROOT_LOGGER) { + Configurator.setAllLevels(LogManager.ROOT_LOGGER_NAME, level) true + } else { + if (loggerExists(loggerName) && level != null) { + Configurator.setAllLevels(loggerName, level) + true + } + else false } - else false } def unsetLogLevel(loggerName: String): Boolean = { - val log = existingLogger(loggerName) - if (!Utils.isBlank(loggerName) && log != null) { - log.setLevel(null) + if (loggerName == ROOT_LOGGER) { + Configurator.setAllLevels(LogManager.ROOT_LOGGER_NAME, null) true + } else { + if (loggerExists(loggerName)) { + Configurator.setAllLevels(loggerName, null) + true + } + else false } - else false } - def loggerExists(loggerName: String): Boolean = 
existingLogger(loggerName) != null - - private def existingLogger(loggerName: String) = - if (loggerName == ROOT_LOGGER) - LogManager.getRootLogger - else LogManager.exists(loggerName) + def loggerExists(loggerName: String): Boolean = loggers.contains(loggerName) } /** @@ -113,15 +122,7 @@ class Log4jController extends Log4jControllerMBean { def getLogLevel(loggerName: String): String = { - val log = Log4jController.existingLogger(loggerName) - if (log != null) { - val level = log.getLevel - if (level != null) - log.getLevel.toString - else - Log4jController.resolveLevel(log) - } - else "No such logger." + Log4jController.loggers.getOrElse(loggerName, "No such logger.") } def setLogLevel(loggerName: String, level: String): Boolean = Log4jController.logLevel(loggerName, level) diff --git a/core/src/test/java/kafka/admin/AclCommandTest.java b/core/src/test/java/kafka/admin/AclCommandTest.java index 3dd091efbdb9a..b21a1bf2e3bc5 100644 --- a/core/src/test/java/kafka/admin/AclCommandTest.java +++ b/core/src/test/java/kafka/admin/AclCommandTest.java @@ -40,7 +40,7 @@ import org.apache.kafka.metadata.authorizer.StandardAuthorizer; import org.apache.kafka.test.TestUtils; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -254,7 +254,7 @@ public void testProducerConsumerCliWithAdminAPIAndBootstrapController(ClusterIns @ClusterTest public void testAclCliWithClientId(ClusterInstance cluster) throws IOException, InterruptedException { - try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister()) { + try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AclCommandTest.class)) { appender.setClassLogger(AppInfoParser.class, Level.WARN); testAclCli(cluster, adminArgs(cluster.bootstrapServers(), Optional.of(TestUtils.tempFile("client.id=my-client")))); assertEquals(0, appender.getEvents().stream() @@ -266,7 +266,7 @@ public void 
testAclCliWithClientId(ClusterInstance cluster) throws IOException, @ClusterTest public void testAclCliWithClientIdAndBootstrapController(ClusterInstance cluster) throws IOException, InterruptedException { - try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister()) { + try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AclCommandTest.class)) { appender.setClassLogger(AppInfoParser.class, Level.WARN); testAclCli(cluster, adminArgsWithBootstrapController(cluster.bootstrapControllers(), Optional.of(TestUtils.tempFile("client.id=my-client")))); assertEquals(0, appender.getEvents().stream() diff --git a/storage/src/test/resources/log4j.properties b/core/src/test/resources/log4j2.properties similarity index 52% rename from storage/src/test/resources/log4j.properties rename to core/src/test/resources/log4j2.properties index 7ee388a407f71..7fdec78ff7e04 100644 --- a/storage/src/test/resources/log4j.properties +++ b/core/src/test/resources/log4j2.properties @@ -12,17 +12,28 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-log4j.rootLogger=OFF, stdout +# Root logger configuration +name=CoreTestConfig -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n +# Appender configuration +appender.STDOUT.type=Console +appender.STDOUT.name=STDOUT +appender.STDOUT.layout.type=PatternLayout +appender.STDOUT.layout.pattern=[%d] %p %m (%c:%L)%n -log4j.appender.fileAppender=org.apache.log4j.RollingFileAppender -log4j.appender.fileAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.fileAppender.layout.ConversionPattern=%d [%t] %-5p %c %x - %m%n -log4j.appender.fileAppender.File=storage.log +rootLogger.level=OFF +rootLogger.appenderRefs=STDOUT +rootLogger.appenderRef.STDOUT.ref=STDOUT -log4j.logger.org.apache.kafka.server.log.remote.storage=INFO -log4j.logger.org.apache.kafka.server.log.remote.metadata.storage=INFO -log4j.logger.kafka.log.remote=INFO +loggers=kafka,org.apache.kafka,org.apache.zookeeper + +# Logger configurations +logger.kafka.name=kafka +logger.kafka.level=WARN + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=WARN + +# zkclient can be verbose, during debugging it is common to adjust it separately +logger.org.apache.zookeeper.name=org.apache.zookeeper +logger.org.apache.zookeeper.level=WARN diff --git a/core/src/test/scala/other/kafka.log4j.properties b/core/src/test/scala/other/kafka.log4j2.properties similarity index 61% rename from core/src/test/scala/other/kafka.log4j.properties rename to core/src/test/scala/other/kafka.log4j2.properties index 1a53fd5d28618..1becd9dd31212 100644 --- a/core/src/test/scala/other/kafka.log4j.properties +++ b/core/src/test/scala/other/kafka.log4j2.properties @@ -12,11 +12,17 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-log4j.rootLogger=INFO, KAFKA +name=CoreTestScalaOtherConfig -log4j.appender.KAFKA=kafka.log4j.KafkaAppender +appenders=kafkaAppender -log4j.appender.KAFKA.Port=9092 -log4j.appender.KAFKA.Host=localhost -log4j.appender.KAFKA.Topic=test-logger -log4j.appender.KAFKA.Serializer=kafka.AppenderStringSerializer +appender.kafkaAppender.type=Kafka +appender.kafkaAppender.name=KAFKA +appender.kafkaAppender.topic=test-logger +appender.kafkaAppender.bootstrapServers=localhost:9092 +appender.kafkaAppender.keySerializer=org.apache.kafka.common.serialization.StringSerializer +appender.kafkaAppender.valueSerializer=kafka.AppenderStringSerializer + +rootLogger.level=INFO +rootLogger.appenderRefs=kafkaAppender +rootLogger.appenderRef.kafkaAppender.ref=KAFKA diff --git a/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala b/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala index 409c5dccdbb2d..b5a4b3a1c9ce7 100644 --- a/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala +++ b/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala @@ -186,13 +186,13 @@ class KafkaDockerWrapperTest { "SOME_VARIABLE" -> "Some Value" ) - Files.write(defaultConfigsPath.resolve("log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(mountedConfigsPath.resolve("log4j.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("log4j2.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("log4j2.properties"), 
"existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j2.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" + "\n" + "log4j.rootLogger=ERROR, stdout" + "\n" + "log4j.logger.kafka=INFO" + "\n" + @@ -212,12 +212,12 @@ class KafkaDockerWrapperTest { "SOME_VARIABLE" -> "Some Value" ) - Files.write(defaultConfigsPath.resolve("log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j2.properties") val actual = try source.mkString finally source.close() val expected = "default.config=default value" + "\n" + "log4j.rootLogger=ERROR, stdout" + "\n" + "log4j.logger.kafka=INFO" + "\n" + @@ -233,13 +233,13 @@ class KafkaDockerWrapperTest { val envVars = Map.empty[String, String] - Files.write(defaultConfigsPath.resolve("log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - 
Files.write(mountedConfigsPath.resolve("log4j.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("log4j2.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j2.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" @@ -276,13 +276,13 @@ class KafkaDockerWrapperTest { val envVars = Map("KAFKA_TOOLS_LOG4J_LOGLEVEL" -> "TRACE") - Files.write(defaultConfigsPath.resolve("tools-log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(mountedConfigsPath.resolve("tools-log4j.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("tools-log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("tools-log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("tools-log4j2.properties"), "mounted.config=mounted 
value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("tools-log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareToolsLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j2.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" + "\n" + "log4j.rootLogger=TRACE, stderr" @@ -295,12 +295,12 @@ class KafkaDockerWrapperTest { val envVars = Map("KAFKA_TOOLS_LOG4J_LOGLEVEL" -> "TRACE") - Files.write(defaultConfigsPath.resolve("tools-log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("tools-log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("tools-log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("tools-log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareToolsLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j2.properties") val actual = try source.mkString finally source.close() val expected = "default.config=default value" + "\n" + "log4j.rootLogger=TRACE, stderr" @@ -313,13 +313,13 @@ class KafkaDockerWrapperTest { val envVars = Map.empty[String, String] - 
Files.write(defaultConfigsPath.resolve("tools-log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(mountedConfigsPath.resolve("tools-log4j.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("tools-log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("tools-log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("tools-log4j2.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("tools-log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareToolsLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j2.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" diff --git a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala index 2a23023c898b9..349a24df87472 100755 --- a/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala +++ b/core/src/test/scala/unit/kafka/integration/UncleanLeaderElectionTest.scala @@ -34,13 +34,14 @@ import org.apache.kafka.common.security.auth.SecurityProtocol import org.apache.kafka.clients.admin.{Admin, AdminClientConfig, AlterConfigOp, AlterConfigsResult, ConfigEntry} import org.apache.kafka.server.config.ReplicationConfigs import 
org.apache.kafka.server.metrics.KafkaYammerMetrics -import org.apache.log4j.{Level, Logger} +import org.apache.logging.log4j.{Level, LogManager} import org.junit.jupiter.api.{AfterEach, BeforeEach, TestInfo} import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.ValueSource import com.yammer.metrics.core.Meter import org.apache.kafka.metadata.LeaderConstants +import org.apache.logging.log4j.core.config.Configurator class UncleanLeaderElectionTest extends QuorumTestHarness { val brokerId1 = 0 @@ -63,8 +64,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness { val partitionId = 0 val topicPartition = new TopicPartition(topic, partitionId) - val kafkaApisLogger = Logger.getLogger(classOf[kafka.server.KafkaApis]) - val networkProcessorLogger = Logger.getLogger(classOf[kafka.network.Processor]) + val kafkaApisLogger = LogManager.getLogger(classOf[kafka.server.KafkaApis]) + val networkProcessorLogger = LogManager.getLogger(classOf[kafka.network.Processor]) @BeforeEach override def setUp(testInfo: TestInfo): Unit = { @@ -80,8 +81,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness { } // temporarily set loggers to a higher level so that tests run quietly - kafkaApisLogger.setLevel(Level.FATAL) - networkProcessorLogger.setLevel(Level.FATAL) + Configurator.setLevel(kafkaApisLogger.getName, Level.FATAL) + Configurator.setLevel(networkProcessorLogger.getName, Level.FATAL) } @AfterEach @@ -90,8 +91,8 @@ class UncleanLeaderElectionTest extends QuorumTestHarness { brokers.foreach(broker => CoreUtils.delete(broker.config.logDirs)) // restore log levels - kafkaApisLogger.setLevel(Level.ERROR) - networkProcessorLogger.setLevel(Level.ERROR) + Configurator.setLevel(kafkaApisLogger.getName, Level.ERROR) + Configurator.setLevel(networkProcessorLogger.getName, Level.ERROR) admin.close() diff --git a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala 
b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala index 2a34d2aea5f36..a7aec1ecbeddc 100644 --- a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala +++ b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala @@ -44,7 +44,8 @@ import org.apache.kafka.server.config.QuotaConfigs import org.apache.kafka.server.metrics.KafkaYammerMetrics import org.apache.kafka.server.quota.{ThrottleCallback, ThrottledChannel} import org.apache.kafka.test.{TestSslUtils, TestUtils => JTestUtils} -import org.apache.log4j.Level +import org.apache.logging.log4j.Level +import org.apache.logging.log4j.core.config.Configurator import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.api._ @@ -87,7 +88,7 @@ class SocketServerTest { var server: SocketServer = _ val sockets = new ArrayBuffer[Socket] - private val kafkaLogger = org.apache.log4j.LogManager.getLogger("kafka") + private val kafkaLogger = org.apache.logging.log4j.LogManager.getLogger("kafka") private var logLevelToRestore: Level = _ def endpoint: EndPoint = { KafkaConfig.fromProps(props, doLog = false).dataPlaneListeners.head @@ -101,7 +102,7 @@ class SocketServerTest { server.enableRequestProcessing(Map.empty).get(1, TimeUnit.MINUTES) // Run the tests with TRACE logging to exercise request logging path logLevelToRestore = kafkaLogger.getLevel - kafkaLogger.setLevel(Level.TRACE) + Configurator.setLevel(kafkaLogger.getName, Level.TRACE) assertTrue(server.controlPlaneRequestChannelOpt.isEmpty) } @@ -111,7 +112,7 @@ class SocketServerTest { shutdownServerAndMetrics(server) sockets.foreach(_.close()) sockets.clear() - kafkaLogger.setLevel(logLevelToRestore) + Configurator.setLevel(kafkaLogger.getName, logLevelToRestore) TestUtils.clearYammerMetrics() } diff --git a/docker/docker_official_images/3.7.0/jvm/Dockerfile b/docker/docker_official_images/3.7.0/jvm/Dockerfile index 905e2f2149bba..a9b7dce603b53 100755 --- a/docker/docker_official_images/3.7.0/jvm/Dockerfile +++ 
b/docker/docker_official_images/3.7.0/jvm/Dockerfile @@ -76,8 +76,8 @@ RUN set -eux ; \ chown appuser:appuser -R /usr/logs /opt/kafka /mnt/shared/config; \ chown appuser:root -R /var/lib/kafka /etc/kafka/secrets /etc/kafka; \ chmod -R ug+w /etc/kafka /var/lib/kafka /etc/kafka/secrets; \ - cp /opt/kafka/config/log4j.properties /etc/kafka/docker/log4j.properties; \ - cp /opt/kafka/config/tools-log4j.properties /etc/kafka/docker/tools-log4j.properties; \ + cp /opt/kafka/config/log4j2.properties /etc/kafka/docker/log4j2.properties; \ + cp /opt/kafka/config/tools-log4j2.properties /etc/kafka/docker/tools-log4j2.properties; \ cp /opt/kafka/config/kraft/server.properties /etc/kafka/docker/server.properties; \ rm kafka.tgz kafka.tgz.asc KEYS; \ apk del wget gpg gpg-agent; \ diff --git a/docker/examples/README.md b/docker/examples/README.md index 8f4fdce43673d..11258c91c5605 100644 --- a/docker/examples/README.md +++ b/docker/examples/README.md @@ -42,11 +42,11 @@ Using Environment Variables - To provide configs to log4j property files, following points should be considered:- - log4j properties provided via environment variables will be appended to the default properties file (log4j properties files bundled with kafka) -- `KAFKA_LOG4J_ROOT_LOGLEVEL` can be provided to set the value of log4j.rootLogger in log4j.properties and tools-log4j.properties -- log4j loggers can be added to log4j.properties by setting them in KAFKA_LOG4J_LOGGERS environment variable in a single comma separated string +- `KAFKA_LOG4J_ROOT_LOGLEVEL` can be provided to set the value of log4j.rootLogger in log4j2.properties and tools-log4j2.properties +- log4j loggers can be added to log4j2.properties by setting them in KAFKA_LOG4J_LOGGERS environment variable in a single comma separated string - Example: - Assuming that KAFKA_LOG4J_LOGGERS='property1=value1,property2=value2' environment variable is provided to docker container. 
- - log4j.logger.property1=value1 and log4j.logger.property2=value2 will be added to the log4j.properties file inside docker container. + - log4j.logger.property1=value1 and log4j.logger.property2=value2 will be added to the log4j2.properties file inside docker container. - Environment variables commonly used in Kafka can be provided via environment variables, for example `CLUSTER_ID`. - Command `docker run --env CONFIG_NAME=CONFIG_VALUE -p 9092:9092 apache/kafka:latest` can be used to provide environment variables to docker container diff --git a/docker/jvm/Dockerfile b/docker/jvm/Dockerfile index 72e35e63c0473..ccf4676b89233 100644 --- a/docker/jvm/Dockerfile +++ b/docker/jvm/Dockerfile @@ -76,8 +76,8 @@ RUN set -eux ; \ chown appuser:appuser -R /usr/logs /opt/kafka /mnt/shared/config; \ chown appuser:root -R /var/lib/kafka /etc/kafka/secrets /etc/kafka; \ chmod -R ug+w /etc/kafka /var/lib/kafka /etc/kafka/secrets; \ - cp /opt/kafka/config/log4j.properties /etc/kafka/docker/log4j.properties; \ - cp /opt/kafka/config/tools-log4j.properties /etc/kafka/docker/tools-log4j.properties; \ + cp /opt/kafka/config/log4j2.properties /etc/kafka/docker/log4j2.properties; \ + cp /opt/kafka/config/tools-log4j2.properties /etc/kafka/docker/tools-log4j2.properties; \ cp /opt/kafka/config/kraft/server.properties /etc/kafka/docker/server.properties; \ rm kafka.tgz kafka.tgz.asc KEYS; \ apk del wget gpg gpg-agent; \ diff --git a/docker/native/Dockerfile b/docker/native/Dockerfile index 36104b76edca2..a6ec7aed2c016 100644 --- a/docker/native/Dockerfile +++ b/docker/native/Dockerfile @@ -64,8 +64,8 @@ RUN apk update ; \ COPY --chown=appuser:root --from=build-native-image /app/kafka/kafka.Kafka /opt/kafka/ COPY --chown=appuser:root --from=build-native-image /app/kafka/config/kraft/server.properties /etc/kafka/docker/ -COPY --chown=appuser:root --from=build-native-image /app/kafka/config/log4j.properties /etc/kafka/docker/ -COPY --chown=appuser:root --from=build-native-image 
/app/kafka/config/tools-log4j.properties /etc/kafka/docker/ +COPY --chown=appuser:root --from=build-native-image /app/kafka/config/log4j2.properties /etc/kafka/docker/ +COPY --chown=appuser:root --from=build-native-image /app/kafka/config/tools-log4j2.properties /etc/kafka/docker/ COPY --chown=appuser:root resources/common-scripts /etc/kafka/docker/ COPY --chown=appuser:root launch /etc/kafka/docker/ diff --git a/docker/native/launch b/docker/native/launch index 93de7aa9483b4..feb42ce646711 100755 --- a/docker/native/launch +++ b/docker/native/launch @@ -42,11 +42,11 @@ result=$(/opt/kafka/kafka.Kafka setup \ --default-configs-dir /etc/kafka/docker \ --mounted-configs-dir /mnt/shared/config \ --final-configs-dir /opt/kafka/config \ - -Dlog4j.configuration=file:/opt/kafka/config/tools-log4j.properties 2>&1) || \ + -Dlog4j2.configurationFile=file:/opt/kafka/config/tools-log4j2.properties 2>&1) || \ echo $result | grep -i "already formatted" || \ { echo $result && (exit 1) } echo "WARNING: THIS IS AN EXPERIMENTAL DOCKER IMAGE RECOMMENDED FOR LOCAL TESTING AND DEVELOPMENT PURPOSES." -KAFKA_LOG4J_CMD_OPTS="-Dkafka.logs.dir=/opt/kafka/logs/ -Dlog4j.configuration=file:/opt/kafka/config/log4j.properties" +KAFKA_LOG4J_CMD_OPTS="-Dkafka.logs.dir=/opt/kafka/logs/ -Dlog4j2.configurationFile=file:/opt/kafka/config/log4j2.properties" exec /opt/kafka/kafka.Kafka start --config /opt/kafka/config/server.properties $KAFKA_LOG4J_CMD_OPTS $KAFKA_JMX_OPTS ${KAFKA_OPTS-} diff --git a/docs/ops.html b/docs/ops.html index e283cbc456f57..12b5b43873581 100644 --- a/docs/ops.html +++ b/docs/ops.html @@ -3957,7 +3957,7 @@

Preparing for migration

It is recommended to enable TRACE level logging for the migration components while the migration is active. This can - be done by adding the following log4j configuration to each KRaft controller's "log4j.properties" file. + be done by adding the following log4j configuration to each KRaft controller's "log4j2.properties" file.

log4j.logger.org.apache.kafka.metadata.migration=TRACE
diff --git a/docs/streams/tutorial.html b/docs/streams/tutorial.html index 933cd11f12be2..6915be76f2495 100644 --- a/docs/streams/tutorial.html +++ b/docs/streams/tutorial.html @@ -66,7 +66,7 @@

| |-- Pipe.java | |-- WordCount.java |-- resources - |-- log4j.properties + |-- log4j2.properties

The pom.xml file included in the project already has the Streams dependency defined. diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index 8bb417a736215..24c23f5017235 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -162,7 +162,9 @@ versions += [ // Also make sure the compression levels in org.apache.kafka.common.record.CompressionType are still valid zstd: "1.5.6-6", junitPlatform: "1.10.2", - hdrHistogram: "2.2.2" + hdrHistogram: "2.2.2", + log4j2: "2.24.1", + bndlib: "7.0.0" ] libs += [ @@ -261,5 +263,11 @@ libs += [ mavenArtifact: "org.apache.maven:maven-artifact:$versions.mavenArtifact", zstd: "com.github.luben:zstd-jni:$versions.zstd", httpclient: "org.apache.httpcomponents:httpclient:$versions.httpclient", - hdrHistogram: "org.hdrhistogram:HdrHistogram:$versions.hdrHistogram" + hdrHistogram: "org.hdrhistogram:HdrHistogram:$versions.hdrHistogram", + log4j2Api: "org.apache.logging.log4j:log4j-api:$versions.log4j2", + log4j2Core: "org.apache.logging.log4j:log4j-core:$versions.log4j2", + log4j2CoreTest: "org.apache.logging.log4j:log4j-core-test:$versions.log4j2", + slf4jLog4j2: "org.apache.logging.log4j:log4j-slf4j-impl:$versions.log4j2", + spotbugs: "com.github.spotbugs:spotbugs-annotations:$versions.spotbugs", + bndlib:"biz.aQute.bnd:biz.aQute.bndlib:$versions.bndlib" ] diff --git a/group-coordinator/src/test/resources/log4j2.properties b/group-coordinator/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..01ee02c2986ae --- /dev/null +++ b/group-coordinator/src/test/resources/log4j2.properties @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +name=GroupCoordinatorTestConfig +appenders=stdout + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n + +rootLogger.level=DEBUG +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +loggers=org.apache.kafka,org.apache.zookeeper + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=DEBUG +logger.org.apache.kafka.additivity=false +logger.org.apache.kafka.appenderRefs=stdout +logger.org.apache.kafka.appenderRef.stdout.ref=STDOUT + +logger.org.apache.zookeeper.name=org.apache.zookeeper +logger.org.apache.zookeeper.level=WARN +logger.org.apache.zookeeper.additivity=false +logger.org.apache.zookeeper.appenderRefs=stdout +logger.org.apache.zookeeper.appenderRef.stdout.ref=STDOUT diff --git a/metadata/src/test/resources/log4j2.properties b/metadata/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..39356d69dfd49 --- /dev/null +++ b/metadata/src/test/resources/log4j2.properties @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +name=MetadataTestConfig +appenders=stdout + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n + +rootLogger.level=DEBUG +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +loggers=org.apache.kafka,org.apache.zookeeper + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=DEBUG + +logger.org.apache.zookeeper.name=org.apache.zookeeper +logger.org.apache.zookeeper.level=WARN diff --git a/raft/bin/test-kraft-server-start.sh b/raft/bin/test-kraft-server-start.sh index 701bc1864a458..6efac054acaf5 100755 --- a/raft/bin/test-kraft-server-start.sh +++ b/raft/bin/test-kraft-server-start.sh @@ -17,7 +17,7 @@ base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/kraft-log4j.properties" + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=file:$base_dir/../config/kraft-log4j2.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/raft/config/kraft-log4j2.properties b/raft/config/kraft-log4j2.properties new file mode 100644 index 0000000000000..76d756ef39ff0 --- /dev/null +++ b/raft/config/kraft-log4j2.properties @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +name=RaftConfig +appenders=stderr + +appender.stderr.type=Console +appender.stderr.name=STDERR +appender.stderr.layout.type=PatternLayout +appender.stderr.layout.pattern=[%d] %p %m (%c)%n +appender.stderr.target=SYSTEM_ERR + +rootLogger.level=INFO +rootLogger.appenderRefs=stderr +rootLogger.appenderRef.stderr.ref=STDERR + +loggers=org.apache.kafka.raft,org.apache.kafka.snapshot + +logger.org.apache.kafka.raft.name=org.apache.kafka.raft +logger.org.apache.kafka.raft.level=INFO + +logger.org.apache.kafka.snapshot.name=org.apache.kafka.snapshot +logger.org.apache.kafka.snapshot.level=INFO diff --git a/raft/src/test/resources/log4j.properties b/raft/src/test/resources/log4j.properties deleted file mode 100644 index 6d90f6dd34884..0000000000000 --- a/raft/src/test/resources/log4j.properties +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -log4j.rootLogger=OFF, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n - -log4j.logger.org.apache.kafka.raft=ERROR -log4j.logger.org.apache.kafka.snapshot=ERROR diff --git a/raft/src/test/resources/log4j2.properties b/raft/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..94d0feb0087ef --- /dev/null +++ b/raft/src/test/resources/log4j2.properties @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+name=RaftTestConfig +appenders=stdout + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n + +rootLogger.level=OFF +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +loggers=org.apache.kafka.raft,org.apache.kafka.snapshot + +logger.org.apache.kafka.raft.name=org.apache.kafka.raft +logger.org.apache.kafka.raft.level=ERROR + +logger.org.apache.kafka.snapshot.name=org.apache.kafka.snapshot +logger.org.apache.kafka.snapshot.level=ERROR diff --git a/server-common/src/test/resources/test/log4j.properties b/server-common/src/test/resources/test/log4j.properties deleted file mode 100644 index be36f90299a77..0000000000000 --- a/server-common/src/test/resources/test/log4j.properties +++ /dev/null @@ -1,21 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n - -log4j.logger.org.apache.kafka=INFO diff --git a/raft/config/kraft-log4j.properties b/server-common/src/test/resources/test/log4j2.properties similarity index 66% rename from raft/config/kraft-log4j.properties rename to server-common/src/test/resources/test/log4j2.properties index 14f739af05df5..d889660be3c35 100644 --- a/raft/config/kraft-log4j.properties +++ b/server-common/src/test/resources/test/log4j2.properties @@ -12,13 +12,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +name=ServerCommonTestConfig +appenders=stdout -log4j.rootLogger=INFO, stderr +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n -log4j.appender.stderr=org.apache.log4j.ConsoleAppender -log4j.appender.stderr.layout=org.apache.log4j.PatternLayout -log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.stderr.Target=System.err +rootLogger.level=INFO +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT -log4j.logger.org.apache.kafka.raft=INFO -log4j.logger.org.apache.kafka.snapshot=INFO +loggers=org.apache.kafka + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=INFO diff --git a/shell/src/test/resources/log4j.properties b/shell/src/test/resources/log4j.properties deleted file mode 100644 index a72a9693de2af..0000000000000 --- a/shell/src/test/resources/log4j.properties +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -log4j.rootLogger=DEBUG, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n diff --git a/streams/test-utils/src/test/resources/log4j.properties b/shell/src/test/resources/log4j2.properties similarity index 73% rename from streams/test-utils/src/test/resources/log4j.properties rename to shell/src/test/resources/log4j2.properties index be36f90299a77..85f7978580e01 100644 --- a/streams/test-utils/src/test/resources/log4j.properties +++ b/shell/src/test/resources/log4j2.properties @@ -12,10 +12,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-log4j.rootLogger=INFO, stdout +name=ShellTestConfig +appenders=stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n -log4j.logger.org.apache.kafka=INFO +rootLogger.level=DEBUG +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT diff --git a/storage/src/test/resources/log4j2.properties b/storage/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..3bc0f99475be7 --- /dev/null +++ b/storage/src/test/resources/log4j2.properties @@ -0,0 +1,53 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+name=StorageTestConfig +appenders=stdout,fileAppender + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n + +appender.fileAppender.type=RollingFile +appender.fileAppender.name=FileAppender +appender.fileAppender.fileName=storage.log +appender.fileAppender.layout.type=PatternLayout +appender.fileAppender.layout.pattern=%d [%t] %-5p %c %x - %m%n + +appender.fileAppender.filePattern=storage-%d{yyyy-MM-dd}.log +appender.fileAppender.policies.type=Policies +appender.fileAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.fileAppender.policies.time.interval=1 + +rootLogger.level=OFF +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +loggers=org.apache.kafka.server.log.remote.storage,org.apache.kafka.server.log.remote.metadata.storage,kafka.log.remote + +logger.org.apache.kafka.server.log.remote.storage.name=org.apache.kafka.server.log.remote.storage +logger.org.apache.kafka.server.log.remote.storage.level=INFO +logger.org.apache.kafka.server.log.remote.storage.appenderRefs=fileAppender +logger.org.apache.kafka.server.log.remote.storage.appenderRef.fileAppender.ref=FileAppender + +logger.org.apache.kafka.server.log.remote.metadata.storage.name=org.apache.kafka.server.log.remote.metadata.storage +logger.org.apache.kafka.server.log.remote.metadata.storage.level=INFO +logger.org.apache.kafka.server.log.remote.metadata.storage.appenderRefs=fileAppender +logger.org.apache.kafka.server.log.remote.metadata.storage.appenderRef.fileAppender.ref=FileAppender + +logger.kafka.log.remote.name=kafka.log.remote +logger.kafka.log.remote.level=INFO +logger.kafka.log.remote.appenderRefs=fileAppender +logger.kafka.log.remote.appenderRef.fileAppender.ref=FileAppender diff --git a/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j.properties 
b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j.properties deleted file mode 100644 index b620f1bb390e4..0000000000000 --- a/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j.properties +++ /dev/null @@ -1,19 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -log4j.rootLogger=INFO, console - -log4j.appender.console=org.apache.log4j.ConsoleAppender -log4j.appender.console.layout=org.apache.log4j.PatternLayout -log4j.appender.console.layout.ConversionPattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n \ No newline at end of file diff --git a/metadata/src/test/resources/log4j.properties b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.properties similarity index 70% rename from metadata/src/test/resources/log4j.properties rename to streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.properties index db3879386f10f..f656fe5e7abdb 100644 --- a/metadata/src/test/resources/log4j.properties +++ b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.properties @@ -12,11 +12,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -log4j.rootLogger=DEBUG, stdout +name=StreamsQuickstartConfig +appenders=console -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=ConsoleAppender +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n -log4j.logger.org.apache.kafka=DEBUG -log4j.logger.org.apache.zookeeper=WARN +rootLogger.level=INFO +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=ConsoleAppender diff --git a/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java b/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java index ffb1e4496226a..01847f43c8207 100644 --- a/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/StreamsConfigTest.java @@ -42,7 +42,7 @@ import org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor; import org.apache.kafka.streams.state.BuiltInDslStoreSuppliers; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java index fdc37f631e222..e6deab1b256f3 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/InternalTopicManagerTest.java @@ -53,7 +53,7 @@ import org.apache.kafka.streams.errors.StreamsException; import 
org.apache.kafka.streams.processor.internals.InternalTopicManager.ValidationResult; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java index 9396faa1be123..0ce59b01726b3 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/PartitionGroupTest.java @@ -38,7 +38,7 @@ import org.apache.kafka.test.MockSourceNode; import org.apache.kafka.test.MockTimestampExtractor; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.hamcrest.Matchers; import org.junit.jupiter.api.Test; diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java index cfb9e6f952f5f..1e3acd4e118a4 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/RecordCollectorTest.java @@ -62,7 +62,8 @@ import org.apache.kafka.test.InternalMockProcessorContext; import org.apache.kafka.test.MockClientSupplier; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.filter.ThresholdFilter; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -1289,7 +1290,7 @@ public void shouldNotThrowStreamsExceptionOnSubsequentCallIfASendFailsWithContin try (final LogCaptureAppender logCaptureAppender = LogCaptureAppender.createAndRegister(RecordCollectorImpl.class)) { - 
logCaptureAppender.setThreshold(Level.INFO); + logCaptureAppender.addFilter(ThresholdFilter.createFilter(Level.INFO, null, null)); collector.send(topic, "3", "0", null, null, stringSerializer, stringSerializer, sinkNodeName, context, streamPartitioner); collector.flush(); diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java index b94c13d18c598..ab44cdb3002e7 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/StoreChangelogReaderTest.java @@ -45,7 +45,7 @@ import org.apache.kafka.test.MockStateRestoreListener; import org.apache.kafka.test.StreamsTestUtils; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.junit.jupiter.params.ParameterizedTest; diff --git a/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java b/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java index fa5dca53bb1ff..543f339606a65 100644 --- a/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/processor/internals/TaskManagerTest.java @@ -55,7 +55,7 @@ import org.apache.kafka.streams.processor.internals.testutil.DummyStreamsConfig; import org.apache.kafka.streams.state.internals.OffsetCheckpoint; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.hamcrest.Matchers; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java 
b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java index 1e8c96f6f5299..14e0484158b55 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java @@ -668,5 +668,5 @@ public void prefixScanShouldNotThrowConcurrentModificationException() { iter.next(); } } - } + } } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java index 45941ecb3cfc4..8a0ad6d9a00af 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java @@ -921,7 +921,7 @@ public void shouldNotThrowInvalidRangeExceptionWithNegativeFromKey() { final String keyTo = Serdes.String().deserializer() .deserialize("", Serdes.Integer().serializer().serialize("", 1)); - try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); + try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AbstractSessionBytesStoreTest.class); final KeyValueIterator, Long> iterator = sessionStore.findSessions(keyFrom, keyTo, 0L, 10L)) { assertFalse(iterator.hasNext()); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java index 76e3306922ebe..af96be1bc13f0 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java @@ -971,7 +971,7 @@ public void 
shouldReturnSameResultsForSingleKeyFetchAndEqualKeyRangeFetch() { @Test public void shouldNotThrowInvalidRangeExceptionWithNegativeFromKey() { - try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); + try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AbstractWindowBytesStoreTest.class); final KeyValueIterator, String> iterator = windowStore.fetch(-1, 1, 0L, 10L)) { assertFalse(iterator.hasNext()); diff --git a/streams/src/test/resources/log4j.properties b/streams/src/test/resources/log4j.properties deleted file mode 100644 index b7e1fb2d60ea4..0000000000000 --- a/streams/src/test/resources/log4j.properties +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n - -log4j.logger.kafka=ERROR -log4j.logger.state.change.logger=ERROR -log4j.logger.org.apache.kafka=ERROR -log4j.logger.org.apache.zookeeper=ERROR -log4j.logger.org.apache.kafka.clients=ERROR - -# These are the only logs we will likely ever find anything useful in to debug Streams test failures -log4j.logger.org.apache.kafka.clients.consumer=INFO -log4j.logger.org.apache.kafka.clients.producer=INFO -log4j.logger.org.apache.kafka.streams=INFO - -# printing out the configs takes up a huge amount of the allotted characters, -# and provides little value as we can always figure out the test configs without the logs -log4j.logger.org.apache.kafka.clients.producer.ProducerConfig=ERROR -log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=ERROR -log4j.logger.org.apache.kafka.clients.admin.AdminClientConfig=ERROR -log4j.logger.org.apache.kafka.streams.StreamsConfig=ERROR diff --git a/streams/src/test/resources/log4j2.properties b/streams/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..81f5117252aa3 --- /dev/null +++ b/streams/src/test/resources/log4j2.properties @@ -0,0 +1,69 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# Appender Configuration +name=TestConfig +appenders=console + +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n + +rootLogger.level=INFO +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +#appender.stdout.type=Console +#appender.stdout.name=stdout +#appender.stdout.layout.type=PatternLayout +#appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n + +loggers=kafka,org.apache.zookeeper,org.apache.kafka.clients.producer.ProducerConfig,org.apache.kafka.clients.consumer.ConsumerConfig,org.apache.kafka.clients.admin.AdminClientConfig,org.apache.kafka.clients,org.apache.kafka.streams.StreamsConfig,org.apache.kafka.streams,org.apache.kafka,state.change.logger + +logger.kafka.name=kafka +logger.kafka.level=ERROR + +logger.state.change.logger.name=state.change.logger +logger.state.change.logger.level=ERROR + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=ERROR + +logger.org.apache.zookeeper.name=org.apache.zookeeper +logger.org.apache.zookeeper.level=ERROR + +logger.org.apache.kafka.clients.name=org.apache.kafka.clients +logger.org.apache.kafka.clients.level=ERROR + +logger.org.apache.kafka.clients.consumer.name=org.apache.kafka.clients.consumer +logger.org.apache.kafka.clients.consumer.level=INFO + +logger.org.apache.kafka.clients.producer.name=org.apache.kafka.clients.producer +logger.org.apache.kafka.clients.producer.level=INFO + +logger.org.apache.kafka.streams.name=org.apache.kafka.streams 
+logger.org.apache.kafka.streams.level=INFO + +logger.org.apache.kafka.clients.producer.ProducerConfig.name=org.apache.kafka.clients.producer.ProducerConfig +logger.org.apache.kafka.clients.producer.ProducerConfig.level=ERROR + +logger.org.apache.kafka.clients.consumer.ConsumerConfig.name=org.apache.kafka.clients.consumer.ConsumerConfig +logger.org.apache.kafka.clients.consumer.ConsumerConfig.level=ERROR + +logger.org.apache.kafka.clients.admin.AdminClientConfig.name=org.apache.kafka.clients.admin.AdminClientConfig +logger.org.apache.kafka.clients.admin.AdminClientConfig.level=ERROR + +logger.org.apache.kafka.streams.StreamsConfig.name=org.apache.kafka.streams.StreamsConfig +logger.org.apache.kafka.streams.StreamsConfig.level=ERROR diff --git a/streams/streams-scala/src/test/resources/log4j.properties b/streams/streams-scala/src/test/resources/log4j2.properties similarity index 56% rename from streams/streams-scala/src/test/resources/log4j.properties rename to streams/streams-scala/src/test/resources/log4j2.properties index 93ffc165654a2..48c9aae534006 100644 --- a/streams/streams-scala/src/test/resources/log4j.properties +++ b/streams/streams-scala/src/test/resources/log4j2.properties @@ -16,19 +16,28 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Set root logger level to DEBUG and its only appender to A1. -log4j.rootLogger=INFO, R +name=StreamsScalaTestConfig +appenders=rolling -# A1 is set to be a ConsoleAppender. 
-log4j.appender.A1=org.apache.log4j.ConsoleAppender +rootLogger.level=INFO +rootLogger.appenderRefs=R +rootLogger.appenderRef.R.ref=R -log4j.appender.R=org.apache.log4j.RollingFileAppender -log4j.appender.R.File=logs/kafka-streams-scala.log +appender.A1.type=Console +appender.A1.name=A1 +appender.A1.layout.type=PatternLayout +appender.A1.layout.pattern=%-4r [%t] %-5p %c %x - %m%n -log4j.appender.R.MaxFileSize=100KB -# Keep one backup file -log4j.appender.R.MaxBackupIndex=1 +appender.R.type=RollingFile +appender.R.name=R +appender.R.fileName=logs/kafka-streams-scala.log +appender.R.filePattern=logs/kafka-streams-scala.log.%i +appender.R.layout.type=PatternLayout +appender.R.layout.pattern=%-4r [%t] %-5p %c %x - %m%n -# A1 uses PatternLayout. -log4j.appender.R.layout=org.apache.log4j.PatternLayout -log4j.appender.R.layout.ConversionPattern=%-4r [%t] %-5p %c %x - %m%n +appender.R.policies.type=Policies +appender.R.policies.size.type=SizeBasedTriggeringPolicy +appender.R.policies.size.size=100KB + +appender.R.strategy.type=DefaultRolloverStrategy +appender.R.strategy.max=1 diff --git a/core/src/test/resources/log4j.properties b/streams/test-utils/src/test/resources/log4j2.properties similarity index 66% rename from core/src/test/resources/log4j.properties rename to streams/test-utils/src/test/resources/log4j2.properties index b265ee9cdaaf1..91ffcef88a762 100644 --- a/core/src/test/resources/log4j.properties +++ b/streams/test-utils/src/test/resources/log4j2.properties @@ -12,14 +12,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-log4j.rootLogger=OFF, stdout +name=StreamsTestUtilsTestConfig +appenders=stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n -log4j.logger.kafka=WARN -log4j.logger.org.apache.kafka=WARN +rootLogger.level=INFO +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT -# zkclient can be verbose, during debugging it is common to adjust it separately -log4j.logger.org.apache.zookeeper=WARN +loggers=org.apache.kafka + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=INFO diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py index c84a3ec43c31e..4780b5e714f1b 100644 --- a/tests/kafkatest/services/connect.py +++ b/tests/kafkatest/services/connect.py @@ -38,7 +38,7 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service): LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log") STDOUT_FILE = os.path.join(PERSISTENT_ROOT, "connect.stdout") STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties") + LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j2.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid") EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties") CONNECT_REST_PORT = 8083 @@ -364,7 +364,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', 
log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE)) remote_connector_configs = [] for idx, template in enumerate(self.connector_config_templates): target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties") @@ -421,7 +421,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE)) if self.connector_config_templates: raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API") diff --git a/tests/kafkatest/services/console_consumer.py b/tests/kafkatest/services/console_consumer.py index fb87f20df1993..4f11f370b3cd2 100644 --- a/tests/kafkatest/services/console_consumer.py +++ b/tests/kafkatest/services/console_consumer.py @@ -36,7 +36,7 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService) STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "console_consumer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "console_consumer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "console_consumer.properties") JMX_TOOL_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.log") JMX_TOOL_ERROR_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log") @@ -253,7 +253,7 @@ def _worker(self, idx, node): node.account.create_file(ConsoleConsumer.CONFIG_FILE, prop_file) # Create and upload log 
properties - log_config = self.render('tools_log4j.properties', log_file=ConsoleConsumer.LOG_FILE) + log_config = self.render('tools_log4j2.properties', log_file=ConsoleConsumer.LOG_FILE) node.account.create_file(ConsoleConsumer.LOG4J_CONFIG, log_config) # Run and capture output diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index 7d6f42d27b7ca..1219b8e11b8e0 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -145,7 +145,7 @@ class for details. """ PERSISTENT_ROOT = "/mnt/kafka" STDOUT_STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "server-start-stdout-stderr.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j2.properties") # Logs such as controller.log, server.log, etc all go here OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs") OPERATIONAL_LOG_INFO_DIR = os.path.join(OPERATIONAL_LOG_DIR, "info") @@ -896,7 +896,7 @@ def start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG, self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + node.account.create_file(self.LOG4J_CONFIG, self.render('log4j2.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary diff --git a/tests/kafkatest/services/kafka/templates/log4j.properties b/tests/kafkatest/services/kafka/templates/log4j.properties deleted file mode 100644 index 04c9bd5d6ab6f..0000000000000 --- a/tests/kafkatest/services/kafka/templates/log4j.properties +++ /dev/null @@ -1,141 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger={{ log_level|default("DEBUG") }}, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n - -# INFO level appenders -log4j.appender.kafkaInfoAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.kafkaInfoAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.kafkaInfoAppender.File={{ log_dir }}/info/server.log -log4j.appender.kafkaInfoAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.kafkaInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.kafkaInfoAppender.Threshold=INFO - -log4j.appender.stateChangeInfoAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.stateChangeInfoAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.stateChangeInfoAppender.File={{ log_dir }}/info/state-change.log -log4j.appender.stateChangeInfoAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.stateChangeInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.stateChangeInfoAppender.Threshold=INFO - -log4j.appender.requestInfoAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.requestInfoAppender.DatePattern='.'yyyy-MM-dd-HH 
-log4j.appender.requestInfoAppender.File={{ log_dir }}/info/kafka-request.log -log4j.appender.requestInfoAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.requestInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.requestInfoAppender.Threshold=INFO - -log4j.appender.cleanerInfoAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.cleanerInfoAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.cleanerInfoAppender.File={{ log_dir }}/info/log-cleaner.log -log4j.appender.cleanerInfoAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.cleanerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.cleanerInfoAppender.Threshold=INFO - -log4j.appender.controllerInfoAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.controllerInfoAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.controllerInfoAppender.File={{ log_dir }}/info/controller.log -log4j.appender.controllerInfoAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.controllerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.controllerInfoAppender.Threshold=INFO - -log4j.appender.authorizerInfoAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.authorizerInfoAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.authorizerInfoAppender.File={{ log_dir }}/info/kafka-authorizer.log -log4j.appender.authorizerInfoAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.authorizerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.authorizerInfoAppender.Threshold=INFO - -# DEBUG level appenders -log4j.appender.kafkaDebugAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.kafkaDebugAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.kafkaDebugAppender.File={{ log_dir }}/debug/server.log -log4j.appender.kafkaDebugAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.kafkaDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n 
-log4j.appender.kafkaDebugAppender.Threshold=DEBUG - -log4j.appender.stateChangeDebugAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.stateChangeDebugAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.stateChangeDebugAppender.File={{ log_dir }}/debug/state-change.log -log4j.appender.stateChangeDebugAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.stateChangeDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.stateChangeDebugAppender.Threshold=DEBUG - -log4j.appender.requestDebugAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.requestDebugAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.requestDebugAppender.File={{ log_dir }}/debug/kafka-request.log -log4j.appender.requestDebugAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.requestDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.requestDebugAppender.Threshold=DEBUG - -log4j.appender.cleanerDebugAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.cleanerDebugAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.cleanerDebugAppender.File={{ log_dir }}/debug/log-cleaner.log -log4j.appender.cleanerDebugAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.cleanerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.cleanerDebugAppender.Threshold=DEBUG - -log4j.appender.controllerDebugAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.controllerDebugAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.controllerDebugAppender.File={{ log_dir }}/debug/controller.log -log4j.appender.controllerDebugAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.controllerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.controllerDebugAppender.Threshold=DEBUG - -log4j.appender.authorizerDebugAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.authorizerDebugAppender.DatePattern='.'yyyy-MM-dd-HH 
-log4j.appender.authorizerDebugAppender.File={{ log_dir }}/debug/kafka-authorizer.log -log4j.appender.authorizerDebugAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.authorizerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n -log4j.appender.authorizerDebugAppender.Threshold=DEBUG - -# Turn on all our debugging info -log4j.logger.kafka.producer.async.DefaultEventHandler={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender -log4j.logger.kafka.client.ClientUtils={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender -log4j.logger.kafka.perf={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender -log4j.logger.kafka.perf.ProducerPerformance$ProducerThread={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender -log4j.logger.kafka={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender - -log4j.logger.kafka.network.RequestChannel$={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender -log4j.additivity.kafka.network.RequestChannel$=false - -log4j.logger.kafka.network.Processor={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender -log4j.logger.kafka.server.KafkaApis={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender -log4j.additivity.kafka.server.KafkaApis=false -log4j.logger.kafka.request.logger={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender -log4j.additivity.kafka.request.logger=false - -log4j.logger.org.apache.kafka.raft={{ log_level|default("DEBUG") }}, controllerInfoAppender, controllerDebugAppender -log4j.logger.org.apache.kafka.controller={{ log_level|default("DEBUG") }}, controllerInfoAppender, controllerDebugAppender -log4j.logger.kafka.controller={{ log_level|default("DEBUG") }}, controllerInfoAppender, controllerDebugAppender -log4j.additivity.kafka.controller=false - -log4j.logger.kafka.log.LogCleaner={{ log_level|default("DEBUG") }}, cleanerInfoAppender, 
cleanerDebugAppender -log4j.additivity.kafka.log.LogCleaner=false - -log4j.logger.state.change.logger={{ log_level|default("DEBUG") }}, stateChangeInfoAppender, stateChangeDebugAppender -log4j.additivity.state.change.logger=false - -#Change this to debug to get the actual audit log for authorizer. -log4j.logger.kafka.authorizer.logger={{ log_level|default("DEBUG") }}, authorizerInfoAppender, authorizerDebugAppender -log4j.additivity.kafka.authorizer.logger=false - -#New Group Coordinator logging. -log4j.logger.org.apache.kafka.coordinator.group={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender -log4j.additivity.org.apache.kafka.coordinator.group=false diff --git a/tests/kafkatest/services/kafka/templates/log4j2.properties b/tests/kafkatest/services/kafka/templates/log4j2.properties new file mode 100644 index 0000000000000..52934ed373634 --- /dev/null +++ b/tests/kafkatest/services/kafka/templates/log4j2.properties @@ -0,0 +1,298 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name=KafkaTestsServicesConfig +appenders=stdout,kafkaInfoAppender,kafkaDebugAppender,stateChangeInfoAppender,stateChangeDebugAppender,requestInfoAppender,requestDebugAppender,cleanerInfoAppender,cleanerDebugAppender,controllerInfoAppender,controllerDebugAppender,authorizerInfoAppender,authorizerDebugAppender + +# Console appender +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c)%n + +# Kafka INFO level appender (RollingFileAppender) +appender.kafkaInfoAppender.type=RollingFile +appender.kafkaInfoAppender.name=KafkaInfoAppender +appender.kafkaInfoAppender.fileName={{ log_dir }}/info/server.log +appender.kafkaInfoAppender.filePattern={{ log_dir }}/info/server.log.%d{yyyy-MM-dd-HH} +appender.kafkaInfoAppender.layout.type=PatternLayout +appender.kafkaInfoAppender.layout.pattern=[%d] %p %m (%c)%n +appender.kafkaInfoAppender.policies.type=Policies +appender.kafkaInfoAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.kafkaInfoAppender.policies.time.interval=1 +appender.kafkaInfoAppender.filter.threshold.type=ThresholdFilter +appender.kafkaInfoAppender.filter.threshold.level=INFO + +# Kafka DEBUG level appender (RollingFileAppender) +appender.kafkaDebugAppender.type=RollingFile +appender.kafkaDebugAppender.name=KafkaDebugAppender +appender.kafkaDebugAppender.fileName={{ log_dir }}/debug/server.log +appender.kafkaDebugAppender.filePattern={{ log_dir }}/debug/server.log.%d{yyyy-MM-dd-HH} +appender.kafkaDebugAppender.layout.type=PatternLayout +appender.kafkaDebugAppender.layout.pattern=[%d] %p %m (%c)%n +appender.kafkaDebugAppender.policies.type=Policies +appender.kafkaDebugAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.kafkaDebugAppender.policies.time.interval=1 +appender.kafkaDebugAppender.filter.threshold.type=ThresholdFilter +appender.kafkaDebugAppender.filter.threshold.level=DEBUG + +# State Change INFO level appender 
+appender.stateChangeInfoAppender.type=RollingFile +appender.stateChangeInfoAppender.name=StateChangeInfoAppender +appender.stateChangeInfoAppender.fileName={{ log_dir }}/info/state-change.log +appender.stateChangeInfoAppender.filePattern={{ log_dir }}/info/state-change.log.%d{yyyy-MM-dd-HH} +appender.stateChangeInfoAppender.layout.type=PatternLayout +appender.stateChangeInfoAppender.layout.pattern=[%d] %p %m (%c)%n +appender.stateChangeInfoAppender.policies.type=Policies +appender.stateChangeInfoAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.stateChangeInfoAppender.policies.time.interval=1 +appender.stateChangeInfoAppender.filter.threshold.type=ThresholdFilter +appender.stateChangeInfoAppender.filter.threshold.level=INFO + +# State Change DEBUG level appender +appender.stateChangeDebugAppender.type=RollingFile +appender.stateChangeDebugAppender.name=StateChangeDebugAppender +appender.stateChangeDebugAppender.fileName={{ log_dir }}/debug/state-change.log +appender.stateChangeDebugAppender.filePattern={{ log_dir }}/debug/state-change.log.%d{yyyy-MM-dd-HH} +appender.stateChangeDebugAppender.layout.type=PatternLayout +appender.stateChangeDebugAppender.layout.pattern=[%d] %p %m (%c)%n +appender.stateChangeDebugAppender.policies.type=Policies +appender.stateChangeDebugAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.stateChangeDebugAppender.policies.time.interval=1 +appender.stateChangeDebugAppender.filter.threshold.type=ThresholdFilter +appender.stateChangeDebugAppender.filter.threshold.level=DEBUG + +# Request INFO level appender +appender.requestInfoAppender.type=RollingFile +appender.requestInfoAppender.name=RequestInfoAppender +appender.requestInfoAppender.fileName={{ log_dir }}/info/kafka-request.log +appender.requestInfoAppender.filePattern={{ log_dir }}/info/kafka-request.log.%d{yyyy-MM-dd-HH} +appender.requestInfoAppender.layout.type=PatternLayout +appender.requestInfoAppender.layout.pattern=[%d] %p %m (%c)%n 
+appender.requestInfoAppender.policies.type=Policies +appender.requestInfoAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.requestInfoAppender.policies.time.interval=1 +appender.requestInfoAppender.filter.threshold.type=ThresholdFilter +appender.requestInfoAppender.filter.threshold.level=INFO + +# Request DEBUG level appender +appender.requestDebugAppender.type=RollingFile +appender.requestDebugAppender.name=RequestDebugAppender +appender.requestDebugAppender.fileName={{ log_dir }}/debug/kafka-request.log +appender.requestDebugAppender.filePattern={{ log_dir }}/debug/kafka-request.log.%d{yyyy-MM-dd-HH} +appender.requestDebugAppender.layout.type=PatternLayout +appender.requestDebugAppender.layout.pattern=[%d] %p %m (%c)%n +appender.requestDebugAppender.policies.type=Policies +appender.requestDebugAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.requestDebugAppender.policies.time.interval=1 +appender.requestDebugAppender.filter.threshold.type=ThresholdFilter +appender.requestDebugAppender.filter.threshold.level=DEBUG + +# Cleaner INFO level appender +appender.cleanerInfoAppender.type=RollingFile +appender.cleanerInfoAppender.name=CleanerInfoAppender +appender.cleanerInfoAppender.fileName={{ log_dir }}/info/log-cleaner.log +appender.cleanerInfoAppender.filePattern={{ log_dir }}/info/log-cleaner.log.%d{yyyy-MM-dd-HH} +appender.cleanerInfoAppender.layout.type=PatternLayout +appender.cleanerInfoAppender.layout.pattern=[%d] %p %m (%c)%n +appender.cleanerInfoAppender.policies.type=Policies +appender.cleanerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.cleanerInfoAppender.policies.time.interval=1 +appender.cleanerInfoAppender.filter.threshold.type=ThresholdFilter +appender.cleanerInfoAppender.filter.threshold.level=INFO + +# Cleaner DEBUG level appender +appender.cleanerDebugAppender.type=RollingFile +appender.cleanerDebugAppender.name=CleanerDebugAppender +appender.cleanerDebugAppender.fileName={{ log_dir 
}}/debug/log-cleaner.log +appender.cleanerDebugAppender.filePattern={{ log_dir }}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH} +appender.cleanerDebugAppender.layout.type=PatternLayout +appender.cleanerDebugAppender.layout.pattern=[%d] %p %m (%c)%n +appender.cleanerDebugAppender.policies.type=Policies +appender.cleanerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.cleanerDebugAppender.policies.time.interval=1 +appender.cleanerDebugAppender.filter.threshold.type=ThresholdFilter +appender.cleanerDebugAppender.filter.threshold.level=DEBUG + +# Controller INFO level appender +appender.controllerInfoAppender.type=RollingFile +appender.controllerInfoAppender.name=ControllerInfoAppender +appender.controllerInfoAppender.fileName={{ log_dir }}/info/controller.log +appender.controllerInfoAppender.filePattern={{ log_dir }}/info/controller.log.%d{yyyy-MM-dd-HH} +appender.controllerInfoAppender.layout.type=PatternLayout +appender.controllerInfoAppender.layout.pattern=[%d] %p %m (%c)%n +appender.controllerInfoAppender.policies.type=Policies +appender.controllerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.controllerInfoAppender.policies.time.interval=1 +appender.controllerInfoAppender.filter.threshold.type=ThresholdFilter +appender.controllerInfoAppender.filter.threshold.level=INFO + +# Controller DEBUG level appender +appender.controllerDebugAppender.type=RollingFile +appender.controllerDebugAppender.name=ControllerDebugAppender +appender.controllerDebugAppender.fileName={{ log_dir }}/debug/controller.log +appender.controllerDebugAppender.filePattern={{ log_dir }}/debug/controller.log.%d{yyyy-MM-dd-HH} +appender.controllerDebugAppender.layout.type=PatternLayout +appender.controllerDebugAppender.layout.pattern=[%d] %p %m (%c)%n +appender.controllerDebugAppender.policies.type=Policies +appender.controllerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.controllerDebugAppender.policies.time.interval=1 
+appender.controllerDebugAppender.filter.threshold.type=ThresholdFilter +appender.controllerDebugAppender.filter.threshold.level=DEBUG + +# Authorizer INFO level appender +appender.authorizerInfoAppender.type=RollingFile +appender.authorizerInfoAppender.name=AuthorizerInfoAppender +appender.authorizerInfoAppender.fileName={{ log_dir }}/info/kafka-authorizer.log +appender.authorizerInfoAppender.filePattern={{ log_dir }}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH} +appender.authorizerInfoAppender.layout.type=PatternLayout +appender.authorizerInfoAppender.layout.pattern=[%d] %p %m (%c)%n +appender.authorizerInfoAppender.policies.type=Policies +appender.authorizerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.authorizerInfoAppender.policies.time.interval=1 +appender.authorizerInfoAppender.filter.threshold.type=ThresholdFilter +appender.authorizerInfoAppender.filter.threshold.level=INFO + +# Authorizer DEBUG level appender +appender.authorizerDebugAppender.type=RollingFile +appender.authorizerDebugAppender.name=AuthorizerDebugAppender +appender.authorizerDebugAppender.fileName={{ log_dir }}/debug/kafka-authorizer.log +appender.authorizerDebugAppender.filePattern={{ log_dir }}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH} +appender.authorizerDebugAppender.layout.type=PatternLayout +appender.authorizerDebugAppender.layout.pattern=[%d] %p %m (%c)%n +appender.authorizerDebugAppender.policies.type=Policies +appender.authorizerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy +appender.authorizerDebugAppender.policies.time.interval=1 +appender.authorizerDebugAppender.filter.threshold.type=ThresholdFilter +appender.authorizerDebugAppender.filter.threshold.level=DEBUG + +# Root logger configuration +rootLogger.level={{ log_level|default("DEBUG") }} +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +# Define loggers for specific packages and components 
+loggers=kafka.producer.async.DefaultEventHandler,kafka.client.ClientUtils,kafka.perf,kafka.perf.ProducerPerformance$ProducerThread,kafka,kafka.network.RequestChannel$,kafka.network.Processor,kafka.server.KafkaApis,kafka.request.logger,org.apache.kafka.raft,org.apache.kafka.controller,kafka.controller,kafka.log.LogCleaner,state.change.logger,kafka.authorizer.logger,org.apache.kafka.coordinator.group + +# Kafka loggers +logger.kafka.producer.async.DefaultEventHandler.name=kafka.producer.async.DefaultEventHandler +logger.kafka.producer.async.DefaultEventHandler.level={{ log_level|default("DEBUG") }} +logger.kafka.producer.async.DefaultEventHandler.appenderRefs=kafkaInfoAppender,kafkaDebugAppender +logger.kafka.producer.async.DefaultEventHandler.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender +logger.kafka.producer.async.DefaultEventHandler.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender + +logger.kafka.client.ClientUtils.name=kafka.client.ClientUtils +logger.kafka.client.ClientUtils.level={{ log_level|default("DEBUG") }} +logger.kafka.client.ClientUtils.appenderRefs=kafkaInfoAppender,kafkaDebugAppender +logger.kafka.client.ClientUtils.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender +logger.kafka.client.ClientUtils.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender + +logger.kafka.perf.name=kafka.perf +logger.kafka.perf.level={{ log_level|default("DEBUG") }} +logger.kafka.perf.appenderRefs=kafkaInfoAppender,kafkaDebugAppender +logger.kafka.perf.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender +logger.kafka.perf.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender + +logger.kafka.perf.ProducerPerformance$ProducerThread.name=kafka.perf.ProducerPerformance$ProducerThread +logger.kafka.perf.ProducerPerformance$ProducerThread.level={{ log_level|default("DEBUG") }} +logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRefs=kafkaInfoAppender,kafkaDebugAppender 
+logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender +logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender + +logger.kafka.name=kafka +logger.kafka.level={{ log_level|default("DEBUG") }} +logger.kafka.appenderRefs=kafkaInfoAppender,kafkaDebugAppender +logger.kafka.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender +logger.kafka.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender + +# Request Channel and Server loggers +logger.kafka.network.RequestChannel$.name=kafka.network.RequestChannel$ +logger.kafka.network.RequestChannel$.level={{ log_level|default("DEBUG") }} +logger.kafka.network.RequestChannel$.appenderRefs=requestInfoAppender,requestDebugAppender +logger.kafka.network.RequestChannel$.appenderRef.requestInfoAppender.ref=RequestInfoAppender +logger.kafka.network.RequestChannel$.appenderRef.requestDebugAppender.ref=RequestDebugAppender +logger.kafka.network.RequestChannel$.additivity=false + +logger.kafka.network.Processor.name=kafka.network.Processor +logger.kafka.network.Processor.level={{ log_level|default("DEBUG") }} +logger.kafka.network.Processor.appenderRefs=requestInfoAppender,requestDebugAppender +logger.kafka.network.Processor.appenderRef.requestInfoAppender.ref=RequestInfoAppender +logger.kafka.network.Processor.appenderRef.requestDebugAppender.ref=RequestDebugAppender + +logger.kafka.server.KafkaApis.name=kafka.server.KafkaApis +logger.kafka.server.KafkaApis.level={{ log_level|default("DEBUG") }} +logger.kafka.server.KafkaApis.appenderRefs=requestInfoAppender,requestDebugAppender +logger.kafka.server.KafkaApis.appenderRef.requestInfoAppender.ref=RequestInfoAppender +logger.kafka.server.KafkaApis.appenderRef.requestDebugAppender.ref=RequestDebugAppender +logger.kafka.server.KafkaApis.additivity=false + +logger.kafka.request.logger.name=kafka.request.logger +logger.kafka.request.logger.level={{ log_level|default("DEBUG") }} 
+logger.kafka.request.logger.appenderRefs=requestInfoAppender,requestDebugAppender +logger.kafka.request.logger.appenderRef.requestInfoAppender.ref=RequestInfoAppender +logger.kafka.request.logger.appenderRef.requestDebugAppender.ref=RequestDebugAppender +logger.kafka.request.logger.additivity=false + +# Controller loggers +logger.org.apache.kafka.raft.name=org.apache.kafka.raft +logger.org.apache.kafka.raft.level={{ log_level|default("DEBUG") }} +logger.org.apache.kafka.raft.appenderRefs=controllerInfoAppender,controllerDebugAppender +logger.org.apache.kafka.raft.appenderRef.controllerInfoAppender.ref=ControllerInfoAppender +logger.org.apache.kafka.raft.appenderRef.controllerDebugAppender.ref=ControllerDebugAppender + +logger.org.apache.kafka.controller.name=org.apache.kafka.controller +logger.org.apache.kafka.controller.level={{ log_level|default("DEBUG") }} +logger.org.apache.kafka.controller.appenderRefs=controllerInfoAppender,controllerDebugAppender +logger.org.apache.kafka.controller.appenderRef.controllerInfoAppender.ref=ControllerInfoAppender +logger.org.apache.kafka.controller.appenderRef.controllerDebugAppender.ref=ControllerDebugAppender + +logger.kafka.controller.name=kafka.controller +logger.kafka.controller.level={{ log_level|default("DEBUG") }} +logger.kafka.controller.appenderRefs=controllerInfoAppender,controllerDebugAppender +logger.kafka.controller.appenderRef.controllerInfoAppender.ref=ControllerInfoAppender +logger.kafka.controller.appenderRef.controllerDebugAppender.ref=ControllerDebugAppender +logger.kafka.controller.additivity=false + +# Log Cleaner logger +logger.kafka.log.LogCleaner.name=kafka.log.LogCleaner +logger.kafka.log.LogCleaner.level={{ log_level|default("DEBUG") }} +logger.kafka.log.LogCleaner.appenderRefs=cleanerInfoAppender,cleanerDebugAppender +logger.kafka.log.LogCleaner.appenderRef.cleanerInfoAppender.ref=CleanerInfoAppender +logger.kafka.log.LogCleaner.appenderRef.cleanerDebugAppender.ref=CleanerDebugAppender 
+logger.kafka.log.LogCleaner.additivity=false + +# State change logger +logger.state.change.logger.name=state.change.logger +logger.state.change.logger.level={{ log_level|default("DEBUG") }} +logger.state.change.logger.appenderRefs=stateChangeInfoAppender,stateChangeDebugAppender +logger.state.change.logger.appenderRef.stateChangeInfoAppender.ref=StateChangeInfoAppender +logger.state.change.logger.appenderRef.stateChangeDebugAppender.ref=StateChangeDebugAppender +logger.state.change.logger.additivity=false + +# Authorizer logger +logger.kafka.authorizer.logger.name=kafka.authorizer.logger +logger.kafka.authorizer.logger.level={{ log_level|default("DEBUG") }} +logger.kafka.authorizer.logger.appenderRefs=authorizerInfoAppender,authorizerDebugAppender +logger.kafka.authorizer.logger.appenderRef.authorizerInfoAppender.ref=AuthorizerInfoAppender +logger.kafka.authorizer.logger.appenderRef.authorizerDebugAppender.ref=AuthorizerDebugAppender +logger.kafka.authorizer.logger.additivity=false + +# Coordinator logger +logger.org.apache.kafka.coordinator.group.name=org.apache.kafka.coordinator.group +logger.org.apache.kafka.coordinator.group.level={{ log_level|default("DEBUG") }} +logger.org.apache.kafka.coordinator.group.appenderRefs=kafkaInfoAppender,kafkaDebugAppender +logger.org.apache.kafka.coordinator.group.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender +logger.org.apache.kafka.coordinator.group.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender +logger.org.apache.kafka.coordinator.group.additivity=false diff --git a/tests/kafkatest/services/performance/consumer_performance.py b/tests/kafkatest/services/performance/consumer_performance.py index 3325fe5298a2a..b9171c3f0d338 100644 --- a/tests/kafkatest/services/performance/consumer_performance.py +++ b/tests/kafkatest/services/performance/consumer_performance.py @@ -49,7 +49,7 @@ class ConsumerPerformanceService(PerformanceService): STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stdout") 
STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stderr") LOG_FILE = os.path.join(LOG_DIR, "consumer_performance.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "consumer.properties") logs = { @@ -160,7 +160,7 @@ def parse_results(self, line, version): def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % ConsumerPerformanceService.PERSISTENT_ROOT, allow_fail=False) - log_config = self.render('tools_log4j.properties', log_file=ConsumerPerformanceService.LOG_FILE) + log_config = self.render('tools_log4j2.properties', log_file=ConsumerPerformanceService.LOG_FILE) node.account.create_file(ConsumerPerformanceService.LOG4J_CONFIG, log_config) node.account.create_file(ConsumerPerformanceService.CONFIG_FILE, str(self.security_config)) self.security_config.setup_node(node) diff --git a/tests/kafkatest/services/performance/end_to_end_latency.py b/tests/kafkatest/services/performance/end_to_end_latency.py index a35d2e1427480..16a81c817423c 100644 --- a/tests/kafkatest/services/performance/end_to_end_latency.py +++ b/tests/kafkatest/services/performance/end_to_end_latency.py @@ -30,7 +30,7 @@ class EndToEndLatencyService(PerformanceService): STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stdout") STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stderr") LOG_FILE = os.path.join(LOG_DIR, "end_to_end_latency.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "client.properties") logs = { @@ -103,7 +103,7 @@ def start_cmd(self, node): def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % EndToEndLatencyService.PERSISTENT_ROOT, allow_fail=False) - log_config = self.render('tools_log4j.properties', 
log_file=EndToEndLatencyService.LOG_FILE) + log_config = self.render('tools_log4j2.properties', log_file=EndToEndLatencyService.LOG_FILE) node.account.create_file(EndToEndLatencyService.LOG4J_CONFIG, log_config) client_config = str(self.security_config) diff --git a/tests/kafkatest/services/performance/producer_performance.py b/tests/kafkatest/services/performance/producer_performance.py index a990d4fe04527..b8d2577ad1964 100644 --- a/tests/kafkatest/services/performance/producer_performance.py +++ b/tests/kafkatest/services/performance/producer_performance.py @@ -32,7 +32,7 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService): STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "producer_performance.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "producer_performance.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput, version=DEV_BRANCH, settings=None, intermediate_stats=False, client_id="producer-performance"): @@ -122,7 +122,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % ProducerPerformanceService.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', log_file=ProducerPerformanceService.LOG_FILE) + log_config = self.render('tools_log4j2.properties', log_file=ProducerPerformanceService.LOG_FILE) node.account.create_file(ProducerPerformanceService.LOG4J_CONFIG, log_config) cmd = self.start_cmd(node) diff --git a/tests/kafkatest/services/performance/templates/tools_log4j.properties b/tests/kafkatest/services/performance/templates/tools_log4j2.properties similarity index 66% rename from tests/kafkatest/services/performance/templates/tools_log4j.properties rename to 
tests/kafkatest/services/performance/templates/tools_log4j2.properties index df10d88b8abcc..303f53e03f18b 100644 --- a/tests/kafkatest/services/performance/templates/tools_log4j.properties +++ b/tests/kafkatest/services/performance/templates/tools_log4j2.properties @@ -14,12 +14,18 @@ # limitations under the License. # Define the root logger with appender file -log4j.rootLogger = {{ log_level|default("INFO") }}, FILE +name=KafkaTestsPerformanceTemplatesConfig -log4j.appender.FILE=org.apache.log4j.FileAppender -log4j.appender.FILE.File={{ log_file }} -log4j.appender.FILE.ImmediateFlush=true -# Set the append to false, overwrite -log4j.appender.FILE.Append=false -log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n +appenders=file + +appender.file.type=File +appender.file.name=FILE +appender.file.fileName={{ log_file }} +appender.file.append=false +appender.file.immediateFlush=true +appender.file.layout.type=PatternLayout +appender.file.layout.pattern=[%d] %p %m (%c)%n + +rootLogger.level={{ log_level|default("INFO") }} +rootLogger.appenderRefs=file +rootLogger.appenderRef.file.ref=FILE diff --git a/tests/kafkatest/services/streams.py b/tests/kafkatest/services/streams.py index 696e9f58f7fab..f8365d06ee7ad 100644 --- a/tests/kafkatest/services/streams.py +++ b/tests/kafkatest/services/streams.py @@ -38,7 +38,7 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service): STDERR_FILE = os.path.join(PERSISTENT_ROOT, "streams.stderr") JMX_LOG_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.log") JMX_ERR_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") + LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "streams.pid") CLEAN_NODE_ENABLED = True @@ -306,7 +306,7 @@ def start_node(self, node): node.account.mkdirs(self.PERSISTENT_ROOT) 
prop_file = self.prop_file() node.account.create_file(self.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j2.properties', log_file=self.LOG_FILE)) self.logger.info("Starting StreamsTest process on " + str(node.account)) with node.account.monitor_log(self.STDOUT_FILE) as monitor: diff --git a/tests/kafkatest/services/templates/connect_log4j.properties b/tests/kafkatest/services/templates/connect_log4j2.properties similarity index 67% rename from tests/kafkatest/services/templates/connect_log4j.properties rename to tests/kafkatest/services/templates/connect_log4j2.properties index 9c90543ebe982..2dd5ec2696bc9 100644 --- a/tests/kafkatest/services/templates/connect_log4j.properties +++ b/tests/kafkatest/services/templates/connect_log4j2.properties @@ -16,11 +16,18 @@ ## # Define the root logger with appender file -log4j.rootLogger = {{ log_level|default("INFO") }}, FILE +name=TestsTemplatesConnectConfig -log4j.appender.FILE=org.apache.log4j.FileAppender -log4j.appender.FILE.File={{ log_file }} -log4j.appender.FILE.ImmediateFlush=true -log4j.appender.FILE.Append=true -log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n +appenders=file + +appender.file.type=File +appender.file.name=FILE +appender.file.fileName={{ log_file }} +appender.file.append=true +appender.file.immediateFlush=true +appender.file.layout.type=PatternLayout +appender.file.layout.pattern=[%d] %p %m (%c)%n + +rootLogger.level={{ log_level|default("INFO") }} +rootLogger.appenderRefs=file +rootLogger.appenderRef.file.ref=FILE diff --git a/tests/kafkatest/tests/streams/templates/log4j_template.properties b/tests/kafkatest/services/templates/tools_log4j2.properties similarity index 65% rename from tests/kafkatest/tests/streams/templates/log4j_template.properties rename to 
tests/kafkatest/services/templates/tools_log4j2.properties index 3f83b4220a1f5..e906667582fc9 100644 --- a/tests/kafkatest/tests/streams/templates/log4j_template.properties +++ b/tests/kafkatest/services/templates/tools_log4j2.properties @@ -14,18 +14,25 @@ # limitations under the License. # Define the root logger with appender file -log4j.rootLogger = {{ log_level|default("INFO") }}, FILE +name=KafkaTestsTemplatesToolConfig + +appenders=file + +appender.file.type=File +appender.file.name=FILE +appender.file.fileName={{ log_file }} +appender.file.append=true +appender.file.immediateFlush=true +appender.file.layout.type=PatternLayout +appender.file.layout.pattern=[%d] %p %m (%c)%n + +rootLogger.level={{ log_level|default("INFO") }} +rootLogger.appenderRefs=file +rootLogger.appenderRef.file.ref=FILE {% if loggers is defined %} {% for logger, log_level in loggers.items() %} -log4j.logger.{{ logger }}={{ log_level }} +logger.{{ logger }}.name={{ logger }} +logger.{{ logger }}.level={{ log_level }} {% endfor %} {% endif %} - -log4j.appender.FILE=org.apache.log4j.FileAppender -log4j.appender.FILE.File={{ log_file }} -log4j.appender.FILE.ImmediateFlush=true -# Set the append to true -log4j.appender.FILE.Append=true -log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n diff --git a/tests/kafkatest/services/transactional_message_copier.py b/tests/kafkatest/services/transactional_message_copier.py index 564a23fdcc389..a248f2b7ce507 100644 --- a/tests/kafkatest/services/transactional_message_copier.py +++ b/tests/kafkatest/services/transactional_message_copier.py @@ -31,7 +31,7 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "transactional_message_copier.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "transactional_message_copier.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, 
"tools-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") logs = { "transactional_message_copier_stdout": { @@ -75,7 +75,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % TransactionalMessageCopier.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', + log_config = self.render('tools_log4j2.properties', log_file=TransactionalMessageCopier.LOG_FILE) node.account.create_file(TransactionalMessageCopier.LOG4J_CONFIG, log_config) # Configure security diff --git a/tests/kafkatest/services/trogdor/templates/log4j.properties b/tests/kafkatest/services/trogdor/templates/log4j.properties deleted file mode 100644 index 252668e3dabf8..0000000000000 --- a/tests/kafkatest/services/trogdor/templates/log4j.properties +++ /dev/null @@ -1,23 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -log4j.rootLogger=DEBUG, mylogger -log4j.logger.kafka=DEBUG -log4j.logger.org.apache.kafka=DEBUG -log4j.logger.org.eclipse=INFO -log4j.appender.mylogger=org.apache.log4j.FileAppender -log4j.appender.mylogger.File={{ log_path }} -log4j.appender.mylogger.layout=org.apache.log4j.PatternLayout -log4j.appender.mylogger.layout.ConversionPattern=[%d] %p %m (%c)%n diff --git a/tests/kafkatest/services/trogdor/templates/log4j2.properties b/tests/kafkatest/services/trogdor/templates/log4j2.properties new file mode 100644 index 0000000000000..c9978b28e3ac5 --- /dev/null +++ b/tests/kafkatest/services/trogdor/templates/log4j2.properties @@ -0,0 +1,45 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +name=KafkaTestsTrogdorConfig +appenders=mylogger + +appender.mylogger.type=File +appender.mylogger.name=MyFileLogger +appender.mylogger.fileName={{ log_path }} +appender.mylogger.layout.type=PatternLayout +appender.mylogger.layout.pattern=[%d] %p %m (%c)%n + +rootLogger.level=DEBUG +rootLogger.appenderRefs=mylogger +rootLogger.appenderRef.mylogger.ref=MyFileLogger + +logger.kafka.name=kafka +logger.kafka.level=DEBUG +logger.kafka.additivity=false +logger.kafka.appenderRefs=mylogger +logger.kafka.appenderRef.mylogger.ref=MyFileLogger + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=DEBUG +logger.org.apache.kafka.additivity=false +logger.org.apache.kafka.appenderRefs=mylogger +logger.org.apache.kafka.appenderRef.mylogger.ref=MyFileLogger + +logger.org.eclipse.name=org.eclipse +logger.org.eclipse.level=INFO +logger.org.eclipse.additivity=false +logger.org.eclipse.appenderRefs=mylogger +logger.org.eclipse.appenderRef.mylogger.ref=MyFileLogger diff --git a/tests/kafkatest/services/trogdor/trogdor.py b/tests/kafkatest/services/trogdor/trogdor.py index 3b941fe9059eb..615c08542879f 100644 --- a/tests/kafkatest/services/trogdor/trogdor.py +++ b/tests/kafkatest/services/trogdor/trogdor.py @@ -34,8 +34,8 @@ class TrogdorService(KafkaPathResolverMixin, Service): AGENT_STDOUT_STDERR The path where we store the agents's stdout/stderr output. COORDINATOR_LOG The path where we store the coordinator's log4j output. AGENT_LOG The path where we store the agent's log4j output. - AGENT_LOG4J_PROPERTIES The path to the agent log4j.properties file for log config. - COORDINATOR_LOG4J_PROPERTIES The path to the coordinator log4j.properties file for log config. + AGENT_LOG4J_PROPERTIES The path to the agent log4j2.properties file for log config. + COORDINATOR_LOG4J_PROPERTIES The path to the coordinator log4j2.properties file for log config. CONFIG_PATH The path to the trogdor configuration file. 
DEFAULT_AGENT_PORT The default port to use for trogdor_agent daemons. DEFAULT_COORDINATOR_PORT The default port to use for trogdor_coordinator daemons. @@ -48,8 +48,8 @@ class TrogdorService(KafkaPathResolverMixin, Service): AGENT_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-agent-stdout-stderr.log") COORDINATOR_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator.log") AGENT_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-agent.log") - COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j.properties") - AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j.properties") + COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j2.properties") + AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j2.properties") CONFIG_PATH = os.path.join(PERSISTENT_ROOT, "trogdor.conf") DEFAULT_AGENT_PORT=8888 DEFAULT_COORDINATOR_PORT=8889 @@ -142,7 +142,7 @@ def start_node(self, node): def _start_coordinator_node(self, node): node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES, - self.render('log4j.properties', + self.render('log4j2.properties', log_path=TrogdorService.COORDINATOR_LOG)) self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR, TrogdorService.COORDINATOR_LOG4J_PROPERTIES, @@ -151,7 +151,7 @@ def _start_coordinator_node(self, node): def _start_agent_node(self, node): node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES, - self.render('log4j.properties', + self.render('log4j2.properties', log_path=TrogdorService.AGENT_LOG)) self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR, TrogdorService.AGENT_LOG4J_PROPERTIES, diff --git a/tests/kafkatest/services/verifiable_consumer.py b/tests/kafkatest/services/verifiable_consumer.py index de1e6f2a1f2fc..7a8e890e18260 100644 --- a/tests/kafkatest/services/verifiable_consumer.py +++ b/tests/kafkatest/services/verifiable_consumer.py @@ -215,7 
+215,7 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "verifiable_consumer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.properties") logs = { @@ -298,7 +298,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % VerifiableConsumer.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', log_file=VerifiableConsumer.LOG_FILE) + log_config = self.render('tools_log4j2.properties', log_file=VerifiableConsumer.LOG_FILE) node.account.create_file(VerifiableConsumer.LOG4J_CONFIG, log_config) # Create and upload config file diff --git a/tests/kafkatest/services/verifiable_producer.py b/tests/kafkatest/services/verifiable_producer.py index ea6292d57725e..d4f29888cf533 100644 --- a/tests/kafkatest/services/verifiable_producer.py +++ b/tests/kafkatest/services/verifiable_producer.py @@ -41,7 +41,7 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "verifiable_producer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.properties") logs = { @@ -127,7 +127,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % VerifiableProducer.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', 
log_file=VerifiableProducer.LOG_FILE) + log_config = self.render('tools_log4j2.properties', log_file=VerifiableProducer.LOG_FILE) node.account.create_file(VerifiableProducer.LOG4J_CONFIG, log_config) # Configure security diff --git a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py index c53715d769b8b..d62f1801fa627 100644 --- a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py +++ b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py @@ -33,7 +33,7 @@ def __init__(self, test_context, kafka, mode, nodeId, processing_guarantee): self.mode = mode self.nodeId = nodeId self.processing_guarantee = processing_guarantee - self.log4j_template = 'log4j_template.properties' + self.log4j_template = 'log4j2_template.properties' def start_cmd(self, node): return "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ @@ -55,7 +55,7 @@ def start_cmd(self, node): def start_node(self, node): node.account.mkdirs(self.PERSISTENT_ROOT) node.account.create_file(self.LOG4J_CONFIG_FILE, - self.render("log4j_template.properties", log_file=self.LOG_FILE)) + self.render("log4j2_template.properties", log_file=self.LOG_FILE)) self.logger.info("Starting process on " + str(node.account)) node.account.ssh(self.start_cmd(node)) diff --git a/tests/kafkatest/services/templates/tools_log4j.properties b/tests/kafkatest/tests/streams/templates/log4j2_template.properties similarity index 65% rename from tests/kafkatest/services/templates/tools_log4j.properties rename to tests/kafkatest/tests/streams/templates/log4j2_template.properties index 3f83b4220a1f5..45dbbf0502c0d 100644 --- a/tests/kafkatest/services/templates/tools_log4j.properties +++ b/tests/kafkatest/tests/streams/templates/log4j2_template.properties @@ -14,18 +14,25 @@ # limitations under the License. 
# Define the root logger with appender file -log4j.rootLogger = {{ log_level|default("INFO") }}, FILE +name=KafkaTestsStreamsTemplateConfig + +appenders=file + +appender.file.type=File +appender.file.name=FILE +appender.file.fileName={{ log_file }} +appender.file.append=true +appender.file.immediateFlush=true +appender.file.layout.type=PatternLayout +appender.file.layout.pattern=[%d] %p %m (%c)%n + +rootLogger.level={{ log_level|default("INFO") }} +rootLogger.appenderRefs=file +rootLogger.appenderRef.file.ref=FILE {% if loggers is defined %} {% for logger, log_level in loggers.items() %} -log4j.logger.{{ logger }}={{ log_level }} +logger.{{ logger }}.name={{ logger }} +logger.{{ logger }}.level={{ log_level }} {% endfor %} {% endif %} - -log4j.appender.FILE=org.apache.log4j.FileAppender -log4j.appender.FILE.File={{ log_file }} -log4j.appender.FILE.ImmediateFlush=true -# Set the append to true -log4j.appender.FILE.Append=true -log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n diff --git a/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java b/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java index 3cd3b0ec99ab0..573874d0b200c 100644 --- a/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java +++ b/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java @@ -24,12 +24,16 @@ import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.Namespace; -import org.apache.log4j.Logger; -import org.apache.log4j.PropertyConfigurator; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.core.LoggerContext; +import org.apache.logging.log4j.core.config.Configurator; +import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; +import java.nio.file.Path; import 
java.nio.file.Paths; import java.util.Properties; @@ -46,7 +50,7 @@ */ public class VerifiableLog4jAppender { - Logger logger = Logger.getLogger(VerifiableLog4jAppender.class); + Logger logger = LogManager.getLogger(VerifiableLog4jAppender.class); // If maxMessages < 0, log until the process is killed externally private long maxMessages = -1; @@ -235,7 +239,16 @@ public static VerifiableLog4jAppender createFromArgs(String[] args) { public VerifiableLog4jAppender(Properties props, int maxMessages) { this.maxMessages = maxMessages; - PropertyConfigurator.configure(props); + try { + Path tempConfigFile = Files.createTempFile("log4j2-", ".properties"); + try (FileOutputStream fos = new FileOutputStream(tempConfigFile.toFile()); + LoggerContext context = Configurator.initialize(null, tempConfigFile.toString())) { + props.store(fos, null); // write the Properties out to the temp config file + Files.deleteIfExists(tempConfigFile); + } + } catch (IOException e) { + throw new RuntimeException(e); + } } public static void main(String[] args) { diff --git a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java index 510c31231a9e4..1c577fe4c47aa 100644 --- a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java +++ b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java @@ -39,7 +39,7 @@ import org.apache.kafka.server.quota.QuotaType; import org.apache.kafka.tools.reassign.ReassignPartitionsCommand; -import org.apache.log4j.PropertyConfigurator; +import org.apache.logging.log4j.core.config.Configurator; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartFrame; import org.jfree.chart.JFreeChart; @@ -97,7 +97,7 @@ public class ReplicationQuotasTestRig { private static final String DIR; static { - PropertyConfigurator.configure("core/src/test/resources/log4j.properties"); + Configurator.initialize(null, 
"core/src/test/resources/log4j2.properties"); new File("Experiments").mkdir(); DIR = "Experiments/Run" + Long.valueOf(System.currentTimeMillis()).toString().substring(8); diff --git a/tools/src/test/resources/log4j.properties b/tools/src/test/resources/log4j.properties deleted file mode 100644 index 3aca07dc53016..0000000000000 --- a/tools/src/test/resources/log4j.properties +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n - -log4j.logger.org.apache.kafka=INFO -log4j.logger.org.eclipse.jetty=INFO diff --git a/tools/src/test/resources/log4j2.properties b/tools/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..00bfaa223cbb4 --- /dev/null +++ b/tools/src/test/resources/log4j2.properties @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +name=ToolsTestConfig +appenders=stdout + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n + +rootLogger.level=INFO +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +loggers=org.apache.kafka,org.eclipse.jetty + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=INFO + +logger.org.eclipse.jetty.name=org.eclipse.jetty +logger.org.eclipse.jetty.level=INFO diff --git a/trogdor/src/test/resources/log4j.properties b/trogdor/src/test/resources/log4j.properties deleted file mode 100644 index 5291604d49ae5..0000000000000 --- a/trogdor/src/test/resources/log4j.properties +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -log4j.rootLogger=TRACE, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n - -log4j.logger.org.apache.kafka=INFO -log4j.logger.org.eclipse.jetty=INFO diff --git a/trogdor/src/test/resources/log4j2.properties b/trogdor/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..0698efc7d75be --- /dev/null +++ b/trogdor/src/test/resources/log4j2.properties @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+name=TrogdorTestConfig +appenders=stdout + +appender.stdout.type=Console +appender.stdout.name=STDOUT +appender.stdout.layout.type=PatternLayout +appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n + +rootLogger.level=TRACE +rootLogger.appenderRefs=stdout +rootLogger.appenderRef.stdout.ref=STDOUT + +loggers=org.apache.kafka,org.eclipse.jetty + +logger.org.apache.kafka.name=org.apache.kafka +logger.org.apache.kafka.level=INFO + +logger.org.eclipse.jetty.name=org.eclipse.jetty +logger.org.eclipse.jetty.level=INFO From 347388a5e869389b0ceed336bb4edbfdb62a0053 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sat, 5 Oct 2024 00:25:42 +0800 Subject: [PATCH 02/46] KAFKA-9366: Upgrade log4j to log4j2 --- bin/connect-distributed.sh | 3 +- bin/connect-mirror-maker.sh | 3 +- bin/connect-standalone.sh | 3 +- bin/kafka-run-class.sh | 2 +- bin/kafka-server-start.sh | 3 +- bin/windows/connect-distributed.bat | 3 +- bin/windows/connect-standalone.bat | 3 +- bin/windows/kafka-run-class.bat | 2 +- bin/windows/kafka-server-start.bat | 3 +- bin/windows/zookeeper-server-start.bat | 5 +- bin/zookeeper-server-start.sh | 5 +- build.gradle | 121 +++------------ checkstyle/import-control.xml | 2 +- .../common/utils/LogCaptureAppender.java | 3 - clients/src/test/resources/log4j2.properties | 21 ++- config/connect-log4j.properties | 39 +++++ config/connect-log4j2.properties | 36 ++--- config/log4j.properties | 96 ++++++++++++ config/log4j2.properties | 86 ++++++----- ...g4j2.properties => tools-log4j.properties} | 15 +- .../file/src/test/resources/log4j2.properties | 18 ++- .../mirror/MirrorSourceConnectorTest.java | 4 +- .../src/test/resources/log4j2.properties | 30 ++-- .../apache/kafka/connect/runtime/Loggers.java | 9 +- .../StandaloneWorkerIntegrationTest.java | 1 + .../kafka/connect/runtime/LoggersTest.java | 120 +++++++------- .../runtime/rest/ConnectRestServerTest.java | 2 +- .../src/test/resources/log4j2.properties | 42 ++--- .../kafka/docker/KafkaDockerWrapper.scala | 8 +- 
.../scala/kafka/utils/Log4jController.scala | 8 +- .../test/java/kafka/admin/AclCommandTest.java | 4 +- core/src/test/resources/log4j2.properties | 26 ++-- .../api/PlaintextAdminIntegrationTest.scala | 146 +++++++----------- ...g4j2.properties => kafka.log4j.properties} | 22 +-- .../kafka/docker/KafkaDockerWrapperTest.scala | 44 +++--- .../3.7.0/jvm/Dockerfile | 4 +- docker/examples/README.md | 6 +- docker/jvm/Dockerfile | 4 +- docker/native/Dockerfile | 4 +- docker/native/launch | 4 +- docs/ops.html | 2 +- docs/streams/tutorial.html | 2 +- gradle/dependencies.gradle | 1 + .../src/test/resources/log4j2.properties | 32 ++-- metadata/src/test/resources/log4j2.properties | 26 ++-- raft/bin/test-kraft-server-start.sh | 3 +- .../config/kraft-log4j.properties | 23 +-- raft/config/kraft-log4j2.properties | 26 ++-- raft/src/test/resources/log4j2.properties | 26 ++-- .../src/test/resources/test/log4j2.properties | 22 +-- shell/src/test/resources/log4j2.properties | 16 +- storage/src/test/resources/log4j2.properties | 60 +++---- .../internals/AbstractKeyValueStoreTest.java | 2 +- .../AbstractSessionBytesStoreTest.java | 2 +- .../AbstractWindowBytesStoreTest.java | 2 +- streams/src/test/resources/log4j2.properties | 53 +++---- .../src/test/resources/log4j2.properties | 36 ++--- .../src/test/resources/log4j2.properties | 20 +-- tests/kafkatest/services/connect.py | 6 +- tests/kafkatest/services/console_consumer.py | 4 +- tests/kafkatest/services/kafka/kafka.py | 4 +- .../services/kafka/templates/log4j.properties | 141 +++++++++++++++++ .../performance/consumer_performance.py | 4 +- .../performance/end_to_end_latency.py | 4 +- .../performance/producer_performance.py | 4 +- ...g4j2.properties => tools_log4j.properties} | 22 +-- tests/kafkatest/services/streams.py | 4 +- .../services/templates/tools_log4j.properties | 30 ++-- .../services/transactional_message_copier.py | 4 +- .../trogdor/templates/log4j.properties | 23 +++ tests/kafkatest/services/trogdor/trogdor.py | 12 +- 
.../kafkatest/services/verifiable_consumer.py | 4 +- .../kafkatest/services/verifiable_producer.py | 4 +- .../kafka/tools/VerifiableLog4jAppender.java | 21 +-- .../tools/other/ReplicationQuotasTestRig.java | 4 +- tools/src/test/resources/log4j.properties | 22 +++ trogdor/src/test/resources/log4j.properties | 22 +++ 77 files changed, 931 insertions(+), 722 deletions(-) create mode 100644 config/connect-log4j.properties create mode 100644 config/log4j.properties rename config/{tools-log4j2.properties => tools-log4j.properties} (71%) rename core/src/test/scala/other/{kafka.log4j2.properties => kafka.log4j.properties} (61%) rename tools/src/test/resources/log4j2.properties => raft/config/kraft-log4j.properties (61%) create mode 100644 tests/kafkatest/services/kafka/templates/log4j.properties rename tests/kafkatest/services/performance/templates/{tools_log4j2.properties => tools_log4j.properties} (66%) rename trogdor/src/test/resources/log4j2.properties => tests/kafkatest/services/templates/tools_log4j.properties (58%) create mode 100644 tests/kafkatest/services/trogdor/templates/log4j.properties create mode 100644 tools/src/test/resources/log4j.properties create mode 100644 trogdor/src/test/resources/log4j.properties diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh index 0e3befbb0e01d..f367fe8e4aba7 100755 --- a/bin/connect-distributed.sh +++ b/bin/connect-distributed.sh @@ -23,7 +23,8 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j2.properties" + echo "DEPRECATED: using log4j 1.x configuration. 
To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/connect-log4j2.properties\"'" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh index 4831d91f30724..8bf70f178bffc 100755 --- a/bin/connect-mirror-maker.sh +++ b/bin/connect-mirror-maker.sh @@ -23,7 +23,8 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j2.properties" + echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/connect-log4j2.properties\"'" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh index d574722c228bd..5ea9694c371c8 100755 --- a/bin/connect-standalone.sh +++ b/bin/connect-standalone.sh @@ -23,7 +23,8 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j2.properties" + echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/connect-log4j2.properties\"'" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/kafka-run-class.sh b/bin/kafka-run-class.sh index 94df649729de8..b3291e461f2ba 100755 --- a/bin/kafka-run-class.sh +++ b/bin/kafka-run-class.sh @@ -228,7 +228,7 @@ fi # Log4j settings if [ -z "$KAFKA_LOG4J_OPTS" ]; then # Log to console. This is a tool. 
- LOG4J_DIR="$base_dir/config/tools-log4j2.properties" + LOG4J_DIR="$base_dir/config/tools-log4j.properties" # If Cygwin is detected, LOG4J_DIR is converted to Windows format. (( WINDOWS_OS_FORMAT )) && LOG4J_DIR=$(cygpath --path --mixed "${LOG4J_DIR}") KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:${LOG4J_DIR}" diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh index 9505eb62f233e..831d07b115bdb 100755 --- a/bin/kafka-server-start.sh +++ b/bin/kafka-server-start.sh @@ -22,7 +22,8 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j2.properties" + echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/log4j2.properties\"'" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat index 2b473e4d8095a..c355595777228 100644 --- a/bin/windows/connect-distributed.bat +++ b/bin/windows/connect-distributed.bat @@ -27,7 +27,8 @@ popd rem Log4j settings IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j2.properties + echo DEPRECATED: using log4j 1.x configuration. 
To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%BASE_DIR%/config/connect-log4j2.properties' + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties ) "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %* diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat index e443f6a1c95fd..8c1d19ee5ccd4 100644 --- a/bin/windows/connect-standalone.bat +++ b/bin/windows/connect-standalone.bat @@ -27,7 +27,8 @@ popd rem Log4j settings IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j2.properties + echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%BASE_DIR%/config/connect-log4j2.properties' + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties ) "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectStandalone %* diff --git a/bin/windows/kafka-run-class.bat b/bin/windows/kafka-run-class.bat index e16287cd4114f..a163ccd0a7c08 100755 --- a/bin/windows/kafka-run-class.bat +++ b/bin/windows/kafka-run-class.bat @@ -116,7 +116,7 @@ IF ["%LOG_DIR%"] EQU [""] ( rem Log4j settings IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/tools-log4j2.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/tools-log4j.properties ) ELSE ( rem create logs directory IF not exist "%LOG_DIR%" ( diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat index e03e6398dacd7..b39c840056abd 100644 --- a/bin/windows/kafka-server-start.bat +++ b/bin/windows/kafka-server-start.bat @@ -21,7 +21,8 @@ IF [%1] EQU [] ( SetLocal IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j2.properties + echo DEPRECATED: 
using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%~dp0../../config/log4j2.properties' + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties ) IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( rem detect OS architecture diff --git a/bin/windows/zookeeper-server-start.bat b/bin/windows/zookeeper-server-start.bat index 4549f240d19ee..ad42edf1df9da 100644 --- a/bin/windows/zookeeper-server-start.bat +++ b/bin/windows/zookeeper-server-start.bat @@ -19,9 +19,12 @@ IF [%1] EQU [] ( EXIT /B 1 ) +echo Running with log4j 2.x - Log4j MBean registration is not supported. + SetLocal IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j2.properties + echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%~dp0../../config/log4j2.properties' + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties ) IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M diff --git a/bin/zookeeper-server-start.sh b/bin/zookeeper-server-start.sh index b1d72eb29537c..3d45d8b7f85d6 100755 --- a/bin/zookeeper-server-start.sh +++ b/bin/zookeeper-server-start.sh @@ -22,9 +22,12 @@ fi base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j2.properties" + echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/log4j2.properties\"'" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" fi +echo "Running with log4j 2.x - Log4j MBean registration is not supported." 
+ if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then export KAFKA_HEAP_OPTS="-Xmx512M -Xms512M" fi diff --git a/build.gradle b/build.gradle index e58ec12b00adb..cf722bbb81d4c 100644 --- a/build.gradle +++ b/build.gradle @@ -124,7 +124,8 @@ ext { } runtimeTestLibs = [ - libs.slf4jReload4j, +// libs.slf4jReload4j, + libs.slf4jLog4j2, libs.junitPlatformLanucher, ] } @@ -167,10 +168,10 @@ allprojects { // ZooKeeper (potentially older and containing CVEs) libs.nettyHandler, libs.nettyTransportNativeEpoll, - // be explicit about the reload4j version instead of relying on the transitive versions - // libs.reload4j, + libs.reload4j, libs.log4j2Api, - libs.log4j2Core + libs.log4j2Core, + libs.log4j1Bridge2Api ) } } @@ -904,15 +905,14 @@ project(':server') { implementation libs.slf4jApi - // compileOnly libs.reload4j compileOnly libs.log4j2Api compileOnly libs.log4j2Core + compileOnly libs.log4j1Bridge2Api testImplementation project(':clients').sourceSets.test.output testImplementation libs.mockitoCore testImplementation libs.junitJupiter -// testImplementation libs.slf4jReload4j testImplementation libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher @@ -968,7 +968,6 @@ project(':share') { implementation libs.slf4jApi testImplementation libs.junitJupiter -// testImplementation libs.slf4jReload4j testImplementation libs.slf4jLog4j2 testRuntimeOnly libs.junitPlatformLanucher @@ -1062,9 +1061,8 @@ project(':core') { // ZooKeeperMain depends on commons-cli but declares the dependency as `provided` implementation libs.commonsCli implementation libs.log4j2Core - -// compileOnly libs.reload4j - compileOnly libs.log4j2Api + implementation libs.log4j2Api + implementation libs.log4j1Bridge2Api testImplementation project(':clients').sourceSets.test.output testImplementation project(':group-coordinator').sourceSets.test.output @@ -1094,7 +1092,6 @@ project(':core') { testImplementation libs.apachedsMavibotPartition testImplementation libs.apachedsJdbmPartition testImplementation 
libs.junitJupiter -// testImplementation libs.slf4jReload4j testImplementation libs.slf4jLog4j2 testImplementation libs.caffeine @@ -1345,14 +1342,13 @@ project(':metadata') { implementation libs.jacksonDatabind implementation libs.jacksonJDK8Datatypes implementation libs.metrics -// compileOnly libs.reload4j compileOnly libs.log4j2Api compileOnly libs.log4j2Core + compileOnly libs.log4j1Bridge2Api testImplementation libs.junitJupiter testImplementation libs.jqwik testImplementation libs.hamcrest testImplementation libs.mockitoCore -// testImplementation libs.slf4jReload4j testImplementation libs.slf4jLog4j2 testImplementation project(':clients').sourceSets.test.output testImplementation project(':raft').sourceSets.test.output @@ -1494,9 +1490,6 @@ project(':group-coordinator') { testImplementation libs.mockitoCore testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testImplementation libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') } @@ -1559,9 +1552,6 @@ project(':test-common') { testImplementation libs.mockitoCore testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } sourceSets { @@ -1704,9 +1694,6 @@ project(':coordinator-common') { testImplementation libs.mockitoCore testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } sourceSets { @@ -1758,9 +1745,6 @@ project(':share-coordinator') { testImplementation libs.mockitoCore testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') } @@ -1870,16 +1854,14 @@ project(':clients') { testImplementation libs.jacksonJaxrsJsonProvider testImplementation libs.jose4j testImplementation libs.junitJupiter -// testImplementation libs.reload4j 
testImplementation libs.log4j2Api testImplementation libs.log4j2Core + testImplementation libs.log4j1Bridge2Api testImplementation libs.bndlib testImplementation libs.spotbugs testImplementation libs.mockitoCore testImplementation libs.mockitoJunitJupiter // supports MockitoExtension -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.jacksonDatabind testRuntimeOnly libs.jacksonJDK8Datatypes testRuntimeOnly runtimeTestLibs @@ -2048,9 +2030,6 @@ project(':raft') { testImplementation libs.hamcrest testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') } @@ -2146,9 +2125,6 @@ project(':server-common') { testImplementation libs.hamcrest testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } task createVersionFile() { @@ -2207,9 +2183,6 @@ project(':storage:storage-api') { testImplementation libs.mockitoCore testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } task createVersionFile() { @@ -2299,9 +2272,6 @@ project(':storage') { testImplementation libs.bcpkix testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') } @@ -2470,10 +2440,7 @@ project(':tools') { implementation libs.jacksonDataformatCsv implementation libs.jacksonJDK8Datatypes implementation libs.slf4jApi -// implementation libs.slf4jReload4j - implementation libs.slf4jLog4j2 - implementation libs.log4j2Core - implementation libs.spotbugs + implementation libs.slf4jReload4j implementation libs.joptSimple implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation @@ -2500,8 +2467,7 @@ project(':tools') { 
testImplementation(libs.jfreechart) { exclude group: 'junit', module: 'junit' } -// testImplementation libs.reload4j - testImplementation libs.log4j2Api + testImplementation libs.reload4j testImplementation libs.apachedsCoreApi testImplementation libs.apachedsInterceptorKerberos testImplementation libs.apachedsProtocolShared @@ -2540,9 +2506,7 @@ project(':trogdor') { implementation libs.jacksonDatabind implementation libs.jacksonJDK8Datatypes implementation libs.slf4jApi -// runtimeOnly libs.reload4j - runtimeOnly libs.log4j2Api - runtimeOnly libs.log4j2Core + runtimeOnly libs.reload4j implementation libs.jacksonJaxrsJsonProvider implementation libs.jerseyContainerServlet @@ -2558,9 +2522,7 @@ project(':trogdor') { testImplementation project(':clients').sourceSets.test.output testImplementation libs.mockitoCore - testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 + testRuntimeOnly libs.slf4jReload4j testRuntimeOnly libs.junitPlatformLanucher } @@ -2609,9 +2571,6 @@ project(':shell') { testImplementation libs.junitJupiter testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } javadoc { @@ -2667,9 +2626,9 @@ project(':streams') { testImplementation project(':server-common') testImplementation project(':server-common').sourceSets.test.output testImplementation project(':server') -// testImplementation libs.reload4j testImplementation libs.log4j2Api testImplementation libs.log4j2Core + testImplementation libs.log4j1Bridge2Api testImplementation libs.junitJupiter testImplementation libs.bcpkix testImplementation libs.hamcrest @@ -2680,9 +2639,6 @@ project(':streams') { testRuntimeOnly project(':streams:test-utils') testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher generator project(':generator') } @@ -2834,9 +2790,6 @@ 
project(':streams:streams-scala') { testImplementation libs.mockitoJunitJupiter // supports MockitoExtension testImplementation libs.hamcrest testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } javadoc { @@ -2889,9 +2842,6 @@ project(':streams:test-utils') { testImplementation libs.hamcrest testRuntimeOnly runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } tasks.create(name: "copyDependantLibs", type: Copy) { @@ -2922,7 +2872,6 @@ project(':streams:examples') { implementation project(':streams') -// implementation libs.slf4jReload4j implementation libs.slf4jLog4j2 testImplementation project(':streams:test-utils') @@ -3353,7 +3302,6 @@ project(':jmh-benchmarks') { implementation libs.jacksonDatabind implementation libs.metrics implementation libs.mockitoCore -// implementation libs.slf4jReload4j implementation libs.slf4jLog4j2 implementation libs.scalaLibrary implementation libs.scalaJava8Compat @@ -3422,15 +3370,12 @@ project(':connect:api') { dependencies { api project(':clients') implementation libs.slf4jApi -// runtimeOnly libs.reload4j runtimeOnly libs.log4j2Api runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api implementation libs.jaxrsApi testImplementation libs.junitJupiter - testRuntimeOnly libs.junitPlatformLanucher -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 testImplementation project(':clients').sourceSets.test.output testRuntimeOnly runtimeTestLibs @@ -3463,15 +3408,12 @@ project(':connect:transforms') { api project(':connect:api') implementation libs.slf4jApi -// runtimeOnly libs.reload4j runtimeOnly libs.log4j2Api runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api testImplementation libs.junitJupiter -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly 
libs.junitPlatformLanucher testImplementation project(':clients').sourceSets.test.output testRuntimeOnly runtimeTestLibs @@ -3508,15 +3450,12 @@ project(':connect:json') { api libs.jacksonAfterburner implementation libs.slf4jApi -// runtimeOnly libs.reload4j runtimeOnly libs.log4j2Api runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api testImplementation libs.junitJupiter -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher testImplementation project(':clients').sourceSets.test.output testRuntimeOnly runtimeTestLibs @@ -3558,9 +3497,9 @@ project(':connect:runtime') { api project(':connect:transforms') implementation libs.slf4jApi -// implementation libs.reload4j implementation libs.log4j2Api implementation libs.log4j2Core + implementation libs.log4j1Bridge2Api implementation libs.bndlib implementation libs.spotbugs implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation @@ -3601,8 +3540,6 @@ project(':connect:runtime') { testImplementation libs.mockitoJunitJupiter testImplementation libs.httpclient -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.bcpkix testRuntimeOnly runtimeTestLibs } @@ -3697,16 +3634,13 @@ project(':connect:file') { dependencies { implementation project(':connect:api') implementation libs.slf4jApi -// runtimeOnly libs.reload4j runtimeOnly libs.log4j2Api runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api testImplementation libs.junitJupiter testImplementation libs.mockitoCore -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher testImplementation project(':clients').sourceSets.test.output testImplementation project(':connect:runtime') testImplementation project(':connect:runtime').sourceSets.test.output @@ -3743,9 +3677,9 @@ project(':connect:basic-auth-extension') { dependencies { implementation project(':connect:api') implementation 
libs.slf4jApi -// runtimeOnly libs.reload4j runtimeOnly libs.log4j2Api runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api implementation libs.jaxrsApi implementation libs.jaxAnnotationApi @@ -3754,8 +3688,6 @@ project(':connect:basic-auth-extension') { testImplementation libs.junitJupiter testImplementation project(':clients').sourceSets.test.output -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.jerseyContainerServlet testRuntimeOnly runtimeTestLibs } @@ -3792,9 +3724,9 @@ project(':connect:mirror') { implementation libs.argparse4j implementation libs.jacksonAnnotations implementation libs.slf4jApi -// runtimeOnly libs.reload4j runtimeOnly libs.log4j2Api runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api implementation libs.jacksonAnnotations implementation libs.jacksonJaxrsJsonProvider implementation libs.jerseyContainerServlet @@ -3808,9 +3740,9 @@ project(':connect:mirror') { implementation libs.swaggerAnnotations testImplementation libs.junitJupiter -// testImplementation libs.reload4j testImplementation libs.log4j2Api testImplementation libs.log4j2Core + testImplementation libs.log4j1Bridge2Api testImplementation libs.bndlib testImplementation libs.mockitoCore testImplementation project(':clients').sourceSets.test.output @@ -3821,8 +3753,6 @@ project(':connect:mirror') { testImplementation project(':server-common').sourceSets.test.output testRuntimeOnly project(':connect:runtime') -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 testRuntimeOnly libs.bcpkix testRuntimeOnly runtimeTestLibs } @@ -3881,17 +3811,14 @@ project(':connect:mirror-client') { dependencies { implementation project(':clients') implementation libs.slf4jApi -// runtimeOnly libs.reload4j runtimeOnly libs.log4j2Api runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api testImplementation libs.junitJupiter testImplementation project(':clients').sourceSets.test.output testRuntimeOnly 
runtimeTestLibs -// testRuntimeOnly libs.slf4jReload4j - testRuntimeOnly libs.slf4jLog4j2 - testRuntimeOnly libs.junitPlatformLanucher } javadoc { diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml index cd0cc99191452..4d48c11cd3825 100644 --- a/checkstyle/import-control.xml +++ b/checkstyle/import-control.xml @@ -304,7 +304,7 @@ - + diff --git a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java index 2cac9573a6d2f..71949e67356aa 100644 --- a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java +++ b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java @@ -111,16 +111,13 @@ public void setClassLogger(final Class clazz, Level level) { String loggerName = clazz.getName(); LoggerConfig loggerConfig = config.getLoggerConfig(loggerName); - // Store original level Level originalLevel = loggerConfig.getLevel(); logLevelChanges.add(new LogLevelChange(originalLevel, clazz)); if (!loggerConfig.getName().equals(loggerName)) { - // Create new LoggerConfig for the specific logger LoggerConfig newLoggerConfig = new LoggerConfig(loggerName, level, true); config.addLogger(loggerName, newLoggerConfig); } else { - // Update existing LoggerConfig loggerConfig.setLevel(level); } ctx.updateLoggers(); diff --git a/clients/src/test/resources/log4j2.properties b/clients/src/test/resources/log4j2.properties index 047595ab34cd5..34a7d635ecd04 100644 --- a/clients/src/test/resources/log4j2.properties +++ b/clients/src/test/resources/log4j2.properties @@ -15,17 +15,22 @@ name=TestConfig appenders=console -# Root logger configuration -rootLogger.level=off -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT appender.console.type=Console appender.console.name=STDOUT appender.console.layout.type=PatternLayout appender.console.layout.pattern=[%d] %p %m (%c:%L)%n 
-loggers=org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper -# CommonNameLoggingTrustManagerFactoryWrapper Logger configuration -logger.org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper.name=org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper -logger.org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper.level=INFO +# Root logger configuration +rootLogger.level=OFF +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT + +loggers=kafkaLogger,CommonNameLoggingTrustManagerFactoryWrapperLogger + +logger.kafkaLogger.name=org.apache.kafka +logger.kafkaLogger.level=ERROR + +# We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper +logger.CommonNameLoggingTrustManagerFactoryWrapperLogger.name=org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper +logger.CommonNameLoggingTrustManagerFactoryWrapperLogger.level=INFO diff --git a/config/connect-log4j.properties b/config/connect-log4j.properties new file mode 100644 index 0000000000000..979cb3869f952 --- /dev/null +++ b/config/connect-log4j.properties @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +log4j.rootLogger=INFO, stdout, connectAppender + +# Send the logs to the console. +# +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout + +# Send the logs to a file, rolling the file at midnight local time. For example, the `File` option specifies the +# location of the log files (e.g. ${kafka.logs.dir}/connect.log), and at midnight local time the file is closed +# and copied in the same directory but with a filename that ends in the `DatePattern` option. +# +log4j.appender.connectAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.connectAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.connectAppender.File=${kafka.logs.dir}/connect.log +log4j.appender.connectAppender.layout=org.apache.log4j.PatternLayout + +# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information +# in the log messages, where appropriate. This makes it easier to identify those log messages that apply to a +# specific connector. 
+# +connect.log.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n + +log4j.appender.stdout.layout.ConversionPattern=${connect.log.pattern} +log4j.appender.connectAppender.layout.ConversionPattern=${connect.log.pattern} diff --git a/config/connect-log4j2.properties b/config/connect-log4j2.properties index 024bbabebe153..8e57126162d46 100644 --- a/config/connect-log4j2.properties +++ b/config/connect-log4j2.properties @@ -15,25 +15,25 @@ name=ConfigConnectConfig -appenders=stdout, connectAppender +appenders=console, file -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n -appender.connectAppender.type=RollingFile -appender.connectAppender.name=ConnectAppender -appender.connectAppender.fileName=${kafka.logs.dir}/connect.log -appender.connectAppender.filePattern=${kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log -appender.connectAppender.layout.type=PatternLayout -appender.connectAppender.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n -appender.connectAppender.policies.type=Policies -appender.connectAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.connectAppender.policies.time.interval=1 -appender.connectAppender.policies.time.modulate=true +appender.file.type=RollingFile +appender.file.name=ConnectAppender +appender.file.fileName=${kafka.logs.dir}/connect.log +appender.file.filePattern=${kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log +appender.file.layout.type=PatternLayout +appender.file.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n +appender.file.policies.type=Policies +appender.file.policies.time.type=TimeBasedTriggeringPolicy +appender.file.policies.time.interval=1 +appender.file.policies.time.modulate=true rootLogger.level=INFO 
-rootLogger.appenderRefs=stdout,connectAppender -rootLogger.appenderRef.stdout.ref=STDOUT -rootLogger.appenderRef.connectAppender.ref=ConnectAppender +rootLogger.appenderRefs=console,file +rootLogger.appenderRef.console.ref=STDOUT +rootLogger.appenderRef.file.ref=ConnectAppender diff --git a/config/log4j.properties b/config/log4j.properties new file mode 100644 index 0000000000000..4dbdd83f83b74 --- /dev/null +++ b/config/log4j.properties @@ -0,0 +1,96 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Unspecified loggers and loggers with additivity=true output to server.log and stdout +# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise +log4j.rootLogger=INFO, stdout, kafkaAppender + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log +log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log +log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log +log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log +log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH 
+log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log +log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log +log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n + +# Change the line below to adjust ZK client logging +log4j.logger.org.apache.zookeeper=INFO + +# Change the two lines below to adjust the general broker logging level (output to server.log and stdout) +log4j.logger.kafka=INFO +log4j.logger.org.apache.kafka=INFO + +# Change to DEBUG or TRACE to enable request logging +log4j.logger.kafka.request.logger=WARN, requestAppender +log4j.additivity.kafka.request.logger=false + +# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output +# related to the handling of requests +#log4j.logger.kafka.network.Processor=TRACE, requestAppender +#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender +#log4j.additivity.kafka.server.KafkaApis=false +log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender +log4j.additivity.kafka.network.RequestChannel$=false + +# Change the line below to adjust KRaft mode controller logging +log4j.logger.org.apache.kafka.controller=INFO, controllerAppender +log4j.additivity.org.apache.kafka.controller=false + +# Change the line below to adjust ZK mode controller logging +log4j.logger.kafka.controller=TRACE, controllerAppender +log4j.additivity.kafka.controller=false + +log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender +log4j.additivity.kafka.log.LogCleaner=false + +log4j.logger.state.change.logger=INFO, stateChangeAppender 
+log4j.additivity.state.change.logger=false + +# Access denials are logged at INFO level, change to DEBUG to also log allowed accesses +log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender +log4j.additivity.kafka.authorizer.logger=false + diff --git a/config/log4j2.properties b/config/log4j2.properties index fadbcba695f67..0123b460da099 100644 --- a/config/log4j2.properties +++ b/config/log4j2.properties @@ -15,7 +15,7 @@ # Unspecified loggers and loggers with additivity=true output to server.log and stdout # Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise -name=KafkaLogConfig +name=LogConfig appenders=stdout,kafkaAppender,stateChangeAppender,requestAppender,cleanerAppender,controllerAppender,authorizerAppender # Console appender (stdout) @@ -94,64 +94,70 @@ rootLogger.appenderRefs=stdout,kafkaAppender rootLogger.appenderRef.stdout.ref=STDOUT rootLogger.appenderRef.kafkaAppender.ref=KafkaAppender -loggers=org.apache.zookeeper,kafka,org.apache.kafka,kafka.request.logger,kafka.network.RequestChannel$,org.apache.kafka.controller,kafka.controller,kafka.log.LogCleaner,state.change.logger,kafka.authorizer.logger +loggers=zookeeper,kafka,apacheKafka,requestLogger,networkRequestChannel,apacheKafkaController,kafkaController,logCleaner,stateChangeLogger,authorizerLogger # Zookeeper logger -logger.org.apache.zookeeper.name=org.apache.zookeeper -logger.org.apache.zookeeper.level=INFO -logger.org.apache.zookeeper.additivity=false -logger.org.apache.zookeeper.appenderRef.kafkaAppender.ref=KafkaAppender +logger.zookeeper.name=org.apache.zookeeper +logger.zookeeper.level=INFO # Kafka logger logger.kafka.name=kafka logger.kafka.level=INFO -logger.kafka.additivity=false -logger.kafka.appenderRef.kafkaAppender.ref=KafkaAppender # Kafka org.apache logger -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=INFO -logger.org.apache.kafka.additivity=false 
-logger.org.apache.kafka.appenderRef.kafkaAppender.ref=KafkaAppender +logger.apacheKafka.name=org.apache.kafka +logger.apacheKafka.level=INFO # Kafka request logger -logger.kafka.request.logger.name=kafka.request.logger -logger.kafka.request.logger.level=WARN -logger.kafka.request.logger.additivity=false -logger.kafka.request.logger.appenderRef.requestAppender.ref=RequestAppender +logger.requestLogger.name=kafka.request.logger +logger.requestLogger.level=WARN +logger.requestLogger.additivity=false +logger.requestLogger.appenderRef.requestAppender.ref=RequestAppender + + +# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output +# related to the handling of requests +#loggers=zookeeper,kafka,apacheKafka,requestLogger,networkRequestChannel,apacheKafkaController,kafkaController,logCleaner,stateChangeLogger,authorizerLogger,networkProcessor,serverKafkaApis +#logger.networkProcessor.name=kafka.network.Processor +#logger.networkProcessor.level=TRACE +#logger.networkProcessor.appenderRef.requestAppender.ref=RequestAppender +#logger.serverKafkaApis.name=kafka.server.KafkaApis +#logger.serverKafkaApis.level=TRACE +#logger.serverKafkaApis.additivity=false +#logger.serverKafkaApis.appenderRef.requestAppender.ref=RequestAppender # Kafka network RequestChannel$ logger -logger.kafka.network.RequestChannel$.name=kafka.network.RequestChannel$ -logger.kafka.network.RequestChannel$.level=WARN -logger.kafka.network.RequestChannel$.additivity=false -logger.kafka.network.RequestChannel$.appenderRef.requestAppender.ref=RequestAppender +logger.networkRequestChannel.name=kafka.network.RequestChannel$ +logger.networkRequestChannel.level=WARN +logger.networkRequestChannel.additivity=false +logger.networkRequestChannel.appenderRef.requestAppender.ref=RequestAppender # KRaft mode controller logger -logger.org.apache.kafka.controller.name=org.apache.kafka.controller -logger.org.apache.kafka.controller.level=INFO 
-logger.org.apache.kafka.controller.additivity=false -logger.org.apache.kafka.controller.appenderRef.controllerAppender.ref=ControllerAppender +logger.apacheKafkaController.name=org.apache.kafka.controller +logger.apacheKafkaController.level=INFO +logger.apacheKafkaController.additivity=false +logger.apacheKafkaController.appenderRef.controllerAppender.ref=ControllerAppender # ZK mode controller logger -logger.kafka.controller.name=kafka.controller -logger.kafka.controller.level=TRACE -logger.kafka.controller.additivity=false -logger.kafka.controller.appenderRef.controllerAppender.ref=ControllerAppender +logger.kafkaController.name=kafka.controller +logger.kafkaController.level=TRACE +logger.kafkaController.additivity=false +logger.kafkaController.appenderRef.controllerAppender.ref=ControllerAppender # LogCleaner logger -logger.kafka.log.LogCleaner.name=kafka.log.LogCleaner -logger.kafka.log.LogCleaner.level=INFO -logger.kafka.log.LogCleaner.additivity=false -logger.kafka.log.LogCleaner.appenderRef.cleanerAppender.ref=CleanerAppender +logger.logCleaner.name=kafka.log.LogCleaner +logger.logCleaner.level=INFO +logger.logCleaner.additivity=false +logger.logCleaner.appenderRef.cleanerAppender.ref=CleanerAppender # State change logger -logger.state.change.logger.name=state.change.logger -logger.state.change.logger.level=INFO -logger.state.change.logger.additivity=false -logger.state.change.logger.appenderRef.stateChangeAppender.ref=StateChangeAppender +logger.stateChangeLogger.name=state.change.logger +logger.stateChangeLogger.level=INFO +logger.stateChangeLogger.additivity=false +logger.stateChangeLogger.appenderRef.stateChangeAppender.ref=StateChangeAppender # Authorizer logger -logger.kafka.authorizer.logger.name=kafka.authorizer.logger -logger.kafka.authorizer.logger.level=INFO -logger.kafka.authorizer.logger.additivity=false -logger.kafka.authorizer.logger.appenderRef.authorizerAppender.ref=AuthorizerAppender +logger.authorizerLogger.name=kafka.authorizer.logger 
+logger.authorizerLogger.level=INFO +logger.authorizerLogger.additivity=false +logger.authorizerLogger.appenderRef.authorizerAppender.ref=AuthorizerAppender diff --git a/config/tools-log4j2.properties b/config/tools-log4j.properties similarity index 71% rename from config/tools-log4j2.properties rename to config/tools-log4j.properties index 6ce22560b3be2..b19e343265fc3 100644 --- a/config/tools-log4j2.properties +++ b/config/tools-log4j.properties @@ -12,15 +12,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -name=ToolLog4j2 -appenders=console -appender.console.type=Console -appender.console.name=STDERR -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c)%n -appender.console.target=SYSTEM_ERR +log4j.rootLogger=WARN, stderr -rootLogger.level=WARN -rootLogger.appenderRefs=stderr -rootLogger.appenderRef.stderr.ref=STDERR +log4j.appender.stderr=org.apache.log4j.ConsoleAppender +log4j.appender.stderr.layout=org.apache.log4j.PatternLayout +log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.stderr.Target=System.err diff --git a/connect/file/src/test/resources/log4j2.properties b/connect/file/src/test/resources/log4j2.properties index d553565800215..101b56d63c71d 100644 --- a/connect/file/src/test/resources/log4j2.properties +++ b/connect/file/src/test/resources/log4j2.properties @@ -14,21 +14,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
## -name=ConnectTestConfig +name=ConnectFileTestConfig -appenders=stdout - -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout +appenders=console +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout # -# The `%X{connector.context}` parameter includes connector-specific and task-specific information in the log message. +# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information +# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a +# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. # -appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n +appender.console.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n rootLogger.level=INFO -rootLogger.appenderRefs=stdout +rootLogger.appenderRefs=console -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRef.console.ref=STDOUT +loggers=kafka logger.kafka.name=kafka logger.kafka.level=WARN diff --git a/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java b/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java index 6cca2c3ffa1a4..a24975f2f23b0 100644 --- a/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java +++ b/connect/mirror/src/test/java/org/apache/kafka/connect/mirror/MirrorSourceConnectorTest.java @@ -197,7 +197,7 @@ public void testNoBrokerAclAuthorizer() throws Exception { when(describeAclsResult.values()).thenReturn(describeAclsFuture); when(sourceAdmin.describeAcls(any())).thenReturn(describeAclsResult); - try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorSourceConnector.class.getName())) { + try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorSourceConnector.class)) { connectorLogs.setClassLogger(MirrorSourceConnector.class, Level.TRACE); 
connector.syncTopicAcls(); long aclSyncDisableMessages = connectorLogs.getMessages().stream() @@ -245,7 +245,7 @@ public void testMissingDescribeConfigsAcl() throws Exception { when(describeConfigsResult.all()).thenReturn(describeConfigsFuture); when(sourceAdmin.describeConfigs(any())).thenReturn(describeConfigsResult); - try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorUtils.class.getName())) { + try (LogCaptureAppender connectorLogs = LogCaptureAppender.createAndRegister(MirrorUtils.class)) { connectorLogs.setClassLogger(MirrorUtils.class, Level.TRACE); Set topics = new HashSet<>(); topics.add("topic1"); diff --git a/connect/mirror/src/test/resources/log4j2.properties b/connect/mirror/src/test/resources/log4j2.properties index bc50c138bcb14..aa5596ecd19b7 100644 --- a/connect/mirror/src/test/resources/log4j2.properties +++ b/connect/mirror/src/test/resources/log4j2.properties @@ -14,28 +14,30 @@ # See the License for the specific language governing permissions and # limitations under the License. ## -name=MirrorTestConfig -appenders=stdout +name=ConnectMirrorTestConfig -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout +appenders=console +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout # -# The `%X{connector.context}` parameter includes connector-specific and task-specific information in the log message. +# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information +# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a +# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. 
# -appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n +appender.console.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n -loggers=kafka,state.change.logger,org.apache.kafka.connect +loggers=kafka,stateChangeLogger,kafkaConnectLogger rootLogger.level=INFO -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT logger.kafka.name=kafka logger.kafka.level=WARN -logger.state.change.logger.name=state.change.logger -logger.state.change.logger.level=OFF +logger.stateChangeLogger.name=state.change.logger +logger.stateChangeLogger.level=OFF -logger.org.apache.kafka.connect.name=org.apache.kafka.connect -logger.org.apache.kafka.connect.level=DEBUG +logger.kafkaConnectLogger.name=org.apache.kafka.connect +logger.kafkaConnectLogger.level=DEBUG diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java index d688f375dabcf..d74dd7f2a6d12 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java @@ -48,9 +48,9 @@ public class Loggers { private static final Logger log = LoggerFactory.getLogger(Loggers.class); /** - * Log4j uses "root" (case-insensitive) as name of the root logger. + * Log4j2 uses "" (empty string) as name of the root logger. 
*/ - private static final String ROOT_LOGGER_NAME = "root"; + private static final String ROOT_LOGGER_NAME = ""; private final Time time; private final Map lastModifiedTimes; @@ -101,11 +101,11 @@ public synchronized Map allLevels() { Map result = new TreeMap<>(); currentLoggers().stream() - .filter(logger -> logger.getLevel() != null) + .filter(logger -> !logger.getLevel().equals(Level.OFF)) .forEach(logger -> result.put(logger.getName(), loggerLevel(logger))); org.apache.logging.log4j.Logger root = rootLogger(); - if (root.getLevel() != null) { + if (!root.getLevel().equals(Level.OFF)) { result.put(ROOT_LOGGER_NAME, loggerLevel(root)); } @@ -214,5 +214,4 @@ private LoggerLevel loggerLevel(org.apache.logging.log4j.Logger logger) { Long lastModified = lastModifiedTimes.get(logger.getName()); return new LoggerLevel(Objects.toString(level), lastModified); } - } diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java index 69a65ba7bfbde..3b13487b3e7bd 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java @@ -114,6 +114,7 @@ public void testDynamicLogging() { final String namespace1 = "org.apache.kafka.connect"; final String level1 = "DEBUG"; connect.setLogLevel(namespace1, "ERROR", null); + // fixme Map currentLevels = testSetLoggingLevel(namespace1, level1, null, initialLevels); // Tests with scope=worker diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java index 3e34b512ff53b..7a9aa74b34d7d 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java +++ 
b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java @@ -62,18 +62,21 @@ public void setup() { @Test public void testGetLoggersIgnoresNullLevels() { - Logger root = logger("root"); + LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + Logger root = loggerContext.getRootLogger(); + Configurator.setLevel(root, Level.OFF); - Logger a = logger("a"); + Logger a = loggerContext.getLogger("a"); Configurator.setLevel(a, null); - Logger b = logger("b"); + + Logger b = loggerContext.getLogger("b"); Configurator.setLevel(b, Level.INFO); Loggers loggers = new TestLoggers(root, a, b); Map expectedLevels = Collections.singletonMap( - "b", - new LoggerLevel(Level.INFO.toString(), null) + "b", + new LoggerLevel(Level.INFO.toString(), null) ); Map actualLevels = loggers.allLevels(); assertEquals(expectedLevels, actualLevels); @@ -81,61 +84,51 @@ public void testGetLoggersIgnoresNullLevels() { @Test public void testGetLoggerFallsBackToEffectiveLogLevel() { - LoggerContext context = (LoggerContext) LogManager.getContext(false); - Configuration config = context.getConfiguration(); - - Logger root = logger("root"); + LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + Logger root = loggerContext.getRootLogger(); Configurator.setLevel(root, Level.ERROR); - LoggerConfig rootLoggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME); - LoggerConfig aLoggerConfig = config.getLoggerConfig("a"); - aLoggerConfig.setLevel(null); + Logger a = loggerContext.getLogger("a"); + Configurator.setLevel(a, null); - LoggerConfig bLoggerConfig = config.getLoggerConfig("b"); - bLoggerConfig.setLevel(Level.INFO); + Logger b = loggerContext.getLogger("b"); + Configurator.setLevel(b, Level.INFO); - context.updateLoggers(); + Loggers loggers = new TestLoggers(root, a, b); - Level expectedLevel = rootLoggerConfig.getLevel(); - Level actualLevel = aLoggerConfig.getLevel() != null ? 
aLoggerConfig.getLevel() : rootLoggerConfig.getLevel(); + LoggerLevel expectedLevel = new LoggerLevel(Level.ERROR.toString(), null); + LoggerLevel actualLevel = loggers.level("a"); assertEquals(expectedLevel, actualLevel); } @Test public void testGetUnknownLogger() { - LoggerContext context = (LoggerContext) LogManager.getContext(false); - Configuration config = context.getConfiguration(); - - Logger root = LogManager.getRootLogger(); - LoggerConfig rootLoggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME); - rootLoggerConfig.setLevel(Level.ERROR); - - LoggerConfig aLoggerConfig = config.getLoggerConfig("a"); - aLoggerConfig.setLevel(null); - - LoggerConfig bLoggerConfig = config.getLoggerConfig("b"); - bLoggerConfig.setLevel(Level.INFO); + LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + Logger root = loggerContext.getRootLogger(); + Configurator.setLevel(root, Level.ERROR); - context.updateLoggers(); + Logger a = loggerContext.getLogger("a"); + Configurator.setLevel(a, null); - LoggerConfig cLoggerConfig = config.getLoggerConfig("c"); + Logger b = loggerContext.getLogger("b"); + Configurator.setLevel(b, Level.INFO); - if (cLoggerConfig.equals(rootLoggerConfig)) { - cLoggerConfig = null; - } + Loggers loggers = new TestLoggers(root, a, b); - assertNull(cLoggerConfig); + LoggerLevel level = loggers.level("c"); + assertNull(level); } @Test public void testSetLevel() { - Logger root = logger("root"); + LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + Logger root = loggerContext.getRootLogger(); Configurator.setLevel(root, Level.ERROR); - Logger x = logger("a.b.c.p.X"); - Logger y = logger("a.b.c.p.Y"); - Logger z = logger("a.b.c.p.Z"); - Logger w = logger("a.b.c.s.W"); + Logger x = loggerContext.getLogger("a.b.c.p.X"); + Logger y = loggerContext.getLogger("a.b.c.p.Y"); + Logger z = loggerContext.getLogger("a.b.c.p.Z"); + Logger w = loggerContext.getLogger("a.b.c.s.W"); Configurator.setLevel(x, 
Level.INFO); Configurator.setLevel(y, Level.INFO); Configurator.setLevel(z, Level.INFO); @@ -182,14 +175,21 @@ public void testSetLevel() { @Test public void testSetRootLevel() { - Logger root = logger("root"); + LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + Configuration config = loggerContext.getConfiguration(); + LoggerConfig rootConfig = new LoggerConfig("", Level.ERROR, false); + config.addLogger("", rootConfig); + loggerContext.updateLoggers(); + + Logger root = LogManager.getLogger(""); Configurator.setLevel(root, Level.ERROR); - Logger p = logger("a.b.c.p"); - Logger x = logger("a.b.c.p.X"); - Logger y = logger("a.b.c.p.Y"); - Logger z = logger("a.b.c.p.Z"); - Logger w = logger("a.b.c.s.W"); + Logger p = loggerContext.getLogger("a.b.c.p"); + Logger x = loggerContext.getLogger("a.b.c.p.X"); + Logger y = loggerContext.getLogger("a.b.c.p.Y"); + Logger z = loggerContext.getLogger("a.b.c.p.Z"); + Logger w = loggerContext.getLogger("a.b.c.s.W"); + Configurator.setLevel(p, null); Configurator.setLevel(x, Level.INFO); Configurator.setLevel(y, Level.INFO); Configurator.setLevel(z, Level.INFO); @@ -197,21 +197,20 @@ public void testSetRootLevel() { Loggers loggers = new TestLoggers(root, x, y, z, w); - List modified = loggers.setLevel("root", Level.DEBUG); - assertEquals(Arrays.asList("a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W", "root"), modified); + List modified = loggers.setLevel("", Level.DEBUG); + assertEquals(Arrays.asList("", "a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W"), modified); - // log4j2.properties has defined root logger level as INFO - assertEquals(Level.INFO, p.getLevel()); + assertEquals(p.getLevel(), Level.DEBUG); - assertEquals(Level.DEBUG, root.getLevel()); + assertEquals(root.getLevel(), Level.DEBUG); - assertEquals(Level.DEBUG, w.getLevel()); - assertEquals(Level.DEBUG, x.getLevel()); - assertEquals(Level.DEBUG, y.getLevel()); - assertEquals(Level.DEBUG, z.getLevel()); + assertEquals(w.getLevel(), 
Level.DEBUG); + assertEquals(x.getLevel(), Level.DEBUG); + assertEquals(y.getLevel(), Level.DEBUG); + assertEquals(z.getLevel(), Level.DEBUG); Map expectedLevels = new HashMap<>(); - expectedLevels.put("root", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); + expectedLevels.put("", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); expectedLevels.put("a.b.c.p.X", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); expectedLevels.put("a.b.c.p.Y", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); expectedLevels.put("a.b.c.p.Z", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); @@ -223,7 +222,8 @@ public void testSetRootLevel() { @Test public void testSetLevelNullArguments() { - Logger root = logger("root"); + LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); + Logger root = loggerContext.getRootLogger(); Loggers loggers = new TestLoggers(root); assertThrows(NullPointerException.class, () -> loggers.setLevel(null, Level.INFO)); assertThrows(NullPointerException.class, () -> loggers.setLevel("root", null)); @@ -259,8 +259,4 @@ Logger rootLogger() { return rootLogger; } } - - private Logger logger(String name) { - return LogManager.getLogger(name); - } -} +} \ No newline at end of file diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java index 68d83de8a46fe..04fe26f661851 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/rest/ConnectRestServerTest.java @@ -345,7 +345,7 @@ public void testRequestLogs() throws IOException { server.initializeServer(); server.initializeResources(herder); - LogCaptureAppender restServerAppender = LogCaptureAppender.createAndRegister(ConnectRestServerTest.class); + LogCaptureAppender restServerAppender = 
LogCaptureAppender.createAndRegister(); HttpRequest request = new HttpGet("/"); HttpResponse response = executeRequest(server.advertisedUrl(), request); diff --git a/connect/runtime/src/test/resources/log4j2.properties b/connect/runtime/src/test/resources/log4j2.properties index f62294c0bef30..7b1ee4cd2991b 100644 --- a/connect/runtime/src/test/resources/log4j2.properties +++ b/connect/runtime/src/test/resources/log4j2.properties @@ -14,36 +14,38 @@ # See the License for the specific language governing permissions and # limitations under the License. ## -name=RuntimeTestConfig -appenders=stdout +name=ConnectRuntimeTestConfig +appenders=console -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout # -# The `%X{connector.context}` parameter includes connector-specific and task-specific information in the log message +# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information +# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a +# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. 
# -appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n +appender.console.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n -loggers=kafka,state.change.logger,org.apache.kafka.connect,org.apache.kafka.consumer,org.apache.kafka.coordinator.group +loggers=kafka,stateChangeLogger,kafkaConnect,kafkaConsumer,coordinatorGroup rootLogger.level=INFO -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT logger.kafka.name=kafka logger.kafka.level=WARN -logger.state.change.logger.name=state.change.logger -logger.state.change.logger.level=OFF +logger.stateChangeLogger.name=state.change.logger +logger.stateChangeLogger.level=OFF -logger.org.apache.kafka.connect.name=org.apache.kafka.connect -logger.org.apache.kafka.connect.level=DEBUG - -logger.org.apache.kafka.consumer.name=org.apache.kafka.consumer -logger.org.apache.kafka.consumer.level=DEBUG +logger.kafkaConnect.name=org.apache.kafka.connect +logger.kafkaConnect.level=DEBUG # Troubleshooting KAFKA-17493. -logger.org.apache.kafka.coordinator.group.name=org.apache.kafka.coordinator.group -logger.org.apache.kafka.coordinator.group.level=DEBUG -logger.org.apache.kafka.coordinator.group.additivity=false +logger.kafkaConsumer.name=org.apache.kafka.consumer +logger.kafkaConsumer.level=DEBUG + +logger.coordinatorGroup.name=org.apache.kafka.coordinator.group +logger.coordinatorGroup.level=DEBUG +#logger.coordinatorGroup.additivity=false diff --git a/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala b/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala index 1b90980bc7d77..361eb2954a29c 100644 --- a/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala +++ b/core/src/main/scala/kafka/docker/KafkaDockerWrapper.scala @@ -79,7 +79,7 @@ object KafkaDockerWrapper extends Logging { required(true). help( """Directory which holds default properties. 
It should contain the three file:- - |server.properties, log4j2.properties and tools-log4j2.properties. + |server.properties, log4j.properties and tools-log4j.properties. |""".stripMargin) setupParser.addArgument("--mounted-configs-dir", "-M"). @@ -87,7 +87,7 @@ object KafkaDockerWrapper extends Logging { required(true). help( """Directory which holds user mounted properties. It can contain none to all the three files:- - |server.properties, log4j2.properties and tools-log4j2.properties.""".stripMargin) + |server.properties, log4j.properties and tools-log4j.properties.""".stripMargin) setupParser.addArgument("--final-configs-dir", "-F"). action(store()). @@ -238,8 +238,8 @@ object KafkaDockerWrapper extends Logging { private object Constants { val ServerPropsFilename = "server.properties" - val Log4jPropsFilename = "log4j2.properties" - val ToolsLog4jFilename = "tools-log4j2.properties" + val Log4jPropsFilename = "log4j.properties" + val ToolsLog4jFilename = "tools-log4j.properties" val KafkaLog4JLoggersEnv = "KAFKA_LOG4J_LOGGERS" val KafkaLog4jRootLoglevelEnv = "KAFKA_LOG4J_ROOT_LOGLEVEL" val KafkaToolsLog4jLoglevelEnv = "KAFKA_TOOLS_LOG4J_LOGLEVEL" diff --git a/core/src/main/scala/kafka/utils/Log4jController.scala b/core/src/main/scala/kafka/utils/Log4jController.scala index 6d64757f56029..b4f4a33960da9 100755 --- a/core/src/main/scala/kafka/utils/Log4jController.scala +++ b/core/src/main/scala/kafka/utils/Log4jController.scala @@ -28,13 +28,7 @@ import scala.jdk.CollectionConverters._ object Log4jController { - /** - * Note: In log4j, the root logger's name was "root" and Kafka also followed that name for dynamic logging control feature. - * - * The root logger's name is changed in log4j2 to empty string (see: [[LogManager.ROOT_LOGGER_NAME]]) but for backward- - * compatibility. Kafka keeps its original root logger name. It is why here is a dedicated definition for the root logger name. 
- */ - val ROOT_LOGGER = "root" + val ROOT_LOGGER = "" /** * Returns given logger's parent's (or the first ancestor's) name. diff --git a/core/src/test/java/kafka/admin/AclCommandTest.java b/core/src/test/java/kafka/admin/AclCommandTest.java index b21a1bf2e3bc5..6dbf11b7c1bf5 100644 --- a/core/src/test/java/kafka/admin/AclCommandTest.java +++ b/core/src/test/java/kafka/admin/AclCommandTest.java @@ -254,7 +254,7 @@ public void testProducerConsumerCliWithAdminAPIAndBootstrapController(ClusterIns @ClusterTest public void testAclCliWithClientId(ClusterInstance cluster) throws IOException, InterruptedException { - try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AclCommandTest.class)) { + try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister()) { appender.setClassLogger(AppInfoParser.class, Level.WARN); testAclCli(cluster, adminArgs(cluster.bootstrapServers(), Optional.of(TestUtils.tempFile("client.id=my-client")))); assertEquals(0, appender.getEvents().stream() @@ -266,7 +266,7 @@ public void testAclCliWithClientId(ClusterInstance cluster) throws IOException, @ClusterTest public void testAclCliWithClientIdAndBootstrapController(ClusterInstance cluster) throws IOException, InterruptedException { - try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AclCommandTest.class)) { + try (LogCaptureAppender appender = LogCaptureAppender.createAndRegister()) { appender.setClassLogger(AppInfoParser.class, Level.WARN); testAclCli(cluster, adminArgsWithBootstrapController(cluster.bootstrapControllers(), Optional.of(TestUtils.tempFile("client.id=my-client")))); assertEquals(0, appender.getEvents().stream() diff --git a/core/src/test/resources/log4j2.properties b/core/src/test/resources/log4j2.properties index 7fdec78ff7e04..a0cc7cfffa392 100644 --- a/core/src/test/resources/log4j2.properties +++ b/core/src/test/resources/log4j2.properties @@ -13,27 +13,29 @@ # See the License for the specific language governing permissions 
and # limitations under the License. # Root logger configuration -name=CoreTestConfig +name=TestConfig + # Appender configuration -appender.STDOUT.type=Console -appender.STDOUT.name=STDOUT -appender.STDOUT.layout.type=PatternLayout -appender.STDOUT.layout.pattern=[%d] %p %m (%c:%L)%n +appender=console +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=OFF -rootLogger.appenderRefs=STDOUT -rootLogger.appenderRef.STDOUT.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT -loggers=kafka,org.apache.kafka,org.apache.zookeeper +loggers=kafka,apacheKafka,zookeeper # Logger configurations logger.kafka.name=kafka logger.kafka.level=WARN -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=WARN +logger.apacheKafka.name=org.apache.kafka +logger.apacheKafka.level=WARN # zkclient can be verbose, during debugging it is common to adjust it separately -logger.org.apache.zookeeper.name=org.apache.zookeeper -logger.org.apache.zookeeper.level=WARN +logger.zookeeper.name=org.apache.zookeeper +logger.zookeeper.level=WARN diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala index d04c9e6ee2224..f2df3cf46ede1 100644 --- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala +++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala @@ -41,7 +41,7 @@ import org.apache.kafka.common.config.{ConfigResource, LogLevelConfig, SslConfig import org.apache.kafka.common.errors._ import org.apache.kafka.common.internals.Topic import org.apache.kafka.common.quota.{ClientQuotaAlteration, ClientQuotaEntity, ClientQuotaFilter} -import org.apache.kafka.common.requests.{DeleteRecordsRequest} +import org.apache.kafka.common.requests.DeleteRecordsRequest import 
org.apache.kafka.common.resource.{PatternType, ResourcePattern, ResourceType} import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer} import org.apache.kafka.common.utils.{Time, Utils} @@ -53,8 +53,9 @@ import org.apache.kafka.security.authorizer.AclEntry import org.apache.kafka.server.config.{QuotaConfigs, ServerConfigs, ServerLogConfigs} import org.apache.kafka.storage.internals.log.{CleanerConfig, LogConfig} import org.apache.kafka.test.TestUtils.DEFAULT_MAX_WAIT_MS +import org.apache.logging.log4j.core.config.Configurator import org.junit.jupiter.api.Assertions._ -import org.junit.jupiter.api.{AfterEach, BeforeEach, Disabled, TestInfo, Timeout} +import org.junit.jupiter.api.{BeforeEach, TestInfo, Timeout} import org.junit.jupiter.params.ParameterizedTest import org.junit.jupiter.params.provider.ValueSource import org.slf4j.LoggerFactory @@ -86,16 +87,11 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { @BeforeEach override def setUp(testInfo: TestInfo): Unit = { super.setUp(testInfo) + Configurator.reconfigure(); brokerLoggerConfigResource = new ConfigResource( ConfigResource.Type.BROKER_LOGGER, brokers.head.config.brokerId.toString) } - @AfterEach - override def tearDown(): Unit = { - teardownBrokerLoggers() - super.tearDown() - } - @ParameterizedTest @Timeout(30) @ValueSource(strings = Array("kraft")) @@ -128,7 +124,7 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { try { val alterLogLevelsEntries = Seq( - new ConfigEntry("kafka.controller.KafkaController", LogLevelConfig.INFO_LOG_LEVEL) + new ConfigEntry("kafka.server.ControllerServer", LogLevelConfig.INFO_LOG_LEVEL) ).asJavaCollection val exception = assertThrows(classOf[ExecutionException], () => { @@ -3063,9 +3059,23 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { client = createAdminClient LoggerFactory.getLogger("kafka.cluster.Replica").trace("Message to create the logger") val loggerConfig = 
describeBrokerLoggers() + + // Logger name can't be empty. + assertNull(loggerConfig.get("")) + + // "root" -> "OFF" + val rootLogLevel = loggerConfig.get(Log4jController.ROOT_LOGGER).value + assertEquals("OFF", rootLogLevel) + + // Configured loggers: "kafka" -> "WARN", "org.apache.kafka" -> "WARN", "org.apache.zookeeper" -> "WARN" + assertEquals("WARN", loggerConfig.get("kafka").value) + assertEquals("WARN", loggerConfig.get("org.apache.kafka").value) + assertEquals("WARN", loggerConfig.get("org.apache.zookeeper").value) + + // we expect the log level to be inherited from the first ancestor with a level configured. + // For example, `kafka.cluster.Replica` from `kafka` (ERROR). val kafkaLogLevel = loggerConfig.get("kafka").value() val logCleanerLogLevelConfig = loggerConfig.get("kafka.cluster.Replica") - // we expect the log level to be inherited from the first ancestor with a level configured assertEquals(kafkaLogLevel, logCleanerLogLevelConfig.value()) assertEquals("kafka.cluster.Replica", logCleanerLogLevelConfig.name()) assertEquals(ConfigSource.DYNAMIC_BROKER_LOGGER_CONFIG, logCleanerLogLevelConfig.source()) @@ -3076,72 +3086,59 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { @ParameterizedTest @ValueSource(strings = Array("kraft")) - @Disabled // To be re-enabled once KAFKA-8779 is resolved def testIncrementalAlterConfigsForLog4jLogLevels(quorum: String): Unit = { client = createAdminClient + val ancestorLogger = "kafka"; val initialLoggerConfig = describeBrokerLoggers() - val initialRootLogLevel = initialLoggerConfig.get(Log4jController.ROOT_LOGGER).value() - assertEquals(initialRootLogLevel, initialLoggerConfig.get("kafka.controller.KafkaController").value()) - assertEquals(initialRootLogLevel, initialLoggerConfig.get("kafka.log.LogCleaner").value()) - assertEquals(initialRootLogLevel, initialLoggerConfig.get("kafka.server.ReplicaManager").value()) - - val newRootLogLevel = LogLevelConfig.DEBUG_LOG_LEVEL - val 
alterRootLoggerEntry = Seq( - new AlterConfigOp(new ConfigEntry(Log4jController.ROOT_LOGGER, newRootLogLevel), AlterConfigOp.OpType.SET) + val initialKafkaLogLevel = initialLoggerConfig.get("kafka").value() + assertEquals(initialKafkaLogLevel, initialLoggerConfig.get("kafka.server.ControllerServer").value()) + assertEquals(initialKafkaLogLevel, initialLoggerConfig.get("kafka.log.LogCleaner").value()) + assertEquals(initialKafkaLogLevel, initialLoggerConfig.get("kafka.server.ReplicaManager").value()) + + val newAncestorLogLevel = LogLevelConfig.DEBUG_LOG_LEVEL + val alterAncestorLoggerEntry = Seq( + new AlterConfigOp(new ConfigEntry(ancestorLogger, newAncestorLogLevel), AlterConfigOp.OpType.SET) ).asJavaCollection // Test validateOnly does not change anything - alterBrokerLoggers(alterRootLoggerEntry, validateOnly = true) + alterBrokerLoggers(alterAncestorLoggerEntry, validateOnly = true) val validatedLoggerConfig = describeBrokerLoggers() - assertEquals(initialRootLogLevel, validatedLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) - assertEquals(initialRootLogLevel, validatedLoggerConfig.get("kafka.controller.KafkaController").value()) - assertEquals(initialRootLogLevel, validatedLoggerConfig.get("kafka.log.LogCleaner").value()) - assertEquals(initialRootLogLevel, validatedLoggerConfig.get("kafka.server.ReplicaManager").value()) - assertEquals(initialRootLogLevel, validatedLoggerConfig.get("kafka.zookeeper.ZooKeeperClient").value()) + assertEquals(initialKafkaLogLevel, validatedLoggerConfig.get("kafka").value()) + assertEquals(initialKafkaLogLevel, validatedLoggerConfig.get("kafka.server.ControllerServer").value()) + assertEquals(initialKafkaLogLevel, validatedLoggerConfig.get("kafka.log.LogCleaner").value()) + assertEquals(initialKafkaLogLevel, validatedLoggerConfig.get("kafka.server.ReplicaManager").value()) - // test that we can change them and unset loggers still use the root's log level - alterBrokerLoggers(alterRootLoggerEntry) + // test that we can 
change them and unset loggers still use the ancestor's log level + alterBrokerLoggers(alterAncestorLoggerEntry) val changedRootLoggerConfig = describeBrokerLoggers() - assertEquals(newRootLogLevel, changedRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) - assertEquals(newRootLogLevel, changedRootLoggerConfig.get("kafka.controller.KafkaController").value()) - assertEquals(newRootLogLevel, changedRootLoggerConfig.get("kafka.log.LogCleaner").value()) - assertEquals(newRootLogLevel, changedRootLoggerConfig.get("kafka.server.ReplicaManager").value()) - assertEquals(newRootLogLevel, changedRootLoggerConfig.get("kafka.zookeeper.ZooKeeperClient").value()) - - // alter the ZK client's logger so we can later test resetting it - val alterZKLoggerEntry = Seq( - new AlterConfigOp(new ConfigEntry("kafka.zookeeper.ZooKeeperClient", LogLevelConfig.ERROR_LOG_LEVEL), AlterConfigOp.OpType.SET) - ).asJavaCollection - alterBrokerLoggers(alterZKLoggerEntry) - val changedZKLoggerConfig = describeBrokerLoggers() - assertEquals(LogLevelConfig.ERROR_LOG_LEVEL, changedZKLoggerConfig.get("kafka.zookeeper.ZooKeeperClient").value()) + assertEquals(newAncestorLogLevel, changedRootLoggerConfig.get("kafka").value()) + assertEquals(newAncestorLogLevel, changedRootLoggerConfig.get("kafka.server.ControllerServer").value()) + assertEquals(newAncestorLogLevel, changedRootLoggerConfig.get("kafka.log.LogCleaner").value()) + assertEquals(newAncestorLogLevel, changedRootLoggerConfig.get("kafka.server.ReplicaManager").value()) // properly test various set operations and one delete val alterLogLevelsEntries = Seq( - new AlterConfigOp(new ConfigEntry("kafka.controller.KafkaController", LogLevelConfig.INFO_LOG_LEVEL), AlterConfigOp.OpType.SET), + new AlterConfigOp(new ConfigEntry("kafka.server.ControllerServer", LogLevelConfig.INFO_LOG_LEVEL), AlterConfigOp.OpType.SET), new AlterConfigOp(new ConfigEntry("kafka.log.LogCleaner", LogLevelConfig.ERROR_LOG_LEVEL), AlterConfigOp.OpType.SET), new 
AlterConfigOp(new ConfigEntry("kafka.server.ReplicaManager", LogLevelConfig.TRACE_LOG_LEVEL), AlterConfigOp.OpType.SET), - new AlterConfigOp(new ConfigEntry("kafka.zookeeper.ZooKeeperClient", ""), AlterConfigOp.OpType.DELETE) // should reset to the root logger level ).asJavaCollection alterBrokerLoggers(alterLogLevelsEntries) val alteredLoggerConfig = describeBrokerLoggers() - assertEquals(newRootLogLevel, alteredLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) - assertEquals(LogLevelConfig.INFO_LOG_LEVEL, alteredLoggerConfig.get("kafka.controller.KafkaController").value()) + assertEquals(newAncestorLogLevel, alteredLoggerConfig.get("kafka").value()) + assertEquals(LogLevelConfig.INFO_LOG_LEVEL, alteredLoggerConfig.get("kafka.server.ControllerServer").value()) assertEquals(LogLevelConfig.ERROR_LOG_LEVEL, alteredLoggerConfig.get("kafka.log.LogCleaner").value()) assertEquals(LogLevelConfig.TRACE_LOG_LEVEL, alteredLoggerConfig.get("kafka.server.ReplicaManager").value()) - assertEquals(newRootLogLevel, alteredLoggerConfig.get("kafka.zookeeper.ZooKeeperClient").value()) } /** * 1. Assume ROOT logger == TRACE - * 2. Change kafka.controller.KafkaController logger to INFO - * 3. Unset kafka.controller.KafkaController via AlterConfigOp.OpType.DELETE (resets it to the root logger - TRACE) + * 2. Change kafka.server.ControllerServer logger to INFO + * 3. Unset kafka.server.ControllerServer via AlterConfigOp.OpType.DELETE (resets it to the root logger - TRACE) * 4. Change ROOT logger to ERROR - * 5. Ensure the kafka.controller.KafkaController logger's level is ERROR (the current root logger level) + * 5. 
Ensure the kafka.server.ControllerServer logger's level is ERROR (the current root logger level) */ @ParameterizedTest @ValueSource(strings = Array("kraft")) - @Disabled // To be re-enabled once KAFKA-8779 is resolved def testIncrementalAlterConfigsForLog4jLogLevelsCanResetLoggerToCurrentRoot(quorum: String): Unit = { client = createAdminClient // step 1 - configure root logger @@ -3152,25 +3149,25 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { alterBrokerLoggers(alterRootLoggerEntry) val initialLoggerConfig = describeBrokerLoggers() assertEquals(initialRootLogLevel, initialLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) - assertEquals(initialRootLogLevel, initialLoggerConfig.get("kafka.controller.KafkaController").value()) + assertEquals(initialRootLogLevel, initialLoggerConfig.get("kafka.server.ControllerServer").value()) - // step 2 - change KafkaController logger to INFO + // step 2 - change ControllerServer logger to INFO val alterControllerLoggerEntry = Seq( - new AlterConfigOp(new ConfigEntry("kafka.controller.KafkaController", LogLevelConfig.INFO_LOG_LEVEL), AlterConfigOp.OpType.SET) + new AlterConfigOp(new ConfigEntry("kafka.server.ControllerServer", LogLevelConfig.INFO_LOG_LEVEL), AlterConfigOp.OpType.SET) ).asJavaCollection alterBrokerLoggers(alterControllerLoggerEntry) val changedControllerLoggerConfig = describeBrokerLoggers() assertEquals(initialRootLogLevel, changedControllerLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) - assertEquals(LogLevelConfig.INFO_LOG_LEVEL, changedControllerLoggerConfig.get("kafka.controller.KafkaController").value()) + assertEquals(LogLevelConfig.INFO_LOG_LEVEL, changedControllerLoggerConfig.get("kafka.server.ControllerServer").value()) - // step 3 - unset KafkaController logger + // step 3 - unset ControllerServer logger val deleteControllerLoggerEntry = Seq( - new AlterConfigOp(new ConfigEntry("kafka.controller.KafkaController", ""), AlterConfigOp.OpType.DELETE) + new 
AlterConfigOp(new ConfigEntry("kafka.server.ControllerServer", ""), AlterConfigOp.OpType.DELETE) ).asJavaCollection alterBrokerLoggers(deleteControllerLoggerEntry) val deletedControllerLoggerConfig = describeBrokerLoggers() assertEquals(initialRootLogLevel, deletedControllerLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) - assertEquals(initialRootLogLevel, deletedControllerLoggerConfig.get("kafka.controller.KafkaController").value()) + assertEquals(initialRootLogLevel, deletedControllerLoggerConfig.get("kafka.server.ControllerServer").value()) val newRootLogLevel = LogLevelConfig.ERROR_LOG_LEVEL val newAlterRootLoggerEntry = Seq( @@ -3179,12 +3176,11 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { alterBrokerLoggers(newAlterRootLoggerEntry) val newRootLoggerConfig = describeBrokerLoggers() assertEquals(newRootLogLevel, newRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) - assertEquals(newRootLogLevel, newRootLoggerConfig.get("kafka.controller.KafkaController").value()) + assertEquals(newRootLogLevel, newRootLoggerConfig.get("kafka.server.ControllerServer").value()) } @ParameterizedTest @ValueSource(strings = Array("kraft")) - @Disabled // to be re-enabled once KAFKA-8779 is resolved def testIncrementalAlterConfigsForLog4jLogLevelsCannotResetRootLogger(quorum: String): Unit = { client = createAdminClient val deleteRootLoggerEntry = Seq( @@ -3196,7 +3192,6 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { @ParameterizedTest @ValueSource(strings = Array("kraft")) - @Disabled // To be re-enabled once KAFKA-8779 is resolved def testIncrementalAlterConfigsForLog4jLogLevelsDoesNotWorkWithInvalidConfigs(quorum: String): Unit = { client = createAdminClient val validLoggerName = "kafka.server.KafkaRequestHandler" @@ -3209,29 +3204,28 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { new AlterConfigOp(new ConfigEntry("kafka.server.KafkaRequestHandler", LogLevelConfig.INFO_LOG_LEVEL), 
AlterConfigOp.OpType.SET), // valid new AlterConfigOp(new ConfigEntry("kafka.network.SocketServer", LogLevelConfig.ERROR_LOG_LEVEL), AlterConfigOp.OpType.APPEND) // append is not supported ).asJavaCollection - assertTrue(assertThrows(classOf[ExecutionException], - () => alterBrokerLoggers(appendLogLevelEntries)).getCause.isInstanceOf[InvalidRequestException]) + assertInstanceOf(classOf[InvalidRequestException], assertThrows(classOf[ExecutionException], () => alterBrokerLoggers(appendLogLevelEntries)).getCause) assertLogLevelDidNotChange() val subtractLogLevelEntries = Seq( new AlterConfigOp(new ConfigEntry("kafka.server.KafkaRequestHandler", LogLevelConfig.INFO_LOG_LEVEL), AlterConfigOp.OpType.SET), // valid new AlterConfigOp(new ConfigEntry("kafka.network.SocketServer", LogLevelConfig.ERROR_LOG_LEVEL), AlterConfigOp.OpType.SUBTRACT) // subtract is not supported ).asJavaCollection - assertTrue(assertThrows(classOf[ExecutionException], () => alterBrokerLoggers(subtractLogLevelEntries)).getCause.isInstanceOf[InvalidRequestException]) + assertInstanceOf(classOf[InvalidRequestException], assertThrows(classOf[ExecutionException], () => alterBrokerLoggers(subtractLogLevelEntries)).getCause) assertLogLevelDidNotChange() val invalidLogLevelLogLevelEntries = Seq( new AlterConfigOp(new ConfigEntry("kafka.server.KafkaRequestHandler", LogLevelConfig.INFO_LOG_LEVEL), AlterConfigOp.OpType.SET), // valid new AlterConfigOp(new ConfigEntry("kafka.network.SocketServer", "OFF"), AlterConfigOp.OpType.SET) // OFF is not a valid log level ).asJavaCollection - assertTrue(assertThrows(classOf[ExecutionException], () => alterBrokerLoggers(invalidLogLevelLogLevelEntries)).getCause.isInstanceOf[InvalidRequestException]) + assertInstanceOf(classOf[InvalidConfigurationException], assertThrows(classOf[ExecutionException], () => alterBrokerLoggers(invalidLogLevelLogLevelEntries)).getCause) assertLogLevelDidNotChange() val invalidLoggerNameLogLevelEntries = Seq( new AlterConfigOp(new 
ConfigEntry("kafka.server.KafkaRequestHandler", LogLevelConfig.INFO_LOG_LEVEL), AlterConfigOp.OpType.SET), // valid new AlterConfigOp(new ConfigEntry("Some Other LogCleaner", LogLevelConfig.ERROR_LOG_LEVEL), AlterConfigOp.OpType.SET) // invalid logger name is not supported ).asJavaCollection - assertTrue(assertThrows(classOf[ExecutionException], () => alterBrokerLoggers(invalidLoggerNameLogLevelEntries)).getCause.isInstanceOf[InvalidRequestException]) + assertInstanceOf(classOf[InvalidConfigurationException], assertThrows(classOf[ExecutionException], () => alterBrokerLoggers(invalidLoggerNameLogLevelEntries)).getCause) assertLogLevelDidNotChange() } @@ -3245,7 +3239,7 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { client = createAdminClient val alterLogLevelsEntries = Seq( - new ConfigEntry("kafka.controller.KafkaController", LogLevelConfig.INFO_LOG_LEVEL) + new ConfigEntry("kafka.server.ControllerServer", LogLevelConfig.INFO_LOG_LEVEL) ).asJavaCollection val alterResult = client.alterConfigs(Map(brokerLoggerConfigResource -> new Config(alterLogLevelsEntries)).asJava) assertTrue(assertThrows(classOf[ExecutionException], () => alterResult.values.get(brokerLoggerConfigResource).get).getCause.isInstanceOf[InvalidRequestException]) @@ -3264,28 +3258,6 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { def describeBrokerLoggers(): Config = client.describeConfigs(Collections.singletonList(brokerLoggerConfigResource)).values.get(brokerLoggerConfigResource).get() - /** - * Due to the fact that log4j is not re-initialized across tests, changing a logger's log level persists across test classes. - * We need to clean up the changes done while testing. 
- */ - private def teardownBrokerLoggers(): Unit = { - if (changedBrokerLoggers.nonEmpty) { - val validLoggers = describeBrokerLoggers().entries().asScala.filterNot(_.name.equals(Log4jController.ROOT_LOGGER)).map(_.name).toSet - val unsetBrokerLoggersEntries = changedBrokerLoggers - .intersect(validLoggers) - .map { logger => new AlterConfigOp(new ConfigEntry(logger, ""), AlterConfigOp.OpType.DELETE) } - .asJavaCollection - - // ensure that we first reset the root logger to an arbitrary log level. Note that we cannot reset it to its original value - alterBrokerLoggers(List( - new AlterConfigOp(new ConfigEntry(Log4jController.ROOT_LOGGER, LogLevelConfig.FATAL_LOG_LEVEL), AlterConfigOp.OpType.SET) - ).asJavaCollection) - alterBrokerLoggers(unsetBrokerLoggersEntries) - - changedBrokerLoggers.clear() - } - } - @ParameterizedTest @ValueSource(strings = Array("kraft")) def testAppendConfigToEmptyDefaultValue(ignored: String): Unit = { diff --git a/core/src/test/scala/other/kafka.log4j2.properties b/core/src/test/scala/other/kafka.log4j.properties similarity index 61% rename from core/src/test/scala/other/kafka.log4j2.properties rename to core/src/test/scala/other/kafka.log4j.properties index 1becd9dd31212..35a2b23796a4e 100644 --- a/core/src/test/scala/other/kafka.log4j2.properties +++ b/core/src/test/scala/other/kafka.log4j.properties @@ -4,25 +4,19 @@ # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at -# +# # http://www.apache.org/licenses/LICENSE-2.0 -# +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=CoreTestScalaOtherConfig +log4j.rootLogger=INFO, KAFKA -appenders=kafkaAppender +log4j.appender.KAFKA=kafka.log4j.KafkaAppender -appender.kafkaAppender.type=Kafka -appender.kafkaAppender.name=KAFKA -appender.kafkaAppender.topic=test-logger -appender.kafkaAppender.bootstrapServers=localhost:9092 -appender.kafkaAppender.keySerializer=org.apache.kafka.common.serialization.StringSerializer -appender.kafkaAppender.valueSerializer=kafka.AppenderStringSerializer - -rootLogger.level=INFO -rootLogger.appenderRefs=kafkaAppender -rootLogger.appenderRef.kafkaAppender.ref=KAFKA +log4j.appender.KAFKA.Port=9092 +log4j.appender.KAFKA.Host=localhost +log4j.appender.KAFKA.Topic=test-logger +log4j.appender.KAFKA.Serializer=kafka.AppenderStringSerializer \ No newline at end of file diff --git a/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala b/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala index b5a4b3a1c9ce7..409c5dccdbb2d 100644 --- a/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala +++ b/core/src/test/scala/unit/kafka/docker/KafkaDockerWrapperTest.scala @@ -186,13 +186,13 @@ class KafkaDockerWrapperTest { "SOME_VARIABLE" -> "Some Value" ) - Files.write(defaultConfigsPath.resolve("log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(mountedConfigsPath.resolve("log4j2.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("log4j.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + 
Files.write(finalConfigsPath.resolve("log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j2.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" + "\n" + "log4j.rootLogger=ERROR, stdout" + "\n" + "log4j.logger.kafka=INFO" + "\n" + @@ -212,12 +212,12 @@ class KafkaDockerWrapperTest { "SOME_VARIABLE" -> "Some Value" ) - Files.write(defaultConfigsPath.resolve("log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j2.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j.properties") val actual = try source.mkString finally source.close() val expected = "default.config=default value" + "\n" + "log4j.rootLogger=ERROR, stdout" + "\n" + "log4j.logger.kafka=INFO" + "\n" + @@ -233,13 +233,13 @@ class KafkaDockerWrapperTest { val envVars = Map.empty[String, String] - Files.write(defaultConfigsPath.resolve("log4j2.properties"), "default.config=default 
value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(mountedConfigsPath.resolve("log4j2.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("log4j.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j2.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/log4j.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" @@ -276,13 +276,13 @@ class KafkaDockerWrapperTest { val envVars = Map("KAFKA_TOOLS_LOG4J_LOGLEVEL" -> "TRACE") - Files.write(defaultConfigsPath.resolve("tools-log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(mountedConfigsPath.resolve("tools-log4j2.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("tools-log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("tools-log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("tools-log4j.properties"), 
"mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("tools-log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareToolsLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j2.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" + "\n" + "log4j.rootLogger=TRACE, stderr" @@ -295,12 +295,12 @@ class KafkaDockerWrapperTest { val envVars = Map("KAFKA_TOOLS_LOG4J_LOGLEVEL" -> "TRACE") - Files.write(defaultConfigsPath.resolve("tools-log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("tools-log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("tools-log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("tools-log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareToolsLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j2.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j.properties") val actual = try source.mkString finally source.close() val expected = "default.config=default value" + "\n" + "log4j.rootLogger=TRACE, stderr" @@ -313,13 +313,13 @@ class KafkaDockerWrapperTest { val envVars = Map.empty[String, String] - 
Files.write(defaultConfigsPath.resolve("tools-log4j2.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(mountedConfigsPath.resolve("tools-log4j2.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() - Files.write(finalConfigsPath.resolve("tools-log4j2.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(defaultConfigsPath.resolve("tools-log4j.properties"), "default.config=default value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(mountedConfigsPath.resolve("tools-log4j.properties"), "mounted.config=mounted value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() + Files.write(finalConfigsPath.resolve("tools-log4j.properties"), "existing.config=existing value".getBytes(StandardCharsets.UTF_8)).toFile.deleteOnExit() KafkaDockerWrapper.prepareToolsLog4jConfigs(defaultConfigsPath, mountedConfigsPath, finalConfigsPath, envVars) - val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j2.properties") + val source = scala.io.Source.fromFile(finalConfigsPath.toString + "/tools-log4j.properties") val actual = try source.mkString finally source.close() val expected = "mounted.config=mounted value" diff --git a/docker/docker_official_images/3.7.0/jvm/Dockerfile b/docker/docker_official_images/3.7.0/jvm/Dockerfile index a9b7dce603b53..905e2f2149bba 100755 --- a/docker/docker_official_images/3.7.0/jvm/Dockerfile +++ b/docker/docker_official_images/3.7.0/jvm/Dockerfile @@ -76,8 +76,8 @@ RUN set -eux ; \ chown appuser:appuser -R /usr/logs /opt/kafka /mnt/shared/config; \ chown appuser:root -R /var/lib/kafka /etc/kafka/secrets /etc/kafka; \ chmod -R ug+w /etc/kafka /var/lib/kafka /etc/kafka/secrets; \ - cp /opt/kafka/config/log4j2.properties /etc/kafka/docker/log4j2.properties; \ - cp /opt/kafka/config/tools-log4j2.properties 
/etc/kafka/docker/tools-log4j2.properties; \ + cp /opt/kafka/config/log4j.properties /etc/kafka/docker/log4j.properties; \ + cp /opt/kafka/config/tools-log4j.properties /etc/kafka/docker/tools-log4j.properties; \ cp /opt/kafka/config/kraft/server.properties /etc/kafka/docker/server.properties; \ rm kafka.tgz kafka.tgz.asc KEYS; \ apk del wget gpg gpg-agent; \ diff --git a/docker/examples/README.md b/docker/examples/README.md index 11258c91c5605..8f4fdce43673d 100644 --- a/docker/examples/README.md +++ b/docker/examples/README.md @@ -42,11 +42,11 @@ Using Environment Variables - To provide configs to log4j property files, following points should be considered:- - log4j properties provided via environment variables will be appended to the default properties file (log4j properties files bundled with kafka) -- `KAFKA_LOG4J_ROOT_LOGLEVEL` can be provided to set the value of log4j.rootLogger in log4j2.properties and tools-log4j2.properties -- log4j loggers can be added to log4j2.properties by setting them in KAFKA_LOG4J_LOGGERS environment variable in a single comma separated string +- `KAFKA_LOG4J_ROOT_LOGLEVEL` can be provided to set the value of log4j.rootLogger in log4j.properties and tools-log4j.properties +- log4j loggers can be added to log4j.properties by setting them in KAFKA_LOG4J_LOGGERS environment variable in a single comma separated string - Example: - Assuming that KAFKA_LOG4J_LOGGERS='property1=value1,property2=value2' environment variable is provided to docker container. - - log4j.logger.property1=value1 and log4j.logger.property2=value2 will be added to the log4j2.properties file inside docker container. + - log4j.logger.property1=value1 and log4j.logger.property2=value2 will be added to the log4j.properties file inside docker container. - Environment variables commonly used in Kafka can be provided via environment variables, for example `CLUSTER_ID`. 
- Command `docker run --env CONFIG_NAME=CONFIG_VALUE -p 9092:9092 apache/kafka:latest` can be used to provide environment variables to docker container diff --git a/docker/jvm/Dockerfile b/docker/jvm/Dockerfile index ccf4676b89233..72e35e63c0473 100644 --- a/docker/jvm/Dockerfile +++ b/docker/jvm/Dockerfile @@ -76,8 +76,8 @@ RUN set -eux ; \ chown appuser:appuser -R /usr/logs /opt/kafka /mnt/shared/config; \ chown appuser:root -R /var/lib/kafka /etc/kafka/secrets /etc/kafka; \ chmod -R ug+w /etc/kafka /var/lib/kafka /etc/kafka/secrets; \ - cp /opt/kafka/config/log4j2.properties /etc/kafka/docker/log4j2.properties; \ - cp /opt/kafka/config/tools-log4j2.properties /etc/kafka/docker/tools-log4j2.properties; \ + cp /opt/kafka/config/log4j.properties /etc/kafka/docker/log4j.properties; \ + cp /opt/kafka/config/tools-log4j.properties /etc/kafka/docker/tools-log4j.properties; \ cp /opt/kafka/config/kraft/server.properties /etc/kafka/docker/server.properties; \ rm kafka.tgz kafka.tgz.asc KEYS; \ apk del wget gpg gpg-agent; \ diff --git a/docker/native/Dockerfile b/docker/native/Dockerfile index a6ec7aed2c016..36104b76edca2 100644 --- a/docker/native/Dockerfile +++ b/docker/native/Dockerfile @@ -64,8 +64,8 @@ RUN apk update ; \ COPY --chown=appuser:root --from=build-native-image /app/kafka/kafka.Kafka /opt/kafka/ COPY --chown=appuser:root --from=build-native-image /app/kafka/config/kraft/server.properties /etc/kafka/docker/ -COPY --chown=appuser:root --from=build-native-image /app/kafka/config/log4j2.properties /etc/kafka/docker/ -COPY --chown=appuser:root --from=build-native-image /app/kafka/config/tools-log4j2.properties /etc/kafka/docker/ +COPY --chown=appuser:root --from=build-native-image /app/kafka/config/log4j.properties /etc/kafka/docker/ +COPY --chown=appuser:root --from=build-native-image /app/kafka/config/tools-log4j.properties /etc/kafka/docker/ COPY --chown=appuser:root resources/common-scripts /etc/kafka/docker/ COPY --chown=appuser:root launch 
/etc/kafka/docker/ diff --git a/docker/native/launch b/docker/native/launch index feb42ce646711..93de7aa9483b4 100755 --- a/docker/native/launch +++ b/docker/native/launch @@ -42,11 +42,11 @@ result=$(/opt/kafka/kafka.Kafka setup \ --default-configs-dir /etc/kafka/docker \ --mounted-configs-dir /mnt/shared/config \ --final-configs-dir /opt/kafka/config \ - -Dlog4j.configuration=file:/opt/kafka/config/tools-log4j2.properties 2>&1) || \ + -Dlog4j.configuration=file:/opt/kafka/config/tools-log4j.properties 2>&1) || \ echo $result | grep -i "already formatted" || \ { echo $result && (exit 1) } echo "WARNING: THIS IS AN EXPERIMENTAL DOCKER IMAGE RECOMMENDED FOR LOCAL TESTING AND DEVELOPMENT PURPOSES." -KAFKA_LOG4J_CMD_OPTS="-Dkafka.logs.dir=/opt/kafka/logs/ -Dlog4j.configuration=file:/opt/kafka/config/log4j2.properties" +KAFKA_LOG4J_CMD_OPTS="-Dkafka.logs.dir=/opt/kafka/logs/ -Dlog4j.configuration=file:/opt/kafka/config/log4j.properties" exec /opt/kafka/kafka.Kafka start --config /opt/kafka/config/server.properties $KAFKA_LOG4J_CMD_OPTS $KAFKA_JMX_OPTS ${KAFKA_OPTS-} diff --git a/docs/ops.html b/docs/ops.html index 12b5b43873581..e283cbc456f57 100644 --- a/docs/ops.html +++ b/docs/ops.html @@ -3957,7 +3957,7 @@

Preparing for migration

It is recommended to enable TRACE level logging for the migration components while the migration is active. This can - be done by adding the following log4j configuration to each KRaft controller's "log4j2.properties" file. + be done by adding the following log4j configuration to each KRaft controller's "log4j.properties" file.

log4j.logger.org.apache.kafka.metadata.migration=TRACE
diff --git a/docs/streams/tutorial.html b/docs/streams/tutorial.html index 6915be76f2495..933cd11f12be2 100644 --- a/docs/streams/tutorial.html +++ b/docs/streams/tutorial.html @@ -66,7 +66,7 @@

| |-- Pipe.java | |-- WordCount.java |-- resources - |-- log4j2.properties + |-- log4j.properties

The pom.xml file included in the project already has the Streams dependency defined. diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index 24c23f5017235..ed367af5eab5f 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -264,6 +264,7 @@ libs += [ zstd: "com.github.luben:zstd-jni:$versions.zstd", httpclient: "org.apache.httpcomponents:httpclient:$versions.httpclient", hdrHistogram: "org.hdrhistogram:HdrHistogram:$versions.hdrHistogram", + log4j1Bridge2Api: "org.apache.logging.log4j:log4j-1.2-api:$versions.log4j2", log4j2Api: "org.apache.logging.log4j:log4j-api:$versions.log4j2", log4j2Core: "org.apache.logging.log4j:log4j-core:$versions.log4j2", log4j2CoreTest: "org.apache.logging.log4j:log4j-core-test:$versions.log4j2", diff --git a/group-coordinator/src/test/resources/log4j2.properties b/group-coordinator/src/test/resources/log4j2.properties index 01ee02c2986ae..2d5b98a58e64f 100644 --- a/group-coordinator/src/test/resources/log4j2.properties +++ b/group-coordinator/src/test/resources/log4j2.properties @@ -12,28 +12,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=GroupCoordinatorTestConfig -appenders=stdout +name=TestConfig +appenders=console -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=DEBUG -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT -loggers=org.apache.kafka,org.apache.zookeeper +loggers=kafka,zookeeper -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=DEBUG -logger.org.apache.kafka.additivity=false -logger.org.apache.kafka.appenderRefs=stdout -logger.org.apache.kafka.appenderRef.stdout.ref=STDOUT +logger.kafka.name=org.apache.kafka +logger.kafka.level=DEBUG -logger.org.apache.zookeeper.name=org.apache.zookeeper -logger.org.apache.zookeeper.level=WARN -logger.org.apache.zookeeper.additivity=false -logger.org.apache.zookeeper.appenderRefs=stdout -logger.org.apache.zookeeper.appenderRef.stdout.ref=STDOUT +logger.zookeeper.name=org.apache.zookeeper +logger.zookeeper.level=WARN diff --git a/metadata/src/test/resources/log4j2.properties b/metadata/src/test/resources/log4j2.properties index 39356d69dfd49..2d5b98a58e64f 100644 --- a/metadata/src/test/resources/log4j2.properties +++ b/metadata/src/test/resources/log4j2.properties @@ -12,22 +12,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=MetadatTestConfig -appenders=stdout +name=TestConfig +appenders=console -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=DEBUG -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT -loggers=org.apache.kafka,org.apache.zookeeper +loggers=kafka,zookeeper -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=DEBUG +logger.kafka.name=org.apache.kafka +logger.kafka.level=DEBUG -logger.org.apache.zookeeper.name=org.apache.zookeeper -logger.org.apache.zookeeper.level=WARN +logger.zookeeper.name=org.apache.zookeeper +logger.zookeeper.level=WARN diff --git a/raft/bin/test-kraft-server-start.sh b/raft/bin/test-kraft-server-start.sh index 6efac054acaf5..2eefdd4912a6e 100755 --- a/raft/bin/test-kraft-server-start.sh +++ b/raft/bin/test-kraft-server-start.sh @@ -17,7 +17,8 @@ base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/kraft-log4j2.properties" + echo "DEPRECATED: using log4j 1.x configuration. 
To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/kraft-log4j2.properties\"'" + export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/kraft-log4j.properties" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/tools/src/test/resources/log4j2.properties b/raft/config/kraft-log4j.properties similarity index 61% rename from tools/src/test/resources/log4j2.properties rename to raft/config/kraft-log4j.properties index 00bfaa223cbb4..08c13eb17f818 100644 --- a/tools/src/test/resources/log4j2.properties +++ b/raft/config/kraft-log4j.properties @@ -12,22 +12,13 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -name=ToolsTestConfig -appenders=stdout -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +log4j.rootLogger=INFO, stderr -rootLogger.level=INFO -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +log4j.appender.stderr=org.apache.log4j.ConsoleAppender +log4j.appender.stderr.layout=org.apache.log4j.PatternLayout +log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.stderr.Target=System.err -loggers=org.apache.kafka,org.eclipse.jetty - -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=INFO - -logger.org.eclipse.jetty.name=org.eclipse.jetty -logger.org.eclipse.jetty.level=INFO +log4j.logger.org.apache.kafka.raft=INFO +log4j.logger.org.apache.kafka.snapshot=INFO \ No newline at end of file diff --git a/raft/config/kraft-log4j2.properties b/raft/config/kraft-log4j2.properties index 76d756ef39ff0..4da4ae4487695 100644 --- a/raft/config/kraft-log4j2.properties +++ b/raft/config/kraft-log4j2.properties @@ -14,22 +14,22 @@ # limitations under the License. 
name=RaftConfig -appenders=stderr +appenders=console -appender.stderr.type=Console -appender.stderr.name=STDERR -appender.stderr.layout.type=PatternLayout -appender.stderr.layout.pattern=[%d] %p %m (%c)%n -appender.stderr.target=SYSTEM_ERR +appender.console.type=Console +appender.console.name=STDERR +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c)%n +appender.console.target=SYSTEM_ERR rootLogger.level=INFO -rootLogger.appenderRefs=stderr -rootLogger.appenderRef.stderr.ref=STDERR +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDERR -loggers=org.apache.kafka.raft,org.apache.kafka.snapshot +loggers=raft,snapshot -logger.org.apache.kafka.raft.name=org.apache.kafka.raft -logger.org.apache.kafka.raft.level=INFO +logger.raft.name=org.apache.kafka.raft +logger.raft.level=INFO -logger.org.apache.kafka.snapshot.name=org.apache.kafka.snapshot -logger.org.apache.kafka.snapshot.level=INFO +logger.snapshot.name=org.apache.kafka.snapshot +logger.snapshot.level=INFO diff --git a/raft/src/test/resources/log4j2.properties b/raft/src/test/resources/log4j2.properties index 94d0feb0087ef..ed4c9a98da666 100644 --- a/raft/src/test/resources/log4j2.properties +++ b/raft/src/test/resources/log4j2.properties @@ -12,22 +12,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=RaftTestConfig -appenders=stdout +name=TestConfig +appenders=console -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=OFF -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT -loggers=org.apache.kafka.raft,org.apache.kafka.snapshot +loggers=raft,snapshot -logger.org.apache.kafka.raft.name=org.apache.kafka.raft -logger.org.apache.kafka.raft.level=ERROR +logger.raft.name=org.apache.kafka.raft +logger.raft.level=ERROR -logger.org.apache.kafka.snapshot.name=org.apache.kafka.snapshot -logger.org.apache.kafka.snapshot.level=ERROR +logger.snapshot.name=org.apache.kafka.snapshot +logger.snapshot.level=ERROR diff --git a/server-common/src/test/resources/test/log4j2.properties b/server-common/src/test/resources/test/log4j2.properties index d889660be3c35..a12aced7a9c64 100644 --- a/server-common/src/test/resources/test/log4j2.properties +++ b/server-common/src/test/resources/test/log4j2.properties @@ -12,19 +12,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=ServerCommonTestConfig -appenders=stdout +name=TestConfig +appenders=console -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=INFO -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT -loggers=org.apache.kafka +loggers=kafka -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=INFO +logger.kafka.name=org.apache.kafka +logger.kafka.level=INFO diff --git a/shell/src/test/resources/log4j2.properties b/shell/src/test/resources/log4j2.properties index 85f7978580e01..2b0b46645f1cd 100644 --- a/shell/src/test/resources/log4j2.properties +++ b/shell/src/test/resources/log4j2.properties @@ -12,14 +12,14 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=ShellTestConfig -appenders=stdout +name=TestConfig +appenders=console -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=DEBUG -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT diff --git a/storage/src/test/resources/log4j2.properties b/storage/src/test/resources/log4j2.properties index 3bc0f99475be7..91db144ec94e9 100644 --- a/storage/src/test/resources/log4j2.properties +++ b/storage/src/test/resources/log4j2.properties @@ -12,42 +12,42 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=StorageTestConfig -appenders=stdout,fileAppender +name=TestConfig +appenders=console,file -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n -appender.fileAppender.type=RollingFile -appender.fileAppender.name=FileAppender -appender.fileAppender.fileName=storage.log -appender.fileAppender.layout.type=PatternLayout -appender.fileAppender.layout.pattern=%d [%t] %-5p %c %x - %m%n +appender.file.type=RollingFile +appender.file.name=FileAppender +appender.file.fileName=storage.log +appender.file.layout.type=PatternLayout +appender.file.layout.pattern=%d [%t] %-5p %c %x - %m%n -appender.fileAppender.filePattern=storage-%d{yyyy-MM-dd}.log -appender.fileAppender.policies.type=Policies -appender.fileAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.fileAppender.policies.time.interval=1 +appender.file.filePattern=storage-%d{yyyy-MM-dd}.log +appender.file.policies.type=Policies +appender.file.policies.time.type=TimeBasedTriggeringPolicy +appender.file.policies.time.interval=1 rootLogger.level=OFF -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT -loggers=org.apache.kafka.server.log.remote.storage,org.apache.kafka.server.log.remote.metadata.storage,kafka.log.remote +loggers=remoteStorage,remoteMetadataStorage,remote -logger.org.apache.kafka.server.log.remote.storage.name=org.apache.kafka.server.log.remote.storage -logger.org.apache.kafka.server.log.remote.storage.level=INFO -logger.org.apache.kafka.server.log.remote.storage.appenderRefs=fileAppender -logger.org.apache.kafka.server.log.remote.storage.appenderRef.fileAppender.ref=FileAppender 
+logger.remoteStorage.name=org.apache.kafka.server.log.remote.storage +logger.remoteStorage.level=INFO +logger.remoteStorage.appenderRefs=file +logger.remoteStorage.appenderRef.fileAppender.ref=FileAppender -logger.org.apache.kafka.server.log.remote.metadata.storage.name=org.apache.kafka.server.log.remote.metadata.storage -logger.org.apache.kafka.server.log.remote.metadata.storage.level=INFO -logger.org.apache.kafka.server.log.remote.metadata.storage.appenderRefs=fileAppender -logger.org.apache.kafka.server.log.remote.metadata.storage.appenderRef.fileAppender.ref=FileAppender +logger.remoteMetadataStorage.name=org.apache.kafka.server.log.remote.metadata.storage +logger.remoteMetadataStorage.level=INFO +logger.remoteMetadataStorage.appenderRefs=file +logger.remoteMetadataStorage.appenderRef.fileAppender.ref=FileAppender -logger.kafka.log.remote.name=kafka.log.remote -logger.kafka.log.remote.level=INFO -logger.kafka.log.remote.appenderRefs=fileAppender -logger.kafka.log.remote.appenderRef.fileAppender.ref=FileAppender +logger.remote.name=kafka.log.remote +logger.remote.level=INFO +logger.remote.appenderRefs=file +logger.remote.appenderRef.fileAppender.ref=FileAppender diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java index 14e0484158b55..1e8c96f6f5299 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractKeyValueStoreTest.java @@ -668,5 +668,5 @@ public void prefixScanShouldNotThrowConcurrentModificationException() { iter.next(); } } - } + } } diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java index 8a0ad6d9a00af..45941ecb3cfc4 
100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractSessionBytesStoreTest.java @@ -921,7 +921,7 @@ public void shouldNotThrowInvalidRangeExceptionWithNegativeFromKey() { final String keyTo = Serdes.String().deserializer() .deserialize("", Serdes.Integer().serializer().serialize("", 1)); - try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AbstractSessionBytesStoreTest.class); + try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); final KeyValueIterator, Long> iterator = sessionStore.findSessions(keyFrom, keyTo, 0L, 10L)) { assertFalse(iterator.hasNext()); diff --git a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java index af96be1bc13f0..76e3306922ebe 100644 --- a/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java +++ b/streams/src/test/java/org/apache/kafka/streams/state/internals/AbstractWindowBytesStoreTest.java @@ -971,7 +971,7 @@ public void shouldReturnSameResultsForSingleKeyFetchAndEqualKeyRangeFetch() { @Test public void shouldNotThrowInvalidRangeExceptionWithNegativeFromKey() { - try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(AbstractWindowBytesStoreTest.class); + try (final LogCaptureAppender appender = LogCaptureAppender.createAndRegister(); final KeyValueIterator, String> iterator = windowStore.fetch(-1, 1, 0L, 10L)) { assertFalse(iterator.hasNext()); diff --git a/streams/src/test/resources/log4j2.properties b/streams/src/test/resources/log4j2.properties index 81f5117252aa3..c2d42f2a96bae 100644 --- a/streams/src/test/resources/log4j2.properties +++ b/streams/src/test/resources/log4j2.properties @@ -22,48 +22,43 @@ 
appender.console.layout.type=PatternLayout appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=INFO -rootLogger.appenderRefs=stdout +rootLogger.appenderRefs=console rootLogger.appenderRef.stdout.ref=STDOUT -#appender.stdout.type=Console -#appender.stdout.name=stdout -#appender.stdout.layout.type=PatternLayout -#appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n - -loggers=kafka,org.apache.zookeeper,org.apache.kafka.clients.producer.ProducerConfig,org.apache.kafka.clients.consumer.ConsumerConfig,org.apache.kafka.clients.admin.AdminClientConfig,org.apache.kafka.clients,org.apache.kafka.streams.StreamsConfig,org.apache.kafka.streams,org.apache.kafka,state.change.logger +loggers=kafka,stateChangeLogger,apacheKafka,zookeeper,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig logger.kafka.name=kafka logger.kafka.level=ERROR -logger.state.change.logger.name=state.change.logger -logger.state.change.logger.level=ERROR +logger.stateChangeLogger.name=state.change.logger +logger.stateChangeLogger.level=ERROR -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=ERROR +logger.apacheKafka.name=org.apache.kafka +logger.apacheKafka.level=ERROR -logger.org.apache.zookeeper.name=org.apache.zookeeper -logger.org.apache.zookeeper.level=ERROR +logger.zookeeper.name=org.apache.zookeeper +logger.zookeeper.level=ERROR -logger.org.apache.kafka.clients.name=org.apache.kafka.clients -logger.org.apache.kafka.clients.level=ERROR +logger.kafkaClients.name=org.apache.kafka.clients +logger.kafkaClients.level=ERROR -logger.org.apache.kafka.clients.consumer.name=org.apache.kafka.clients.consumer -logger.org.apache.kafka.clients.consumer.level=INFO +logger.consumer.name=org.apache.kafka.clients.consumer +logger.consumer.level=INFO -logger.org.apache.kafka.clients.producer.name=org.apache.kafka.clients.producer -logger.org.apache.kafka.clients.producer.level=INFO 
+logger.producer.name=org.apache.kafka.clients.producer +logger.producer.level=INFO -logger.org.apache.kafka.streams.name=org.apache.kafka.streams -logger.org.apache.kafka.streams.level=INFO +logger.streams.name=org.apache.kafka.streams +logger.streams.level=INFO -logger.org.apache.kafka.clients.producer.ProducerConfig.name=org.apache.kafka.clients.producer.ProducerConfig -logger.org.apache.kafka.clients.producer.ProducerConfig.level=ERROR +logger.producerConfig.name=org.apache.kafka.clients.producer.ProducerConfig +logger.producerConfig.level=ERROR -logger.org.apache.kafka.clients.consumer.ConsumerConfig.name=org.apache.kafka.clients.consumer.ConsumerConfig -logger.org.apache.kafka.clients.consumer.ConsumerConfig.level=ERROR +logger.consumerConfig.name=org.apache.kafka.clients.consumer.ConsumerConfig +logger.consumerConfig.level=ERROR -logger.org.apache.kafka.clients.admin.AdminClientConfig.name=org.apache.kafka.clients.admin.AdminClientConfig -logger.org.apache.kafka.clients.admin.AdminClientConfig.level=ERROR +logger.adminClientConfig.name=org.apache.kafka.clients.admin.AdminClientConfig +logger.adminClientConfig.level=ERROR -logger.org.apache.kafka.streams.StreamsConfig.name=org.apache.kafka.streams.StreamsConfig -logger.org.apache.kafka.streams.StreamsConfig.level=ERROR +logger.streamsConfig.name=org.apache.kafka.streams.StreamsConfig +logger.streamsConfig.level=ERROR diff --git a/streams/streams-scala/src/test/resources/log4j2.properties b/streams/streams-scala/src/test/resources/log4j2.properties index 48c9aae534006..9284c02f92f8b 100644 --- a/streams/streams-scala/src/test/resources/log4j2.properties +++ b/streams/streams-scala/src/test/resources/log4j2.properties @@ -17,27 +17,25 @@ # limitations under the License. 
name=StreamsScalaTestConfig -appenders=rolling +appenders=console,rolling -rootLogger.level=INFO -rootLogger.appenderRefs=R -rootLogger.appenderRef.R.ref=R +appender.console.type=Console +appender.console.name=A1 -appender.A1.type=Console -appender.A1.name=A1 -appender.A1.layout.type=PatternLayout -appender.A1.layout.pattern=%-4r [%t] %-5p %c %x - %m%n +appender.rolling.type=RollingFile +appender.rolling.name=R +appender.rolling.fileName=logs/kafka-streams-scala.log +appender.rolling.filePattern=logs/kafka-streams-scala.log.%i +appender.rolling.layout.type=PatternLayout +appender.rolling.layout.pattern=%-4r [%t] %-5p %c %x - %m%n -appender.R.type=RollingFile -appender.R.name=R -appender.R.fileName=logs/kafka-streams-scala.log -appender.R.filePattern=logs/kafka-streams-scala.log.%i -appender.R.layout.type=PatternLayout -appender.R.layout.pattern=%-4r [%t] %-5p %c %x - %m%n +appender.rolling.policies.type=Policies +appender.rolling.policies.size.type=SizeBasedTriggeringPolicy +appender.rolling.policies.size.size=100KB -appender.R.policies.type=Policies -appender.R.policies.size.type=SizeBasedTriggeringPolicy -appender.R.policies.size.size=100KB +appender.rolling.strategy.type=DefaultRolloverStrategy +appender.rolling.strategy.max=1 -appender.R.strategy.type=DefaultRolloverStrategy -appender.R.strategy.max=1 +rootLogger.level=INFO +rootLogger.appenderRefs=R +rootLogger.appenderRef.R.ref=R diff --git a/streams/test-utils/src/test/resources/log4j2.properties b/streams/test-utils/src/test/resources/log4j2.properties index 91ffcef88a762..51f3728b70e4a 100644 --- a/streams/test-utils/src/test/resources/log4j2.properties +++ b/streams/test-utils/src/test/resources/log4j2.properties @@ -13,18 +13,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
name=StreamsTestUtilsTestConfig -appenders=stdout +appenders=console -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n rootLogger.level=INFO -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +rootLogger.appenderRefs=console +rootLogger.appenderRef.console.ref=STDOUT -loggers=org.apache.kafka +loggers=kafka -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=INFO +logger.kafka.name=org.apache.kafka +logger.kafka.level=INFO diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py index 4780b5e714f1b..c84a3ec43c31e 100644 --- a/tests/kafkatest/services/connect.py +++ b/tests/kafkatest/services/connect.py @@ -38,7 +38,7 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service): LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log") STDOUT_FILE = os.path.join(PERSISTENT_ROOT, "connect.stdout") STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j2.properties") + LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid") EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties") CONNECT_REST_PORT = 8083 @@ -364,7 +364,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, 
self.render('connect_log4j.properties', log_file=self.LOG_FILE)) remote_connector_configs = [] for idx, template in enumerate(self.connector_config_templates): target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties") @@ -421,7 +421,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE)) if self.connector_config_templates: raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API") diff --git a/tests/kafkatest/services/console_consumer.py b/tests/kafkatest/services/console_consumer.py index 4f11f370b3cd2..fb87f20df1993 100644 --- a/tests/kafkatest/services/console_consumer.py +++ b/tests/kafkatest/services/console_consumer.py @@ -36,7 +36,7 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService) STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "console_consumer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "console_consumer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "console_consumer.properties") JMX_TOOL_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.log") JMX_TOOL_ERROR_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log") @@ -253,7 +253,7 @@ def _worker(self, idx, node): node.account.create_file(ConsoleConsumer.CONFIG_FILE, prop_file) # Create and upload log properties - log_config = self.render('tools_log4j2.properties', 
log_file=ConsoleConsumer.LOG_FILE) + log_config = self.render('tools_log4j.properties', log_file=ConsoleConsumer.LOG_FILE) node.account.create_file(ConsoleConsumer.LOG4J_CONFIG, log_config) # Run and capture output diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index 1219b8e11b8e0..7d6f42d27b7ca 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -145,7 +145,7 @@ class for details. """ PERSISTENT_ROOT = "/mnt/kafka" STDOUT_STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "server-start-stdout-stderr.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties") # Logs such as controller.log, server.log, etc all go here OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs") OPERATIONAL_LOG_INFO_DIR = os.path.join(OPERATIONAL_LOG_DIR, "info") @@ -896,7 +896,7 @@ def start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG, self.render('log4j2.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + node.account.create_file(self.LOG4J_CONFIG, self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary diff --git a/tests/kafkatest/services/kafka/templates/log4j.properties b/tests/kafkatest/services/kafka/templates/log4j.properties new file mode 100644 index 0000000000000..04c9bd5d6ab6f --- /dev/null +++ b/tests/kafkatest/services/kafka/templates/log4j.properties @@ -0,0 +1,141 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +log4j.rootLogger={{ log_level|default("DEBUG") }}, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n + +# INFO level appenders +log4j.appender.kafkaInfoAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaInfoAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaInfoAppender.File={{ log_dir }}/info/server.log +log4j.appender.kafkaInfoAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.kafkaInfoAppender.Threshold=INFO + +log4j.appender.stateChangeInfoAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.stateChangeInfoAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.stateChangeInfoAppender.File={{ log_dir }}/info/state-change.log +log4j.appender.stateChangeInfoAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.stateChangeInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.stateChangeInfoAppender.Threshold=INFO + +log4j.appender.requestInfoAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.requestInfoAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.requestInfoAppender.File={{ log_dir }}/info/kafka-request.log 
+log4j.appender.requestInfoAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.requestInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.requestInfoAppender.Threshold=INFO + +log4j.appender.cleanerInfoAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.cleanerInfoAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.cleanerInfoAppender.File={{ log_dir }}/info/log-cleaner.log +log4j.appender.cleanerInfoAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.cleanerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.cleanerInfoAppender.Threshold=INFO + +log4j.appender.controllerInfoAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.controllerInfoAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.controllerInfoAppender.File={{ log_dir }}/info/controller.log +log4j.appender.controllerInfoAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.controllerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.controllerInfoAppender.Threshold=INFO + +log4j.appender.authorizerInfoAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.authorizerInfoAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.authorizerInfoAppender.File={{ log_dir }}/info/kafka-authorizer.log +log4j.appender.authorizerInfoAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.authorizerInfoAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.authorizerInfoAppender.Threshold=INFO + +# DEBUG level appenders +log4j.appender.kafkaDebugAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.kafkaDebugAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.kafkaDebugAppender.File={{ log_dir }}/debug/server.log +log4j.appender.kafkaDebugAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.kafkaDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.kafkaDebugAppender.Threshold=DEBUG + 
+log4j.appender.stateChangeDebugAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.stateChangeDebugAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.stateChangeDebugAppender.File={{ log_dir }}/debug/state-change.log +log4j.appender.stateChangeDebugAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.stateChangeDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.stateChangeDebugAppender.Threshold=DEBUG + +log4j.appender.requestDebugAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.requestDebugAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.requestDebugAppender.File={{ log_dir }}/debug/kafka-request.log +log4j.appender.requestDebugAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.requestDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.requestDebugAppender.Threshold=DEBUG + +log4j.appender.cleanerDebugAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.cleanerDebugAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.cleanerDebugAppender.File={{ log_dir }}/debug/log-cleaner.log +log4j.appender.cleanerDebugAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.cleanerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.cleanerDebugAppender.Threshold=DEBUG + +log4j.appender.controllerDebugAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.controllerDebugAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.controllerDebugAppender.File={{ log_dir }}/debug/controller.log +log4j.appender.controllerDebugAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.controllerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.controllerDebugAppender.Threshold=DEBUG + +log4j.appender.authorizerDebugAppender=org.apache.log4j.DailyRollingFileAppender +log4j.appender.authorizerDebugAppender.DatePattern='.'yyyy-MM-dd-HH +log4j.appender.authorizerDebugAppender.File={{ log_dir 
}}/debug/kafka-authorizer.log +log4j.appender.authorizerDebugAppender.layout=org.apache.log4j.PatternLayout +log4j.appender.authorizerDebugAppender.layout.ConversionPattern=[%d] %p %m (%c)%n +log4j.appender.authorizerDebugAppender.Threshold=DEBUG + +# Turn on all our debugging info +log4j.logger.kafka.producer.async.DefaultEventHandler={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender +log4j.logger.kafka.client.ClientUtils={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender +log4j.logger.kafka.perf={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender +log4j.logger.kafka.perf.ProducerPerformance$ProducerThread={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender +log4j.logger.kafka={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender + +log4j.logger.kafka.network.RequestChannel$={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender +log4j.additivity.kafka.network.RequestChannel$=false + +log4j.logger.kafka.network.Processor={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender +log4j.logger.kafka.server.KafkaApis={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender +log4j.additivity.kafka.server.KafkaApis=false +log4j.logger.kafka.request.logger={{ log_level|default("DEBUG") }}, requestInfoAppender, requestDebugAppender +log4j.additivity.kafka.request.logger=false + +log4j.logger.org.apache.kafka.raft={{ log_level|default("DEBUG") }}, controllerInfoAppender, controllerDebugAppender +log4j.logger.org.apache.kafka.controller={{ log_level|default("DEBUG") }}, controllerInfoAppender, controllerDebugAppender +log4j.logger.kafka.controller={{ log_level|default("DEBUG") }}, controllerInfoAppender, controllerDebugAppender +log4j.additivity.kafka.controller=false + +log4j.logger.kafka.log.LogCleaner={{ log_level|default("DEBUG") }}, cleanerInfoAppender, cleanerDebugAppender 
+log4j.additivity.kafka.log.LogCleaner=false + +log4j.logger.state.change.logger={{ log_level|default("DEBUG") }}, stateChangeInfoAppender, stateChangeDebugAppender +log4j.additivity.state.change.logger=false + +#Change this to debug to get the actual audit log for authorizer. +log4j.logger.kafka.authorizer.logger={{ log_level|default("DEBUG") }}, authorizerInfoAppender, authorizerDebugAppender +log4j.additivity.kafka.authorizer.logger=false + +#New Group Coordinator logging. +log4j.logger.org.apache.kafka.coordinator.group={{ log_level|default("DEBUG") }}, kafkaInfoAppender, kafkaDebugAppender +log4j.additivity.org.apache.kafka.coordinator.group=false diff --git a/tests/kafkatest/services/performance/consumer_performance.py b/tests/kafkatest/services/performance/consumer_performance.py index b9171c3f0d338..3325fe5298a2a 100644 --- a/tests/kafkatest/services/performance/consumer_performance.py +++ b/tests/kafkatest/services/performance/consumer_performance.py @@ -49,7 +49,7 @@ class ConsumerPerformanceService(PerformanceService): STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stdout") STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stderr") LOG_FILE = os.path.join(LOG_DIR, "consumer_performance.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "consumer.properties") logs = { @@ -160,7 +160,7 @@ def parse_results(self, line, version): def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % ConsumerPerformanceService.PERSISTENT_ROOT, allow_fail=False) - log_config = self.render('tools_log4j2.properties', log_file=ConsumerPerformanceService.LOG_FILE) + log_config = self.render('tools_log4j.properties', log_file=ConsumerPerformanceService.LOG_FILE) node.account.create_file(ConsumerPerformanceService.LOG4J_CONFIG, log_config) 
node.account.create_file(ConsumerPerformanceService.CONFIG_FILE, str(self.security_config)) self.security_config.setup_node(node) diff --git a/tests/kafkatest/services/performance/end_to_end_latency.py b/tests/kafkatest/services/performance/end_to_end_latency.py index 16a81c817423c..a35d2e1427480 100644 --- a/tests/kafkatest/services/performance/end_to_end_latency.py +++ b/tests/kafkatest/services/performance/end_to_end_latency.py @@ -30,7 +30,7 @@ class EndToEndLatencyService(PerformanceService): STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stdout") STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stderr") LOG_FILE = os.path.join(LOG_DIR, "end_to_end_latency.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "client.properties") logs = { @@ -103,7 +103,7 @@ def start_cmd(self, node): def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % EndToEndLatencyService.PERSISTENT_ROOT, allow_fail=False) - log_config = self.render('tools_log4j2.properties', log_file=EndToEndLatencyService.LOG_FILE) + log_config = self.render('tools_log4j.properties', log_file=EndToEndLatencyService.LOG_FILE) node.account.create_file(EndToEndLatencyService.LOG4J_CONFIG, log_config) client_config = str(self.security_config) diff --git a/tests/kafkatest/services/performance/producer_performance.py b/tests/kafkatest/services/performance/producer_performance.py index b8d2577ad1964..a990d4fe04527 100644 --- a/tests/kafkatest/services/performance/producer_performance.py +++ b/tests/kafkatest/services/performance/producer_performance.py @@ -32,7 +32,7 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService): STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "producer_performance.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, 
"producer_performance.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput, version=DEV_BRANCH, settings=None, intermediate_stats=False, client_id="producer-performance"): @@ -122,7 +122,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % ProducerPerformanceService.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j2.properties', log_file=ProducerPerformanceService.LOG_FILE) + log_config = self.render('tools_log4j.properties', log_file=ProducerPerformanceService.LOG_FILE) node.account.create_file(ProducerPerformanceService.LOG4J_CONFIG, log_config) cmd = self.start_cmd(node) diff --git a/tests/kafkatest/services/performance/templates/tools_log4j2.properties b/tests/kafkatest/services/performance/templates/tools_log4j.properties similarity index 66% rename from tests/kafkatest/services/performance/templates/tools_log4j2.properties rename to tests/kafkatest/services/performance/templates/tools_log4j.properties index 303f53e03f18b..13d7aec28681f 100644 --- a/tests/kafkatest/services/performance/templates/tools_log4j2.properties +++ b/tests/kafkatest/services/performance/templates/tools_log4j.properties @@ -14,18 +14,12 @@ # limitations under the License. 
# Define the root logger with appender file -name=KafkaTestsPerformanceTemplatesConfig +log4j.rootLogger = {{ log_level|default("INFO") }}, FILE -appenders=file - -appender.file.type=File -appender.file.name=FILE -appender.file.fileName={{ log_file }} -appender.file.append=false -appender.file.immediateFlush=true -appender.file.layout.type=PatternLayout -appender.file.layout.pattern=[%d] %p %m (%c)%n - -rootLogger.level={{ log_level|default("INFO") }} -rootLogger.appenderRefs=file -rootLogger.appenderRef.file.ref=FILE +log4j.appender.FILE=org.apache.log4j.FileAppender +log4j.appender.FILE.File={{ log_file }} +log4j.appender.FILE.ImmediateFlush=true +# Set the append to false, overwrite +log4j.appender.FILE.Append=false +log4j.appender.FILE.layout=org.apache.log4j.PatternLayout +log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n \ No newline at end of file diff --git a/tests/kafkatest/services/streams.py b/tests/kafkatest/services/streams.py index f8365d06ee7ad..696e9f58f7fab 100644 --- a/tests/kafkatest/services/streams.py +++ b/tests/kafkatest/services/streams.py @@ -38,7 +38,7 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service): STDERR_FILE = os.path.join(PERSISTENT_ROOT, "streams.stderr") JMX_LOG_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.log") JMX_ERR_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "streams.pid") CLEAN_NODE_ENABLED = True @@ -306,7 +306,7 @@ def start_node(self, node): node.account.mkdirs(self.PERSISTENT_ROOT) prop_file = self.prop_file() node.account.create_file(self.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j2.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j.properties', 
log_file=self.LOG_FILE)) self.logger.info("Starting StreamsTest process on " + str(node.account)) with node.account.monitor_log(self.STDOUT_FILE) as monitor: diff --git a/trogdor/src/test/resources/log4j2.properties b/tests/kafkatest/services/templates/tools_log4j.properties similarity index 58% rename from trogdor/src/test/resources/log4j2.properties rename to tests/kafkatest/services/templates/tools_log4j.properties index 0698efc7d75be..117fc919f99bd 100644 --- a/trogdor/src/test/resources/log4j2.properties +++ b/tests/kafkatest/services/templates/tools_log4j.properties @@ -12,22 +12,20 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -name=TrogdorTestConfig -appenders=stdout -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c:%L)%n +# Define the root logger with appender file +log4j.rootLogger = {{ log_level|default("INFO") }}, FILE -rootLogger.level=TRACE -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT +{% if loggers is defined %} +{% for logger, log_level in loggers.items() %} +log4j.logger.{{ logger }}={{ log_level }} +{% endfor %} +{% endif %} -loggers=org.apache.kafka,org.eclipse.jetty - -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=INFO - -logger.org.eclipse.jetty.name=org.eclipse.jetty -logger.org.eclipse.jetty.level=INFO +log4j.appender.FILE=org.apache.log4j.FileAppender +log4j.appender.FILE.File={{ log_file }} +log4j.appender.FILE.ImmediateFlush=true +# Set the append to true +log4j.appender.FILE.Append=true +log4j.appender.FILE.layout=org.apache.log4j.PatternLayout +log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n \ No newline at end of file diff --git a/tests/kafkatest/services/transactional_message_copier.py 
b/tests/kafkatest/services/transactional_message_copier.py index a248f2b7ce507..564a23fdcc389 100644 --- a/tests/kafkatest/services/transactional_message_copier.py +++ b/tests/kafkatest/services/transactional_message_copier.py @@ -31,7 +31,7 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "transactional_message_copier.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "transactional_message_copier.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") logs = { "transactional_message_copier_stdout": { @@ -75,7 +75,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % TransactionalMessageCopier.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j2.properties', + log_config = self.render('tools_log4j.properties', log_file=TransactionalMessageCopier.LOG_FILE) node.account.create_file(TransactionalMessageCopier.LOG4J_CONFIG, log_config) # Configure security diff --git a/tests/kafkatest/services/trogdor/templates/log4j.properties b/tests/kafkatest/services/trogdor/templates/log4j.properties new file mode 100644 index 0000000000000..252668e3dabf8 --- /dev/null +++ b/tests/kafkatest/services/trogdor/templates/log4j.properties @@ -0,0 +1,23 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +log4j.rootLogger=DEBUG, mylogger +log4j.logger.kafka=DEBUG +log4j.logger.org.apache.kafka=DEBUG +log4j.logger.org.eclipse=INFO +log4j.appender.mylogger=org.apache.log4j.FileAppender +log4j.appender.mylogger.File={{ log_path }} +log4j.appender.mylogger.layout=org.apache.log4j.PatternLayout +log4j.appender.mylogger.layout.ConversionPattern=[%d] %p %m (%c)%n diff --git a/tests/kafkatest/services/trogdor/trogdor.py b/tests/kafkatest/services/trogdor/trogdor.py index 615c08542879f..3b941fe9059eb 100644 --- a/tests/kafkatest/services/trogdor/trogdor.py +++ b/tests/kafkatest/services/trogdor/trogdor.py @@ -34,8 +34,8 @@ class TrogdorService(KafkaPathResolverMixin, Service): AGENT_STDOUT_STDERR The path where we store the agents's stdout/stderr output. COORDINATOR_LOG The path where we store the coordinator's log4j output. AGENT_LOG The path where we store the agent's log4j output. - AGENT_LOG4J_PROPERTIES The path to the agent log4j2.properties file for log config. - COORDINATOR_LOG4J_PROPERTIES The path to the coordinator log4j2.properties file for log config. + AGENT_LOG4J_PROPERTIES The path to the agent log4j.properties file for log config. + COORDINATOR_LOG4J_PROPERTIES The path to the coordinator log4j.properties file for log config. CONFIG_PATH The path to the trogdor configuration file. DEFAULT_AGENT_PORT The default port to use for trogdor_agent daemons. DEFAULT_COORDINATOR_PORT The default port to use for trogdor_coordinator daemons. 
@@ -48,8 +48,8 @@ class TrogdorService(KafkaPathResolverMixin, Service): AGENT_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-agent-stdout-stderr.log") COORDINATOR_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator.log") AGENT_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-agent.log") - COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j2.properties") - AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j2.properties") + COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j.properties") + AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j.properties") CONFIG_PATH = os.path.join(PERSISTENT_ROOT, "trogdor.conf") DEFAULT_AGENT_PORT=8888 DEFAULT_COORDINATOR_PORT=8889 @@ -142,7 +142,7 @@ def start_node(self, node): def _start_coordinator_node(self, node): node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES, - self.render('log4j2.properties', + self.render('log4j.properties', log_path=TrogdorService.COORDINATOR_LOG)) self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR, TrogdorService.COORDINATOR_LOG4J_PROPERTIES, @@ -151,7 +151,7 @@ def _start_coordinator_node(self, node): def _start_agent_node(self, node): node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES, - self.render('log4j2.properties', + self.render('log4j.properties', log_path=TrogdorService.AGENT_LOG)) self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR, TrogdorService.AGENT_LOG4J_PROPERTIES, diff --git a/tests/kafkatest/services/verifiable_consumer.py b/tests/kafkatest/services/verifiable_consumer.py index 7a8e890e18260..de1e6f2a1f2fc 100644 --- a/tests/kafkatest/services/verifiable_consumer.py +++ b/tests/kafkatest/services/verifiable_consumer.py @@ -215,7 +215,7 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, 
"verifiable_consumer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "verifiable_consumer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.properties") logs = { @@ -298,7 +298,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % VerifiableConsumer.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j2.properties', log_file=VerifiableConsumer.LOG_FILE) + log_config = self.render('tools_log4j.properties', log_file=VerifiableConsumer.LOG_FILE) node.account.create_file(VerifiableConsumer.LOG4J_CONFIG, log_config) # Create and upload config file diff --git a/tests/kafkatest/services/verifiable_producer.py b/tests/kafkatest/services/verifiable_producer.py index d4f29888cf533..ea6292d57725e 100644 --- a/tests/kafkatest/services/verifiable_producer.py +++ b/tests/kafkatest/services/verifiable_producer.py @@ -41,7 +41,7 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "verifiable_producer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j2.properties") + LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.properties") logs = { @@ -127,7 +127,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % VerifiableProducer.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j2.properties', log_file=VerifiableProducer.LOG_FILE) + log_config = self.render('tools_log4j.properties', log_file=VerifiableProducer.LOG_FILE) 
node.account.create_file(VerifiableProducer.LOG4J_CONFIG, log_config) # Configure security diff --git a/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java b/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java index 573874d0b200c..3cd3b0ec99ab0 100644 --- a/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java +++ b/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java @@ -24,16 +24,12 @@ import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.Namespace; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.apache.logging.log4j.core.LoggerContext; -import org.apache.logging.log4j.core.config.Configurator; +import org.apache.log4j.Logger; +import org.apache.log4j.PropertyConfigurator; -import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.nio.file.Files; -import java.nio.file.Path; import java.nio.file.Paths; import java.util.Properties; @@ -50,7 +46,7 @@ */ public class VerifiableLog4jAppender { - Logger logger = LogManager.getLogger(VerifiableLog4jAppender.class); + Logger logger = Logger.getLogger(VerifiableLog4jAppender.class); // If maxMessages < 0, log until the process is killed externally private long maxMessages = -1; @@ -239,16 +235,7 @@ public static VerifiableLog4jAppender createFromArgs(String[] args) { public VerifiableLog4jAppender(Properties props, int maxMessages) { this.maxMessages = maxMessages; - try { - Path tempConfigFile = Files.createTempFile("log4j2-", ".properties"); - try (FileOutputStream fos = new FileOutputStream(tempConfigFile.toFile()); - LoggerContext context = Configurator.initialize(null, tempConfigFile.toString())) { - props.store(fos, null); // 将 Properties 保存到文件 - Files.deleteIfExists(tempConfigFile); - } - } catch (IOException e) { - throw new RuntimeException(e); - } + PropertyConfigurator.configure(props); } public 
static void main(String[] args) { diff --git a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java index 1c577fe4c47aa..510c31231a9e4 100644 --- a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java +++ b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java @@ -39,7 +39,7 @@ import org.apache.kafka.server.quota.QuotaType; import org.apache.kafka.tools.reassign.ReassignPartitionsCommand; -import org.apache.logging.log4j.core.config.Configurator; +import org.apache.log4j.PropertyConfigurator; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartFrame; import org.jfree.chart.JFreeChart; @@ -97,7 +97,7 @@ public class ReplicationQuotasTestRig { private static final String DIR; static { - Configurator.initialize(null, "core/src/test/resources/log4j2.properties"); + PropertyConfigurator.configure("core/src/test/resources/log4j.properties"); new File("Experiments").mkdir(); DIR = "Experiments/Run" + Long.valueOf(System.currentTimeMillis()).toString().substring(8); diff --git a/tools/src/test/resources/log4j.properties b/tools/src/test/resources/log4j.properties new file mode 100644 index 0000000000000..3aca07dc53016 --- /dev/null +++ b/tools/src/test/resources/log4j.properties @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +log4j.rootLogger=INFO, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n + +log4j.logger.org.apache.kafka=INFO +log4j.logger.org.eclipse.jetty=INFO diff --git a/trogdor/src/test/resources/log4j.properties b/trogdor/src/test/resources/log4j.properties new file mode 100644 index 0000000000000..5291604d49ae5 --- /dev/null +++ b/trogdor/src/test/resources/log4j.properties @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+log4j.rootLogger=TRACE, stdout + +log4j.appender.stdout=org.apache.log4j.ConsoleAppender +log4j.appender.stdout.layout=org.apache.log4j.PatternLayout +log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n + +log4j.logger.org.apache.kafka=INFO +log4j.logger.org.eclipse.jetty=INFO From d12f3113debbe7cbdcfc7dddce2674d5203c2e08 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 10 Oct 2024 21:59:05 +0800 Subject: [PATCH 03/46] KAFKA-9366: Cleanup --- README.md | 2 +- bin/windows/connect-distributed.bat | 2 +- build.gradle | 5 ++--- .../common/utils/LogCaptureAppender.java | 8 ++++---- .../apache/kafka/connect/runtime/Loggers.java | 20 +++++++++++++------ .../src/test/resources/log4j2.properties | 1 - .../api/PlaintextAdminIntegrationTest.scala | 3 --- 7 files changed, 22 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index 45b83e1215d13..c950d37894ca4 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ Follow instructions in https://kafka.apache.org/quickstart ./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testTimeToNextUpdate ### Running a particular unit/integration test with log4j output ### -By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.properties` file in the module's `src/test/resources` directory. +By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j.properties` file in the module's `src/test/resources` directory. 
For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j.properties#L21) in `clients/src/test/resources/log4j2.properties` to `log4j.logger.org.apache.kafka=INFO` and then run: diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat index c355595777228..cef0306b2bb6a 100644 --- a/bin/windows/connect-distributed.bat +++ b/bin/windows/connect-distributed.bat @@ -28,7 +28,7 @@ popd rem Log4j settings IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%BASE_DIR%/config/connect-log4j2.properties' - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties ) "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %* diff --git a/build.gradle b/build.gradle index 4314072b64358..bd05dd7636338 100644 --- a/build.gradle +++ b/build.gradle @@ -124,7 +124,6 @@ ext { } runtimeTestLibs = [ -// libs.slf4jReload4j, libs.slf4jLog4j2, libs.junitPlatformLanucher, ] @@ -1541,8 +1540,8 @@ project(':group-coordinator') { "-m", "MessageDataGenerator", "JsonConverterGenerator" ] inputs.dir("src/main/resources/common/message") - .withPropertyName("messages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/coordinator/group/generated") } diff --git a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java index 71949e67356aa..42299f99a97ca 100644 --- 
a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java +++ b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java @@ -126,15 +126,15 @@ public void setClassLogger(final Class clazz, Level level) { @Override public void append(final LogEvent event) { synchronized (events) { - events.add(event.toImmutable()); + events.add(event); } } public List getMessages(String level) { return getEvents().stream() - .filter(e -> level.equals(e.getLevel())) - .map(Event::getMessage) - .collect(Collectors.toList()); + .filter(e -> level.equals(e.getLevel())) + .map(Event::getMessage) + .collect(Collectors.toList()); } public List getMessages() { diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java index d74dd7f2a6d12..89fcda10e8731 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java @@ -28,11 +28,13 @@ import org.slf4j.LoggerFactory; import java.util.ArrayList; +import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.TreeMap; import java.util.stream.Collectors; @@ -105,7 +107,7 @@ public synchronized Map allLevels() { .forEach(logger -> result.put(logger.getName(), loggerLevel(logger))); org.apache.logging.log4j.Logger root = rootLogger(); - if (!root.getLevel().equals(Level.OFF)) { + if (root.getLevel() != null && !root.getLevel().equals(Level.OFF)) { result.put(ROOT_LOGGER_NAME, loggerLevel(root)); } @@ -179,10 +181,16 @@ org.apache.logging.log4j.Logger lookupLogger(String logger) { List currentLoggers() { LoggerContext context = (LoggerContext) LogManager.getContext(false); - return context.getLoggers() - .stream() - .filter(logger -> 
!logger.getName().equals(ROOT_LOGGER_NAME)) - .collect(Collectors.toList()); + Collection loggerConfigs = context.getConfiguration().getLoggers().values(); + Set loggerNames = loggerConfigs.stream() + .map(LoggerConfig::getName) + .collect(Collectors.toSet()); + + List loggers = new ArrayList<>(); + for (String name : loggerNames) { + loggers.add(LogManager.getLogger(name)); + } + return loggers; } // visible for testing @@ -203,7 +211,7 @@ private void setLevel(org.apache.logging.log4j.Logger logger, Level level) { log.debug("Setting level of logger {} (excluding children) to {}", loggerName, level); Configurator.setLevel(loggerName, level); - context.updateLoggers(); +// context.updateLoggers(); lastModifiedTimes.put(loggerName, time.milliseconds()); } diff --git a/connect/runtime/src/test/resources/log4j2.properties b/connect/runtime/src/test/resources/log4j2.properties index 7b1ee4cd2991b..ba84db34499ca 100644 --- a/connect/runtime/src/test/resources/log4j2.properties +++ b/connect/runtime/src/test/resources/log4j2.properties @@ -48,4 +48,3 @@ logger.kafkaConsumer.level=DEBUG logger.coordinatorGroup.name=org.apache.kafka.coordinator.group logger.coordinatorGroup.level=DEBUG -#logger.coordinatorGroup.additivity=false diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala index a08fd6891b8da..707b03df073c6 100644 --- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala +++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala @@ -3060,9 +3060,6 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { LoggerFactory.getLogger("kafka.cluster.Replica").trace("Message to create the logger") val loggerConfig = describeBrokerLoggers() - // Logger name can't be empty. 
- assertNull(loggerConfig.get("")) - // "root" -> "OFF" val rootLogLevel = loggerConfig.get(Log4jController.ROOT_LOGGER).value assertEquals("OFF", rootLogLevel) From 7419f5fb8e6c0d813c2694a5474cad04e338ce3a Mon Sep 17 00:00:00 2001 From: frankvicky Date: Fri, 11 Oct 2024 21:52:27 +0800 Subject: [PATCH 04/46] revert unnecessary change revert unnecessary change --- .../org/apache/kafka/common/utils/LogCaptureAppender.java | 8 ++++---- .../java/org/apache/kafka/connect/runtime/Loggers.java | 3 +-- .../integration/StandaloneWorkerIntegrationTest.java | 1 - .../org/apache/kafka/connect/runtime/LoggersTest.java | 4 ++-- 4 files changed, 7 insertions(+), 9 deletions(-) diff --git a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java index 42299f99a97ca..9126e7f9934b9 100644 --- a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java +++ b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java @@ -158,10 +158,10 @@ public List getEvents() { throwableString = Optional.empty(); throwableClassName = Optional.empty(); } else { - StringWriter sw = new StringWriter(); - PrintWriter pw = new PrintWriter(sw); - throwable.printStackTrace(pw); - throwableString = Optional.of(sw.toString()); + StringWriter stringWriter = new StringWriter(); + PrintWriter printWriter = new PrintWriter(stringWriter); + throwable.printStackTrace(printWriter); + throwableString = Optional.of(stringWriter.toString()); throwableClassName = Optional.of(throwable.getClass().getName()); } diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java index 89fcda10e8731..952cedf109d06 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java @@ -107,7 
+107,7 @@ public synchronized Map allLevels() { .forEach(logger -> result.put(logger.getName(), loggerLevel(logger))); org.apache.logging.log4j.Logger root = rootLogger(); - if (root.getLevel() != null && !root.getLevel().equals(Level.OFF)) { + if (!root.getLevel().equals(Level.OFF)) { result.put(ROOT_LOGGER_NAME, loggerLevel(root)); } @@ -211,7 +211,6 @@ private void setLevel(org.apache.logging.log4j.Logger logger, Level level) { log.debug("Setting level of logger {} (excluding children) to {}", loggerName, level); Configurator.setLevel(loggerName, level); -// context.updateLoggers(); lastModifiedTimes.put(loggerName, time.milliseconds()); } diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java index 3b13487b3e7bd..69a65ba7bfbde 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/integration/StandaloneWorkerIntegrationTest.java @@ -114,7 +114,6 @@ public void testDynamicLogging() { final String namespace1 = "org.apache.kafka.connect"; final String level1 = "DEBUG"; connect.setLogLevel(namespace1, "ERROR", null); - // fixme Map currentLevels = testSetLoggingLevel(namespace1, level1, null, initialLevels); // Tests with scope=worker diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java index 7a9aa74b34d7d..08824ea116a37 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java @@ -75,8 +75,8 @@ public void testGetLoggersIgnoresNullLevels() { Loggers loggers = new TestLoggers(root, a, b); Map expectedLevels = Collections.singletonMap( - "b", 
- new LoggerLevel(Level.INFO.toString(), null) + "b", + new LoggerLevel(Level.INFO.toString(), null) ); Map actualLevels = loggers.allLevels(); assertEquals(expectedLevels, actualLevels); From d69df892004e4423d256c65e60d9f37108dab686 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Fri, 11 Oct 2024 22:07:08 +0800 Subject: [PATCH 05/46] revert unnecessary change --- build.gradle | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.gradle b/build.gradle index bd05dd7636338..d89728f55b6ba 100644 --- a/build.gradle +++ b/build.gradle @@ -1540,8 +1540,8 @@ project(':group-coordinator') { "-m", "MessageDataGenerator", "JsonConverterGenerator" ] inputs.dir("src/main/resources/common/message") - .withPropertyName("messages") - .withPathSensitivity(PathSensitivity.RELATIVE) + .withPropertyName("messages") + .withPathSensitivity(PathSensitivity.RELATIVE) outputs.cacheIf { true } outputs.dir("${projectDir}/build/generated/main/java/org/apache/kafka/coordinator/group/generated") } From 8ec412ded2272e6c251539b926cfc584a1f0e4ec Mon Sep 17 00:00:00 2001 From: frankvicky Date: Fri, 11 Oct 2024 22:13:08 +0800 Subject: [PATCH 06/46] KAFKA-9366: update readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c950d37894ca4..45b83e1215d13 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ Follow instructions in https://kafka.apache.org/quickstart ./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testTimeToNextUpdate ### Running a particular unit/integration test with log4j output ### -By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j.properties` file in the module's `src/test/resources` directory. +By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.properties` file in the module's `src/test/resources` directory. 
For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j.properties#L21) in `clients/src/test/resources/log4j2.properties` to `log4j.logger.org.apache.kafka=INFO` and then run: From 84c467cc8f0c10abeb973d5cbe4ac95e4ca8451d Mon Sep 17 00:00:00 2001 From: frankvicky Date: Wed, 23 Oct 2024 22:50:09 +0800 Subject: [PATCH 07/46] KAFKA-9366: Address comments for core module --- checkstyle/import-control-core.xml | 5 +++++ .../kafka/connect/runtime/LoggersTest.java | 4 ++++ .../scala/kafka/utils/Log4jController.scala | 6 ------ .../api/PlaintextAdminIntegrationTest.scala | 21 +++++++++++++++++++ 4 files changed, 30 insertions(+), 6 deletions(-) diff --git a/checkstyle/import-control-core.xml b/checkstyle/import-control-core.xml index ec032e9a5a785..b41d04b1ae698 100644 --- a/checkstyle/import-control-core.xml +++ b/checkstyle/import-control-core.xml @@ -153,4 +153,9 @@ + + + + + diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java index 08824ea116a37..6907190069414 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java @@ -175,6 +175,10 @@ public void testSetLevel() { @Test public void testSetRootLevel() { + // In this test case, we focus on setting the level for the root logger. + // Ideally, we want to start with a "clean" configuration to conduct this test case. + // By programmatically creating a new configuration at the beginning, we can ensure + // that this test case is not affected by existing Log4j configurations. 
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); Configuration config = loggerContext.getConfiguration(); LoggerConfig rootConfig = new LoggerConfig("", Level.ERROR, false); diff --git a/core/src/main/scala/kafka/utils/Log4jController.scala b/core/src/main/scala/kafka/utils/Log4jController.scala index b4f4a33960da9..d45b5394cf832 100755 --- a/core/src/main/scala/kafka/utils/Log4jController.scala +++ b/core/src/main/scala/kafka/utils/Log4jController.scala @@ -30,12 +30,6 @@ object Log4jController { val ROOT_LOGGER = "" - /** - * Returns given logger's parent's (or the first ancestor's) name. - * - * @throws IllegalArgumentException loggerName is null or empty. - */ - /** * Returns a map of the log4j loggers and their assigned log level. * If a logger does not have a log level assigned, we return the log level of the first ancestor with a level configured. diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala index 707b03df073c6..13b66e8744d18 100644 --- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala +++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala @@ -3176,6 +3176,27 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { assertEquals(newRootLogLevel, newRootLoggerConfig.get("kafka.server.ControllerServer").value()) } + @ParameterizedTest + @ValueSource(strings = Array("kraft")) + def testIncrementalAlterConfigsForLog4jLogLevelsCanSetToRootLogger(quorum: String): Unit = { + client = createAdminClient + val initialLoggerConfig = describeBrokerLoggers() + val initialRootLogLevel = initialLoggerConfig.get(Log4jController.ROOT_LOGGER).value() + val newRootLogLevel = LogLevelConfig.DEBUG_LOG_LEVEL + + val alterRootLoggerEntry = Seq( + new AlterConfigOp(new ConfigEntry(Log4jController.ROOT_LOGGER, newRootLogLevel), AlterConfigOp.OpType.SET) + 
).asJavaCollection + + alterBrokerLoggers(alterRootLoggerEntry, validateOnly = true) + val validatedRootLoggerConfig = describeBrokerLoggers() + assertEquals(initialRootLogLevel, validatedRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) + + alterBrokerLoggers(alterRootLoggerEntry) + val changedRootLoggerConfig = describeBrokerLoggers() + assertEquals(newRootLogLevel, changedRootLoggerConfig.get(Log4jController.ROOT_LOGGER).value()) + } + @ParameterizedTest @ValueSource(strings = Array("kraft")) def testIncrementalAlterConfigsForLog4jLogLevelsCannotResetRootLogger(quorum: String): Unit = { From 32ae43c8926453607778013aa21bef397ceffba3 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Wed, 23 Oct 2024 22:52:21 +0800 Subject: [PATCH 08/46] KAFKA-9366: Address comments for tools module --- build.gradle | 7 +++++-- checkstyle/import-control.xml | 4 ++++ .../org/apache/kafka/tools/VerifiableLog4jAppender.java | 7 +++++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index d89728f55b6ba..c110ef44b732d 100644 --- a/build.gradle +++ b/build.gradle @@ -2456,7 +2456,11 @@ project(':tools') { implementation libs.jacksonDataformatCsv implementation libs.jacksonJDK8Datatypes implementation libs.slf4jApi - implementation libs.slf4jReload4j + implementation libs.slf4jLog4j2 + implementation libs.log4j2Api + implementation libs.log4j2Core + implementation libs.log4j1Bridge2Api + implementation libs.spotbugs implementation libs.joptSimple implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation @@ -2483,7 +2487,6 @@ project(':tools') { testImplementation(libs.jfreechart) { exclude group: 'junit', module: 'junit' } - testImplementation libs.reload4j testImplementation libs.apachedsCoreApi testImplementation libs.apachedsInterceptorKerberos testImplementation libs.apachedsProtocolShared diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml index 3d4931f6e49f9..e34d91918decf 100644 --- 
a/checkstyle/import-control.xml +++ b/checkstyle/import-control.xml @@ -305,6 +305,10 @@ + + + + diff --git a/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java b/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java index 3cd3b0ec99ab0..09e4f9c7ef3d3 100644 --- a/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java +++ b/tools/src/main/java/org/apache/kafka/tools/VerifiableLog4jAppender.java @@ -24,8 +24,10 @@ import net.sourceforge.argparse4j.inf.ArgumentParserException; import net.sourceforge.argparse4j.inf.Namespace; -import org.apache.log4j.Logger; import org.apache.log4j.PropertyConfigurator; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.apache.logging.log4j.spi.LoggerContext; import java.io.IOException; import java.io.InputStream; @@ -46,7 +48,8 @@ */ public class VerifiableLog4jAppender { - Logger logger = Logger.getLogger(VerifiableLog4jAppender.class); + LoggerContext loggerContext = LogManager.getContext(false); + Logger logger = loggerContext.getLogger(VerifiableLog4jAppender.class); // If maxMessages < 0, log until the process is killed externally private long maxMessages = -1; From ae77d179abb62170783650257f30bcefa51d0c85 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Wed, 23 Oct 2024 23:34:59 +0800 Subject: [PATCH 09/46] KAFKA-9366: upgrade trogdor to log4j2 --- build.gradle | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/build.gradle b/build.gradle index c110ef44b732d..a1cc7e210a8c3 100644 --- a/build.gradle +++ b/build.gradle @@ -2526,7 +2526,9 @@ project(':trogdor') { implementation libs.jacksonDatabind implementation libs.jacksonJDK8Datatypes implementation libs.slf4jApi - runtimeOnly libs.reload4j + runtimeOnly libs.log4j2Api + runtimeOnly libs.log4j2Core + runtimeOnly libs.log4j1Bridge2Api implementation libs.jacksonJaxrsJsonProvider implementation libs.jerseyContainerServlet From 
dfdcb1cfcdf697fbc565af768f362904ff66b08d Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 24 Oct 2024 12:31:06 +0800 Subject: [PATCH 10/46] KAFKA-9366: update build.gradle for lib reference --- build.gradle | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/build.gradle b/build.gradle index a1cc7e210a8c3..3e2372f47dcc8 100644 --- a/build.gradle +++ b/build.gradle @@ -1874,11 +1874,12 @@ project(':clients') { testImplementation libs.log4j2Api testImplementation libs.log4j2Core testImplementation libs.log4j1Bridge2Api - testImplementation libs.bndlib testImplementation libs.spotbugs testImplementation libs.mockitoCore testImplementation libs.mockitoJunitJupiter // supports MockitoExtension + testCompileOnly libs.bndlib + testRuntimeOnly libs.jacksonDatabind testRuntimeOnly libs.jacksonJDK8Datatypes testRuntimeOnly runtimeTestLibs @@ -2631,10 +2632,10 @@ project(':streams') { implementation libs.slf4jApi implementation libs.jacksonAnnotations implementation libs.jacksonDatabind - implementation libs.bndlib // testCompileOnly prevents streams from exporting a dependency on test-utils, which would cause a dependency cycle testCompileOnly project(':streams:test-utils') + testCompileOnly libs.bndlib testImplementation project(':metadata') testImplementation project(':clients').sourceSets.test.output @@ -3511,7 +3512,6 @@ project(':connect:runtime') { implementation libs.log4j2Api implementation libs.log4j2Core implementation libs.log4j1Bridge2Api - implementation libs.bndlib implementation libs.spotbugs implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation implementation libs.jacksonAnnotations @@ -3528,6 +3528,8 @@ project(':connect:runtime') { implementation libs.mavenArtifact implementation libs.swaggerAnnotations + compileOnly libs.bndlib + // We use this library to generate OpenAPI docs for the REST API, but we don't want or need it at compile // or run time. 
So, we add it to a separate configuration, which we use later on during docs generation swagger libs.swaggerJaxrs2 @@ -3551,6 +3553,8 @@ project(':connect:runtime') { testImplementation libs.mockitoJunitJupiter testImplementation libs.httpclient + testCompileOnly libs.bndlib + testRuntimeOnly libs.bcpkix testRuntimeOnly runtimeTestLibs } From ae45089f87258bedccd8f02f91a4d234173b1d2e Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 24 Oct 2024 12:39:17 +0800 Subject: [PATCH 11/46] KAFKA-9366: Remove unnecessary semicolon --- .../integration/kafka/api/PlaintextAdminIntegrationTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala index 13b66e8744d18..3d7bb9a19fdc8 100644 --- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala +++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala @@ -3086,7 +3086,7 @@ class PlaintextAdminIntegrationTest extends BaseAdminIntegrationTest { def testIncrementalAlterConfigsForLog4jLogLevels(quorum: String): Unit = { client = createAdminClient - val ancestorLogger = "kafka"; + val ancestorLogger = "kafka" val initialLoggerConfig = describeBrokerLoggers() val initialKafkaLogLevel = initialLoggerConfig.get("kafka").value() assertEquals(initialKafkaLogLevel, initialLoggerConfig.get("kafka.server.ControllerServer").value()) From d1ed1d298f303a096da0f5eb8ba25c40d6101b44 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 24 Oct 2024 12:44:00 +0800 Subject: [PATCH 12/46] KAFKA-9366: Revert the line break --- raft/config/kraft-log4j.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/raft/config/kraft-log4j.properties b/raft/config/kraft-log4j.properties index 08c13eb17f818..14f739af05df5 100644 --- a/raft/config/kraft-log4j.properties +++ b/raft/config/kraft-log4j.properties @@ -21,4 
+21,4 @@ log4j.appender.stderr.layout.ConversionPattern=[%d] %p %m (%c)%n log4j.appender.stderr.Target=System.err log4j.logger.org.apache.kafka.raft=INFO -log4j.logger.org.apache.kafka.snapshot=INFO \ No newline at end of file +log4j.logger.org.apache.kafka.snapshot=INFO From bafb1778531d33efe78cdaaf8dbb8a275a786894 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 24 Oct 2024 19:49:35 +0800 Subject: [PATCH 13/46] KAFKA-9366: Remove reload4j and slf4j-reload4j dependencies --- build.gradle | 10 ++++------ gradle/dependencies.gradle | 3 --- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/build.gradle b/build.gradle index e2a220770fc5b..9ec75f45b64b3 100644 --- a/build.gradle +++ b/build.gradle @@ -164,7 +164,6 @@ allprojects { // ZooKeeper (potentially older and containing CVEs) libs.nettyHandler, libs.nettyTransportNativeEpoll, - libs.reload4j, libs.log4j2Api, libs.log4j2Core, libs.log4j1Bridge2Api @@ -1084,7 +1083,7 @@ project(':core') { implementation libs.dropwizardMetrics exclude module: 'slf4j-log4j12' exclude module: 'log4j' - // Both Kafka and Zookeeper use slf4j. ZooKeeper moved from log4j to logback in v3.8.0, but Kafka relies on reload4j. + // Both Kafka and Zookeeper use slf4j. ZooKeeper moved from log4j to logback in v3.8.0. // We are removing Zookeeper's dependency on logback so we have a singular logging backend.
exclude module: 'logback-classic' exclude module: 'logback-core' @@ -1155,9 +1154,6 @@ project(':core') { } tasks.create(name: "copyDependantLibs", type: Copy) { - from (configurations.compileClasspath) { - include('reload4j*jar') - } from (configurations.runtimeClasspath) { exclude('kafka-clients*') } @@ -2564,7 +2560,9 @@ project(':trogdor') { testImplementation project(':clients').sourceSets.test.output testImplementation libs.mockitoCore - testRuntimeOnly libs.slf4jReload4j + testRuntimeOnly libs.log4j2Api + testRuntimeOnly libs.log4j2Core + testRuntimeOnly libs.log4j1Bridge2Api testRuntimeOnly libs.junitPlatformLanucher } diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index 82caa504e63ed..47256c5f32ff4 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -117,7 +117,6 @@ versions += [ opentelemetryProto: "1.0.0-alpha", protobuf: "3.25.5", // a dependency of opentelemetryProto pcollections: "4.0.1", - reload4j: "1.2.25", rocksDB: "7.9.2", // When updating the scalafmt version please also update the version field in checkstyle/.scalafmt.conf. 
scalafmt now // has the version field as mandatory in its configuration, see @@ -219,13 +218,11 @@ libs += [ pcollections: "org.pcollections:pcollections:$versions.pcollections", opentelemetryProto: "io.opentelemetry.proto:opentelemetry-proto:$versions.opentelemetryProto", protobuf: "com.google.protobuf:protobuf-java:$versions.protobuf", - reload4j: "ch.qos.reload4j:reload4j:$versions.reload4j", rocksDBJni: "org.rocksdb:rocksdbjni:$versions.rocksDB", scalaLibrary: "org.scala-lang:scala-library:$versions.scala", scalaLogging: "com.typesafe.scala-logging:scala-logging_$versions.baseScala:$versions.scalaLogging", scalaReflect: "org.scala-lang:scala-reflect:$versions.scala", slf4jApi: "org.slf4j:slf4j-api:$versions.slf4j", - slf4jReload4j: "org.slf4j:slf4j-reload4j:$versions.slf4j", snappy: "org.xerial.snappy:snappy-java:$versions.snappy", swaggerAnnotations: "io.swagger.core.v3:swagger-annotations:$swaggerVersion", swaggerJaxrs2: "io.swagger.core.v3:swagger-jaxrs2:$swaggerVersion", From 2b224ea5709eace5aec817deacc14c5787986cf7 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Tue, 29 Oct 2024 12:26:00 +0800 Subject: [PATCH 14/46] KAFKA-9366: Address comments --- bin/connect-distributed.sh | 10 ++++++++-- bin/connect-mirror-maker.sh | 10 ++++++++-- bin/connect-standalone.sh | 10 ++++++++-- bin/kafka-server-start.sh | 10 ++++++++-- bin/windows/connect-distributed.bat | 6 +++++- bin/windows/connect-standalone.bat | 8 ++++++-- bin/windows/kafka-server-start.bat | 6 +++++- bin/windows/zookeeper-server-start.bat | 8 +++++--- bin/zookeeper-server-start.sh | 2 -- 9 files changed, 53 insertions(+), 17 deletions(-) diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh index f367fe8e4aba7..fff5e9595e2e2 100755 --- a/bin/connect-distributed.sh +++ b/bin/connect-distributed.sh @@ -22,9 +22,15 @@ fi base_dir=$(dirname $0) -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - echo "DEPRECATED: using log4j 1.x configuration. 
To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/connect-log4j2.properties\"'" +if [ -f "$base_dir/../config/connect-log4j.properties" ]; then + echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 + echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 + echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" +elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" +elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh index 8bf70f178bffc..56368d61391e2 100755 --- a/bin/connect-mirror-maker.sh +++ b/bin/connect-mirror-maker.sh @@ -22,9 +22,15 @@ fi base_dir=$(dirname $0) -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/connect-log4j2.properties\"'" +if [ -f "$base_dir/../config/connect-log4j.properties" ]; then + echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 + echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 + echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" +elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" +elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh index 5ea9694c371c8..a1d648dc1e129 100755 --- a/bin/connect-standalone.sh +++ b/bin/connect-standalone.sh @@ -22,9 +22,15 @@ fi base_dir=$(dirname $0) -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/connect-log4j2.properties\"'" +if [ -f "$base_dir/../config/connect-log4j.properties" ]; then + echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 + echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 + echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" +elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" +elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh index 831d07b115bdb..fb54a15cdcead 100755 --- a/bin/kafka-server-start.sh +++ b/bin/kafka-server-start.sh @@ -21,9 +21,15 @@ then fi base_dir=$(dirname $0) -if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/log4j2.properties\"'" +if [ -f "$base_dir/../config/log4j.properties" ]; then + echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/log4j.properties >&2 + echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 + echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" +elif [ -f "$base_dir/../config/log4j2.properties" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.properties" +elif [ -f "$base_dir/../config/log4j2.xml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.xml" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat index cef0306b2bb6a..9e9a07ebe1fe6 100644 --- a/bin/windows/connect-distributed.bat +++ b/bin/windows/connect-distributed.bat @@ -26,9 +26,13 @@ set BASE_DIR=%CD% popd rem Log4j settings -IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( +IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%BASE_DIR%/config/connect-log4j2.properties' set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.properties" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.properties +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml ) "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %* diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat index 8c1d19ee5ccd4..167bfe65731f3 100644 --- a/bin/windows/connect-standalone.bat +++ b/bin/windows/connect-standalone.bat @@ -26,9 +26,13 @@ set BASE_DIR=%CD% popd rem Log4j settings -IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( +IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( echo DEPRECATED: using log4j 1.x configuration. 
To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%BASE_DIR%/config/connect-log4j2.properties' - set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties + set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.properties" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.properties +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml ) "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectStandalone %* diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat index b39c840056abd..f9b9661badf18 100644 --- a/bin/windows/kafka-server-start.bat +++ b/bin/windows/kafka-server-start.bat @@ -20,9 +20,13 @@ IF [%1] EQU [] ( ) SetLocal -IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( +IF EXIST "%~dp0../../config/log4j.properties" ( echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%~dp0../../config/log4j2.properties' set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties +) ELSE IF EXIST "%~dp0../../config/log4j2.properties" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.properties +) ELSE IF EXIST "%~dp0../../config/log4j2.xml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.xml ) IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( rem detect OS architecture diff --git a/bin/windows/zookeeper-server-start.bat b/bin/windows/zookeeper-server-start.bat index ad42edf1df9da..465e5fe1042d5 100644 --- a/bin/windows/zookeeper-server-start.bat +++ b/bin/windows/zookeeper-server-start.bat @@ -19,12 +19,14 @@ IF [%1] EQU [] ( EXIT /B 1 ) -echo Running with log4j 2.x - Log4j MBean registration is not supported. 
- SetLocal -IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] ( +IF EXIST "%~dp0../../config/log4j.properties" ( echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%~dp0../../config/log4j2.properties' set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties +) ELSE IF EXIST "%~dp0../../config/log4j2.properties" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.properties +) ELSE IF EXIST "%~dp0../../config/log4j2.xml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.xml ) IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( set KAFKA_HEAP_OPTS=-Xmx512M -Xms512M diff --git a/bin/zookeeper-server-start.sh b/bin/zookeeper-server-start.sh index 3d45d8b7f85d6..3dfbb491dea79 100755 --- a/bin/zookeeper-server-start.sh +++ b/bin/zookeeper-server-start.sh @@ -26,8 +26,6 @@ if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" fi -echo "Running with log4j 2.x - Log4j MBean registration is not supported." 
- if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then export KAFKA_HEAP_OPTS="-Xmx512M -Xms512M" fi From b301c921f75788ba9b566b5b2609cf8f1e2bc580 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sun, 3 Nov 2024 19:16:06 +0800 Subject: [PATCH 15/46] WIP --- build.gradle | 4 +- .../src/test/resources/log4j.properties | 37 ----------- .../src/test/resources/log4j2.properties | 64 +++++++++++++++++++ 3 files changed, 67 insertions(+), 38 deletions(-) delete mode 100644 streams/integration-tests/src/test/resources/log4j.properties create mode 100644 streams/integration-tests/src/test/resources/log4j2.properties diff --git a/build.gradle b/build.gradle index a261056d56c95..6de3d9d7cf33f 100644 --- a/build.gradle +++ b/build.gradle @@ -2875,7 +2875,9 @@ project(':streams:integration-tests') { testImplementation libs.junitJupiter testImplementation libs.junitPlatformSuiteEngine // supports suite test testImplementation libs.mockitoCore - testImplementation libs.reload4j + testImplementation libs.log4j2Api + testImplementation libs.log4j2Core + testImplementation libs.log4j1Bridge2Api testImplementation libs.slf4jApi testImplementation project(':streams:test-utils') diff --git a/streams/integration-tests/src/test/resources/log4j.properties b/streams/integration-tests/src/test/resources/log4j.properties deleted file mode 100644 index b7e1fb2d60ea4..0000000000000 --- a/streams/integration-tests/src/test/resources/log4j.properties +++ /dev/null @@ -1,37 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -log4j.rootLogger=INFO, stdout - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n - -log4j.logger.kafka=ERROR -log4j.logger.state.change.logger=ERROR -log4j.logger.org.apache.kafka=ERROR -log4j.logger.org.apache.zookeeper=ERROR -log4j.logger.org.apache.kafka.clients=ERROR - -# These are the only logs we will likely ever find anything useful in to debug Streams test failures -log4j.logger.org.apache.kafka.clients.consumer=INFO -log4j.logger.org.apache.kafka.clients.producer=INFO -log4j.logger.org.apache.kafka.streams=INFO - -# printing out the configs takes up a huge amount of the allotted characters, -# and provides little value as we can always figure out the test configs without the logs -log4j.logger.org.apache.kafka.clients.producer.ProducerConfig=ERROR -log4j.logger.org.apache.kafka.clients.consumer.ConsumerConfig=ERROR -log4j.logger.org.apache.kafka.clients.admin.AdminClientConfig=ERROR -log4j.logger.org.apache.kafka.streams.StreamsConfig=ERROR diff --git a/streams/integration-tests/src/test/resources/log4j2.properties b/streams/integration-tests/src/test/resources/log4j2.properties new file mode 100644 index 0000000000000..c2d42f2a96bae --- /dev/null +++ b/streams/integration-tests/src/test/resources/log4j2.properties @@ -0,0 +1,64 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# Appender Configuration +name=TestConfig +appenders=console + +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=[%d] %p %m (%c:%L)%n + +rootLogger.level=INFO +rootLogger.appenderRefs=console +rootLogger.appenderRef.stdout.ref=STDOUT + +loggers=kafka,stateChangeLogger,apacheKafka,zookeeper,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig + +logger.kafka.name=kafka +logger.kafka.level=ERROR + +logger.stateChangeLogger.name=state.change.logger +logger.stateChangeLogger.level=ERROR + +logger.apacheKafka.name=org.apache.kafka +logger.apacheKafka.level=ERROR + +logger.zookeeper.name=org.apache.zookeeper +logger.zookeeper.level=ERROR + +logger.kafkaClients.name=org.apache.kafka.clients +logger.kafkaClients.level=ERROR + +logger.consumer.name=org.apache.kafka.clients.consumer +logger.consumer.level=INFO + +logger.producer.name=org.apache.kafka.clients.producer +logger.producer.level=INFO + +logger.streams.name=org.apache.kafka.streams +logger.streams.level=INFO + +logger.producerConfig.name=org.apache.kafka.clients.producer.ProducerConfig +logger.producerConfig.level=ERROR + +logger.consumerConfig.name=org.apache.kafka.clients.consumer.ConsumerConfig 
+logger.consumerConfig.level=ERROR + +logger.adminClientConfig.name=org.apache.kafka.clients.admin.AdminClientConfig +logger.adminClientConfig.level=ERROR + +logger.streamsConfig.name=org.apache.kafka.streams.StreamsConfig +logger.streamsConfig.level=ERROR From c00618a4b16ed6f3a3420c424fdbc64025258efd Mon Sep 17 00:00:00 2001 From: TengYao Chi Date: Fri, 8 Nov 2024 11:25:31 +0800 Subject: [PATCH 16/46] Update LogEvent to immutable Co-authored-by: Piotr P. Karwasz --- .../java/org/apache/kafka/common/utils/LogCaptureAppender.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java index 9126e7f9934b9..2df74c8681c59 100644 --- a/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java +++ b/clients/src/test/java/org/apache/kafka/common/utils/LogCaptureAppender.java @@ -126,7 +126,7 @@ public void setClassLogger(final Class clazz, Level level) { @Override public void append(final LogEvent event) { synchronized (events) { - events.add(event); + events.add(event.toImmutable()); } } From 74085893a62c7d86e41bf825339fee8f9786cef8 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sun, 10 Nov 2024 14:07:29 +0800 Subject: [PATCH 17/46] KAFKA-9366: Resolve conflicts --- .../integration/kafka/api/PlaintextAdminIntegrationTest.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala index 609ebf76630b3..f5ff058ad1f39 100644 --- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala +++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala @@ -52,11 +52,12 @@ import org.apache.kafka.security.authorizer.AclEntry import org.apache.kafka.server.config.{QuotaConfig, ServerConfigs, 
ServerLogConfigs} import org.apache.kafka.storage.internals.log.{CleanerConfig, LogConfig} import org.apache.kafka.test.TestUtils.DEFAULT_MAX_WAIT_MS +import org.apache.log4j.PropertyConfigurator import org.apache.logging.log4j.core.config.Configurator import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.api.{BeforeEach, TestInfo, Timeout} import org.junit.jupiter.params.ParameterizedTest -import org.junit.jupiter.params.provider.{MethodSource, ValueSource} +import org.junit.jupiter.params.provider.{Arguments, MethodSource, ValueSource} import org.slf4j.LoggerFactory import java.util.AbstractMap.SimpleImmutableEntry From 97509f402c2a92c57a140e9831405a06dcf377e0 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 14 Nov 2024 11:35:57 +0800 Subject: [PATCH 18/46] Resolve Conflicts --- core/src/test/resources/log4j2.properties | 7 +------ group-coordinator/src/test/resources/log4j2.properties | 5 +---- metadata/src/test/resources/log4j2.properties | 5 +---- .../integration-tests/src/test/resources/log4j2.properties | 5 +---- streams/src/test/resources/log4j2.properties | 5 +---- 5 files changed, 5 insertions(+), 22 deletions(-) diff --git a/core/src/test/resources/log4j2.properties b/core/src/test/resources/log4j2.properties index a0cc7cfffa392..18f04dd600475 100644 --- a/core/src/test/resources/log4j2.properties +++ b/core/src/test/resources/log4j2.properties @@ -15,7 +15,6 @@ # Root logger configuration name=TestConfig - # Appender configuration appender=console appender.console.type=Console @@ -27,7 +26,7 @@ rootLogger.level=OFF rootLogger.appenderRefs=console rootLogger.appenderRef.console.ref=STDOUT -loggers=kafka,apacheKafka,zookeeper +loggers=kafka,apacheKafka # Logger configurations logger.kafka.name=kafka @@ -35,7 +34,3 @@ logger.kafka.level=WARN logger.apacheKafka.name=org.apache.kafka logger.apacheKafka.level=WARN - -# zkclient can be verbose, during debugging it is common to adjust it separately -logger.zookeeper.name=org.apache.zookeeper
-logger.zookeeper.level=WARN diff --git a/group-coordinator/src/test/resources/log4j2.properties b/group-coordinator/src/test/resources/log4j2.properties index 2d5b98a58e64f..0284fc8354190 100644 --- a/group-coordinator/src/test/resources/log4j2.properties +++ b/group-coordinator/src/test/resources/log4j2.properties @@ -24,10 +24,7 @@ rootLogger.level=DEBUG rootLogger.appenderRefs=console rootLogger.appenderRef.console.ref=STDOUT -loggers=kafka,zookeeper +loggers=kafka logger.kafka.name=org.apache.kafka logger.kafka.level=DEBUG - -logger.zookeeper.name=org.apache.zookeeper -logger.zookeeper.level=WARN diff --git a/metadata/src/test/resources/log4j2.properties b/metadata/src/test/resources/log4j2.properties index 2d5b98a58e64f..0284fc8354190 100644 --- a/metadata/src/test/resources/log4j2.properties +++ b/metadata/src/test/resources/log4j2.properties @@ -24,10 +24,7 @@ rootLogger.level=DEBUG rootLogger.appenderRefs=console rootLogger.appenderRef.console.ref=STDOUT -loggers=kafka,zookeeper +loggers=kafka logger.kafka.name=org.apache.kafka logger.kafka.level=DEBUG - -logger.zookeeper.name=org.apache.zookeeper -logger.zookeeper.level=WARN diff --git a/streams/integration-tests/src/test/resources/log4j2.properties b/streams/integration-tests/src/test/resources/log4j2.properties index c2d42f2a96bae..aaf3b14f667e7 100644 --- a/streams/integration-tests/src/test/resources/log4j2.properties +++ b/streams/integration-tests/src/test/resources/log4j2.properties @@ -25,7 +25,7 @@ rootLogger.level=INFO rootLogger.appenderRefs=console rootLogger.appenderRef.stdout.ref=STDOUT -loggers=kafka,stateChangeLogger,apacheKafka,zookeeper,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig +loggers=kafka,stateChangeLogger,apacheKafka,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig logger.kafka.name=kafka logger.kafka.level=ERROR @@ -36,9 +36,6 @@ logger.stateChangeLogger.level=ERROR 
logger.apacheKafka.name=org.apache.kafka logger.apacheKafka.level=ERROR -logger.zookeeper.name=org.apache.zookeeper -logger.zookeeper.level=ERROR - logger.kafkaClients.name=org.apache.kafka.clients logger.kafkaClients.level=ERROR diff --git a/streams/src/test/resources/log4j2.properties b/streams/src/test/resources/log4j2.properties index c2d42f2a96bae..aaf3b14f667e7 100644 --- a/streams/src/test/resources/log4j2.properties +++ b/streams/src/test/resources/log4j2.properties @@ -25,7 +25,7 @@ rootLogger.level=INFO rootLogger.appenderRefs=console rootLogger.appenderRef.stdout.ref=STDOUT -loggers=kafka,stateChangeLogger,apacheKafka,zookeeper,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig +loggers=kafka,stateChangeLogger,apacheKafka,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig logger.kafka.name=kafka logger.kafka.level=ERROR @@ -36,9 +36,6 @@ logger.stateChangeLogger.level=ERROR logger.apacheKafka.name=org.apache.kafka logger.apacheKafka.level=ERROR -logger.zookeeper.name=org.apache.zookeeper -logger.zookeeper.level=ERROR - logger.kafkaClients.name=org.apache.kafka.clients logger.kafkaClients.level=ERROR From 5f5f0ae2aebb3a3c9d32571e818202af391b55cb Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sat, 16 Nov 2024 00:38:15 +0800 Subject: [PATCH 19/46] KAFKA-9366: Address comments --- README.md | 2 +- bin/connect-distributed.sh | 4 ++-- bin/connect-mirror-maker.sh | 4 ++-- bin/connect-standalone.sh | 4 ++-- bin/kafka-server-start.sh | 4 ++-- bin/windows/connect-distributed.bat | 4 +++- bin/windows/connect-standalone.bat | 4 +++- bin/windows/kafka-server-start.bat | 4 +++- build.gradle | 3 ++- config/connect-log4j2.properties | 2 +- config/log4j2.properties | 7 +----- .../kafka/connect/runtime/LoggersTest.java | 2 +- .../src/test/resources/log4j2.properties | 1 - .../api/PlaintextAdminIntegrationTest.scala | 9 +------- .../test/scala/other/kafka.log4j.properties | 
22 ------------------- .../unit/kafka/network/SocketServerTest.scala | 4 ++-- gradle/dependencies.gradle | 16 +++++++------- .../templates/tools_log4j.properties | 2 +- 18 files changed, 35 insertions(+), 63 deletions(-) delete mode 100644 core/src/test/scala/other/kafka.log4j.properties diff --git a/README.md b/README.md index ef2f7c9ad267f..274a1a9d1965e 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ Follow instructions in https://kafka.apache.org/quickstart ### Running a particular unit/integration test with log4j output ### By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.properties` file in the module's `src/test/resources` directory. -For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j.properties#L21) in `clients/src/test/resources/log4j2.properties` +For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.properties#L21) in `clients/src/test/resources/log4j2.properties` to `log4j.logger.org.apache.kafka=INFO` and then run: ./gradlew cleanTest clients:test --tests NetworkClientTest diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh index fff5e9595e2e2..3cfc07928f1df 100755 --- a/bin/connect-distributed.sh +++ b/bin/connect-distributed.sh @@ -24,8 +24,8 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 - echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 - echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. 
>&2 + echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh index 56368d61391e2..65062fabc0f8c 100755 --- a/bin/connect-mirror-maker.sh +++ b/bin/connect-mirror-maker.sh @@ -24,8 +24,8 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 - echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 - echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 + echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh index a1d648dc1e129..d2203426045c6 100755 --- a/bin/connect-standalone.sh +++ b/bin/connect-standalone.sh @@ -24,8 +24,8 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 - echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 - echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 + echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh index fb54a15cdcead..4fb367e1abf09 100755 --- a/bin/kafka-server-start.sh +++ b/bin/kafka-server-start.sh @@ -23,8 +23,8 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/log4j.properties >&2 - echo To use a Log4j 2.x configuration, create a \$KAFKA_HOME/config/log4j2.xml file and remove the Log4j 1.x configration. >&2 - echo See https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 + echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 + echo You can also use the \$KAFKA_HOME/config/log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" elif [ -f "$base_dir/../config/log4j2.properties" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.properties" diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat index 9e9a07ebe1fe6..efd1d671c0bec 100644 --- a/bin/windows/connect-distributed.bat +++ b/bin/windows/connect-distributed.bat @@ -27,7 +27,9 @@ popd rem Log4j settings IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( - echo DEPRECATED: using log4j 1.x configuration. 
To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%BASE_DIR%/config/connect-log4j2.properties' + echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties + echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. + echo You can also use the %BASE_DIR%/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties ) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.properties" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.properties diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat index 167bfe65731f3..0dacc4e6baf84 100644 --- a/bin/windows/connect-standalone.bat +++ b/bin/windows/connect-standalone.bat @@ -27,7 +27,9 @@ popd rem Log4j settings IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( - echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%BASE_DIR%/config/connect-log4j2.properties' + echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties + echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. + echo You can also use the %BASE_DIR%/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties ) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.properties" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.properties diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat index f9b9661badf18..61c89dfb7487e 100644 --- a/bin/windows/kafka-server-start.bat +++ b/bin/windows/kafka-server-start.bat @@ -21,7 +21,9 @@ IF [%1] EQU [] ( SetLocal IF EXIST "%~dp0../../config/log4j.properties" ( - echo DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'set KAFKA_LOG4J_OPTS=-Dlog4j.configurationFile=file:%~dp0../../config/log4j2.properties' + echo DEPRECATED: Using Log4j 1.x configuration file %~dp0../../config/log4j.properties + echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. + echo You can also use the %~dp0../../config/log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties ) ELSE IF EXIST "%~dp0../../config/log4j2.properties" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.properties diff --git a/build.gradle b/build.gradle index 0e3939b658a92..1580b4caabf29 100644 --- a/build.gradle +++ b/build.gradle @@ -2398,12 +2398,13 @@ project(':tools') { implementation libs.log4j2Api implementation libs.log4j2Core implementation libs.log4j1Bridge2Api - implementation libs.spotbugs implementation libs.joptSimple implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation implementation libs.jacksonJaxrsJsonProvider + compileOnly libs.spotbugs + testImplementation project(':clients') testImplementation project(':clients').sourceSets.test.output testImplementation project(':server') diff --git a/config/connect-log4j2.properties b/config/connect-log4j2.properties index 8e57126162d46..6de453777423c 100644 --- a/config/connect-log4j2.properties +++ b/config/connect-log4j2.properties @@ -13,7 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-name=ConfigConnectConfig +name=ConnectConfig appenders=console, file diff --git a/config/log4j2.properties b/config/log4j2.properties index 0123b460da099..3e8601d5f4486 100644 --- a/config/log4j2.properties +++ b/config/log4j2.properties @@ -94,11 +94,7 @@ rootLogger.appenderRefs=stdout,kafkaAppender rootLogger.appenderRef.stdout.ref=STDOUT rootLogger.appenderRef.kafkaAppender.ref=KafkaAppender -loggers=zookeeper,kafka,apacheKafka,requestLogger,networkRequestChannel,apacheKafkaController,kafkaController,logCleaner,stateChangeLogger,authorizerLogger - -# Zookeeper logger -logger.zookeeper.name=org.apache.zookeeper -logger.zookeeper.level=INFO +loggers=kafka,apacheKafka,requestLogger,networkRequestChannel,apacheKafkaController,kafkaController,logCleaner,stateChangeLogger,authorizerLogger # Kafka logger logger.kafka.name=kafka @@ -117,7 +113,6 @@ logger.requestLogger.appenderRef.requestAppender.ref=RequestAppender # Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output # related to the handling of requests -#loggers=zookeeper,kafka,apacheKafka,requestLogger,networkRequestChannel,apacheKafkaController,kafkaController,logCleaner,stateChangeLogger,authorizerLogger,networkProcessor,serverKafkaApis #logger.networkProcessor.name=kafka.network.Processor #logger.networkProcessor.level=TRACE #logger.networkProcessor.appenderRef.requestAppender.ref=RequestAppender diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java index 6907190069414..32d520e41737c 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java @@ -263,4 +263,4 @@ Logger rootLogger() { return rootLogger; } } -} \ No newline at end of file +} diff --git a/connect/runtime/src/test/resources/log4j2.properties 
b/connect/runtime/src/test/resources/log4j2.properties index ba84db34499ca..462b9d7e48132 100644 --- a/connect/runtime/src/test/resources/log4j2.properties +++ b/connect/runtime/src/test/resources/log4j2.properties @@ -45,6 +45,5 @@ logger.kafkaConnect.level=DEBUG # Troubleshooting KAFKA-17493. logger.kafkaConsumer.name=org.apache.kafka.consumer logger.kafkaConsumer.level=DEBUG - logger.coordinatorGroup.name=org.apache.kafka.coordinator.group logger.coordinatorGroup.level=DEBUG diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala index f5ff058ad1f39..eb9fbc8bdec55 100644 --- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala +++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala @@ -57,7 +57,7 @@ import org.apache.logging.log4j.core.config.Configurator import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.api.{BeforeEach, TestInfo, Timeout} import org.junit.jupiter.params.ParameterizedTest -import org.junit.jupiter.params.provider.{Arguments, MethodSource, ValueSource} +import org.junit.jupiter.params.provider.{MethodSource, ValueSource} import org.slf4j.LoggerFactory import java.util.AbstractMap.SimpleImmutableEntry @@ -3584,13 +3584,6 @@ object PlaintextAdminIntegrationTest { assertEquals(LogConfig.DEFAULT_COMPRESSION_TYPE, configs.get(brokerResource).get(ServerConfigs.COMPRESSION_TYPE_CONFIG).value) } - def getTestQuorumAndGroupProtocolParametersAll() : java.util.stream.Stream[Arguments] = { - util.Arrays.stream(Array( - Arguments.of("kraft", "classic"), - Arguments.of("kraft", "consumer") - )) - } - /** * Resets the logging configuration after the test. 
*/ diff --git a/core/src/test/scala/other/kafka.log4j.properties b/core/src/test/scala/other/kafka.log4j.properties deleted file mode 100644 index 35a2b23796a4e..0000000000000 --- a/core/src/test/scala/other/kafka.log4j.properties +++ /dev/null @@ -1,22 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-log4j.rootLogger=INFO, KAFKA - -log4j.appender.KAFKA=kafka.log4j.KafkaAppender - -log4j.appender.KAFKA.Port=9092 -log4j.appender.KAFKA.Host=localhost -log4j.appender.KAFKA.Topic=test-logger -log4j.appender.KAFKA.Serializer=kafka.AppenderStringSerializer \ No newline at end of file diff --git a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala index 6c9083a7b6570..362796381c29b 100644 --- a/core/src/test/scala/unit/kafka/network/SocketServerTest.scala +++ b/core/src/test/scala/unit/kafka/network/SocketServerTest.scala @@ -45,7 +45,7 @@ import org.apache.kafka.server.metrics.KafkaYammerMetrics import org.apache.kafka.server.network.ConnectionDisconnectListener import org.apache.kafka.server.quota.{ThrottleCallback, ThrottledChannel} import org.apache.kafka.test.{TestSslUtils, TestUtils => JTestUtils} -import org.apache.logging.log4j.Level +import org.apache.logging.log4j.{Level, LogManager} import org.apache.logging.log4j.core.config.Configurator import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.api._ @@ -89,7 +89,7 @@ class SocketServerTest { var server: SocketServer = _ val sockets = new ArrayBuffer[Socket] - private val kafkaLogger = org.apache.logging.log4j.LogManager.getLogger("kafka") + private val kafkaLogger = LogManager.getLogger("kafka") private var logLevelToRestore: Level = _ def endpoint: EndPoint = { KafkaConfig.fromProps(props, doLog = false).dataPlaneListeners.head diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index 0038b69ea0f1c..45e445bb22804 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -151,6 +151,7 @@ libs += [ apachedsJdbmPartition: "org.apache.directory.server:apacheds-jdbm-partition:$versions.apacheds", argparse4j: "net.sourceforge.argparse4j:argparse4j:$versions.argparse4j", bcpkix: "org.bouncycastle:bcpkix-jdk18on:$versions.bcpkix", + bndlib:"biz.aQute.bnd:biz.aQute.bndlib:$versions.bndlib", 
caffeine: "com.github.ben-manes.caffeine:caffeine:$versions.caffeine", classgraph: "io.github.classgraph:classgraph:$versions.classgraph", commonsCli: "commons-cli:commons-cli:$versions.commonsCli", @@ -209,6 +210,10 @@ libs += [ kafkaStreams_36: "org.apache.kafka:kafka-streams:$versions.kafka_36", kafkaStreams_37: "org.apache.kafka:kafka-streams:$versions.kafka_37", kafkaStreams_38: "org.apache.kafka:kafka-streams:$versions.kafka_38", + log4j1Bridge2Api: "org.apache.logging.log4j:log4j-1.2-api:$versions.log4j2", + log4j2Api: "org.apache.logging.log4j:log4j-api:$versions.log4j2", + log4j2Core: "org.apache.logging.log4j:log4j-core:$versions.log4j2", + log4j2CoreTest: "org.apache.logging.log4j:log4j-core-test:$versions.log4j2", lz4: "org.lz4:lz4-java:$versions.lz4", metrics: "com.yammer.metrics:metrics-core:$versions.metrics", dropwizardMetrics: "io.dropwizard.metrics:metrics-core:$versions.dropwizardMetrics", @@ -225,7 +230,9 @@ libs += [ scalaLogging: "com.typesafe.scala-logging:scala-logging_$versions.baseScala:$versions.scalaLogging", scalaReflect: "org.scala-lang:scala-reflect:$versions.scala", slf4jApi: "org.slf4j:slf4j-api:$versions.slf4j", + slf4jLog4j2: "org.apache.logging.log4j:log4j-slf4j-impl:$versions.log4j2", snappy: "org.xerial.snappy:snappy-java:$versions.snappy", + spotbugs: "com.github.spotbugs:spotbugs-annotations:$versions.spotbugs", swaggerAnnotations: "io.swagger.core.v3:swagger-annotations:$swaggerVersion", swaggerJaxrs2: "io.swagger.core.v3:swagger-jaxrs2:$swaggerVersion", zookeeper: "org.apache.zookeeper:zookeeper:$versions.zookeeper", @@ -233,12 +240,5 @@ libs += [ mavenArtifact: "org.apache.maven:maven-artifact:$versions.mavenArtifact", zstd: "com.github.luben:zstd-jni:$versions.zstd", httpclient: "org.apache.httpcomponents:httpclient:$versions.httpclient", - hdrHistogram: "org.hdrhistogram:HdrHistogram:$versions.hdrHistogram", - log4j1Bridge2Api: "org.apache.logging.log4j:log4j-1.2-api:$versions.log4j2", - log4j2Api: 
"org.apache.logging.log4j:log4j-api:$versions.log4j2", - log4j2Core: "org.apache.logging.log4j:log4j-core:$versions.log4j2", - log4j2CoreTest: "org.apache.logging.log4j:log4j-core-test:$versions.log4j2", - slf4jLog4j2: "org.apache.logging.log4j:log4j-slf4j-impl:$versions.log4j2", - spotbugs: "com.github.spotbugs:spotbugs-annotations:$versions.spotbugs", - bndlib:"biz.aQute.bnd:biz.aQute.bndlib:$versions.bndlib" + hdrHistogram: "org.hdrhistogram:HdrHistogram:$versions.hdrHistogram" ] diff --git a/tests/kafkatest/services/performance/templates/tools_log4j.properties b/tests/kafkatest/services/performance/templates/tools_log4j.properties index 13d7aec28681f..df10d88b8abcc 100644 --- a/tests/kafkatest/services/performance/templates/tools_log4j.properties +++ b/tests/kafkatest/services/performance/templates/tools_log4j.properties @@ -22,4 +22,4 @@ log4j.appender.FILE.ImmediateFlush=true # Set the append to false, overwrite log4j.appender.FILE.Append=false log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n \ No newline at end of file +log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n From c30b30968c8136b19e4b6cde79eb7d2b6c1293fd Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sat, 16 Nov 2024 12:11:32 +0800 Subject: [PATCH 20/46] KAFKA-9366: Update README --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 274a1a9d1965e..12b637efd290d 100644 --- a/README.md +++ b/README.md @@ -54,7 +54,7 @@ Follow instructions in https://kafka.apache.org/quickstart ### Running a particular unit/integration test with log4j output ### By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.properties` file in the module's `src/test/resources` directory. 
-For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.properties#L21) in `clients/src/test/resources/log4j2.properties` +For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.properties#L32) in `clients/src/test/resources/log4j2.properties` to `log4j.logger.org.apache.kafka=INFO` and then run: ./gradlew cleanTest clients:test --tests NetworkClientTest From 2b6be7e8cf9fd295f81c51f08b5b71c588b2fcdb Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sat, 16 Nov 2024 12:34:34 +0800 Subject: [PATCH 21/46] Update dependencies.gradle --- gradle/dependencies.gradle | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index 960e2761eadad..ff184f6909b44 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -54,6 +54,7 @@ versions += [ apacheds: "2.0.0-M24", argparse4j: "0.7.0", bcpkix: "1.78.1", + bndlib: "7.0.0", caffeine: "2.9.3", // 3.x supports JDK 11 and above // when updating checkstyle, check whether the exclusion of // CVE-2023-2976 and CVE-2020-8908 can be dropped from @@ -109,6 +110,7 @@ versions += [ kafka_37: "3.7.1", kafka_38: "3.8.1", kafka_39: "3.9.0", + log4j2: "2.24.1", // When updating lz4 make sure the compression levels in org.apache.kafka.common.record.CompressionType are still valid lz4: "1.8.0", mavenArtifact: "3.9.6", @@ -134,9 +136,7 @@ versions += [ // Also make sure the compression levels in org.apache.kafka.common.record.CompressionType are still valid zstd: "1.5.6-6", junitPlatform: "1.10.2", - hdrHistogram: "2.2.2", - log4j2: "2.24.1", - bndlib: "7.0.0" + hdrHistogram: "2.2.2" ] libs += [ From 168557b1b2e2f6c2cf10440af9d99089b2fbe58a Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sun, 17 Nov 2024 16:23:31 +0800 Subject: 
[PATCH 22/46] KAFKA-9366: update Loggers.java --- .../apache/kafka/connect/runtime/Loggers.java | 8 ++++++-- .../kafka/connect/runtime/LoggersTest.java | 17 +++++++++-------- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java index 952cedf109d06..b6f44fb651075 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java @@ -50,9 +50,13 @@ public class Loggers { private static final Logger log = LoggerFactory.getLogger(Loggers.class); /** - * Log4j2 uses "" (empty string) as name of the root logger. + * Log4j uses "root" (case-insensitive) as name of the root logger. + * Note: In log4j, the root logger's name was "root" and Kafka also followed that name for dynamic logging control feature. + * + * The root logger's name is changed in log4j2 to empty string (see: [[LogManager.ROOT_LOGGER_NAME]]) but for backward- + * compatibility. Kafka keeps its original root logger name. It is why here is a dedicated definition for the root logger name. */ - private static final String ROOT_LOGGER_NAME = ""; + private static final String ROOT_LOGGER_NAME = "root"; private final Time time; private final Map lastModifiedTimes; diff --git a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java index 32d520e41737c..23e5f753cdeb2 100644 --- a/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java +++ b/connect/runtime/src/test/java/org/apache/kafka/connect/runtime/LoggersTest.java @@ -181,11 +181,12 @@ public void testSetRootLevel() { // that this test case is not affected by existing Log4j configurations. 
LoggerContext loggerContext = (LoggerContext) LogManager.getContext(false); Configuration config = loggerContext.getConfiguration(); - LoggerConfig rootConfig = new LoggerConfig("", Level.ERROR, false); - config.addLogger("", rootConfig); + String rootLoggerName = "root"; + LoggerConfig rootConfig = new LoggerConfig(rootLoggerName, Level.ERROR, false); + config.addLogger(rootLoggerName, rootConfig); loggerContext.updateLoggers(); - Logger root = LogManager.getLogger(""); + Logger root = LogManager.getLogger(rootLoggerName); Configurator.setLevel(root, Level.ERROR); Logger p = loggerContext.getLogger("a.b.c.p"); @@ -193,7 +194,7 @@ public void testSetRootLevel() { Logger y = loggerContext.getLogger("a.b.c.p.Y"); Logger z = loggerContext.getLogger("a.b.c.p.Z"); Logger w = loggerContext.getLogger("a.b.c.s.W"); - Configurator.setLevel(p, null); + Configurator.setLevel(p, Level.INFO); Configurator.setLevel(x, Level.INFO); Configurator.setLevel(y, Level.INFO); Configurator.setLevel(z, Level.INFO); @@ -201,10 +202,10 @@ public void testSetRootLevel() { Loggers loggers = new TestLoggers(root, x, y, z, w); - List modified = loggers.setLevel("", Level.DEBUG); - assertEquals(Arrays.asList("", "a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W"), modified); + List modified = loggers.setLevel(rootLoggerName, Level.DEBUG); + assertEquals(Arrays.asList("a.b.c.p.X", "a.b.c.p.Y", "a.b.c.p.Z", "a.b.c.s.W", rootLoggerName), modified); - assertEquals(p.getLevel(), Level.DEBUG); + assertEquals(p.getLevel(), Level.INFO); assertEquals(root.getLevel(), Level.DEBUG); @@ -214,7 +215,7 @@ public void testSetRootLevel() { assertEquals(z.getLevel(), Level.DEBUG); Map expectedLevels = new HashMap<>(); - expectedLevels.put("", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); + expectedLevels.put(rootLoggerName, new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); expectedLevels.put("a.b.c.p.X", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); expectedLevels.put("a.b.c.p.Y", new 
LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); expectedLevels.put("a.b.c.p.Z", new LoggerLevel(Level.DEBUG.toString(), INITIAL_TIME)); From efaab5132b71e2566cfb4e4644d6245f261f49d0 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Mon, 18 Nov 2024 18:46:55 +0800 Subject: [PATCH 23/46] KAFKA-9366: Remove log4j properties in config directory --- config/connect-log4j.properties | 39 -------------- config/log4j.properties | 93 --------------------------------- 2 files changed, 132 deletions(-) delete mode 100644 config/connect-log4j.properties delete mode 100644 config/log4j.properties diff --git a/config/connect-log4j.properties b/config/connect-log4j.properties deleted file mode 100644 index 979cb3869f952..0000000000000 --- a/config/connect-log4j.properties +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -log4j.rootLogger=INFO, stdout, connectAppender - -# Send the logs to the console. -# -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout - -# Send the logs to a file, rolling the file at midnight local time. For example, the `File` option specifies the -# location of the log files (e.g. 
${kafka.logs.dir}/connect.log), and at midnight local time the file is closed -# and copied in the same directory but with a filename that ends in the `DatePattern` option. -# -log4j.appender.connectAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.connectAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.connectAppender.File=${kafka.logs.dir}/connect.log -log4j.appender.connectAppender.layout=org.apache.log4j.PatternLayout - -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log messages, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. -# -connect.log.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n - -log4j.appender.stdout.layout.ConversionPattern=${connect.log.pattern} -log4j.appender.connectAppender.layout.ConversionPattern=${connect.log.pattern} diff --git a/config/log4j.properties b/config/log4j.properties deleted file mode 100644 index bcf2b9daa4ea3..0000000000000 --- a/config/log4j.properties +++ /dev/null @@ -1,93 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Unspecified loggers and loggers with additivity=true output to server.log and stdout -# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise -log4j.rootLogger=INFO, stdout, kafkaAppender - -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log -log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log -log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log -log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log -log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH 
-log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log -log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender -log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH -log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log -log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout -log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n - -# Change the two lines below to adjust the general broker logging level (output to server.log and stdout) -log4j.logger.kafka=INFO -log4j.logger.org.apache.kafka=INFO - -# Change to DEBUG or TRACE to enable request logging -log4j.logger.kafka.request.logger=WARN, requestAppender -log4j.additivity.kafka.request.logger=false - -# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output -# related to the handling of requests -#log4j.logger.kafka.network.Processor=TRACE, requestAppender -#log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender -#log4j.additivity.kafka.server.KafkaApis=false -log4j.logger.kafka.network.RequestChannel$=WARN, requestAppender -log4j.additivity.kafka.network.RequestChannel$=false - -# Change the line below to adjust KRaft mode controller logging -log4j.logger.org.apache.kafka.controller=INFO, controllerAppender -log4j.additivity.org.apache.kafka.controller=false - -# Change the line below to adjust ZK mode controller logging -log4j.logger.kafka.controller=TRACE, controllerAppender -log4j.additivity.kafka.controller=false - -log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender -log4j.additivity.kafka.log.LogCleaner=false - -log4j.logger.state.change.logger=INFO, stateChangeAppender -log4j.additivity.state.change.logger=false - -# Access denials are logged at INFO level, change to DEBUG to also 
log allowed accesses -log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender -log4j.additivity.kafka.authorizer.logger=false - From dfdf33b3022bdcb63223e939fdde387b61dfdaae Mon Sep 17 00:00:00 2001 From: frankvicky Date: Mon, 18 Nov 2024 23:08:32 +0800 Subject: [PATCH 24/46] KAFKA-9366: Address comments --- .../org/apache/kafka/connect/runtime/Loggers.java | 13 ++++--------- .../main/scala/kafka/utils/Log4jController.scala | 12 +++++++++++- 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java index b6f44fb651075..fec90d02379c7 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java @@ -34,7 +34,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; import java.util.TreeMap; import java.util.stream.Collectors; @@ -186,15 +185,11 @@ org.apache.logging.log4j.Logger lookupLogger(String logger) { List currentLoggers() { LoggerContext context = (LoggerContext) LogManager.getContext(false); Collection loggerConfigs = context.getConfiguration().getLoggers().values(); - Set loggerNames = loggerConfigs.stream() + return loggerConfigs.stream() .map(LoggerConfig::getName) - .collect(Collectors.toSet()); - - List loggers = new ArrayList<>(); - for (String name : loggerNames) { - loggers.add(LogManager.getLogger(name)); - } - return loggers; + .distinct() + .map(LogManager::getLogger) + .collect(Collectors.toUnmodifiableList()); } // visible for testing diff --git a/core/src/main/scala/kafka/utils/Log4jController.scala b/core/src/main/scala/kafka/utils/Log4jController.scala index d45b5394cf832..50b31ee548e21 100755 --- a/core/src/main/scala/kafka/utils/Log4jController.scala +++ b/core/src/main/scala/kafka/utils/Log4jController.scala @@ -17,6 +17,7 @@ 
package kafka.utils +import org.apache.kafka.common.utils.Utils import org.apache.logging.log4j.core.LoggerContext import org.apache.logging.log4j.core.config.Configurator import org.apache.logging.log4j.{Level, LogManager} @@ -28,7 +29,13 @@ import scala.jdk.CollectionConverters._ object Log4jController { - val ROOT_LOGGER = "" + /** + * Note: In log4j, the root logger's name was "root" and Kafka also followed that name for dynamic logging control feature. + * + * The root logger's name is changed in log4j2 to empty string (see: [[LogManager.ROOT_LOGGER_NAME]]) but for backward- + * compatibility. Kafka keeps its original root logger name. It is why here is a dedicated definition for the root logger name. + */ + val ROOT_LOGGER = "root" /** * Returns a map of the log4j loggers and their assigned log level. @@ -63,6 +70,9 @@ object Log4jController { * @see [[Level.toLevel]] */ def logLevel(loggerName: String, logLevel: String): Boolean = { + if (Utils.isBlank(loggerName) || Utils.isBlank(logLevel)) + return false + val level = Level.toLevel(logLevel.toUpperCase(Locale.ROOT)) if (loggerName == ROOT_LOGGER) { From 4aee10fe18cca0d4c9ada101553e8562d97cc86b Mon Sep 17 00:00:00 2001 From: frankvicky Date: Tue, 19 Nov 2024 15:56:01 +0800 Subject: [PATCH 25/46] KAFKA-9366: Address comments --- build.gradle | 2 +- .../main/scala/kafka/utils/Log4jController.scala | 4 ++-- .../kafka/api/PlaintextAdminIntegrationTest.scala | 14 -------------- tests/kafkatest/services/connect.py | 6 +++--- tests/kafkatest/services/kafka/kafka.py | 2 +- 5 files changed, 7 insertions(+), 21 deletions(-) diff --git a/build.gradle b/build.gradle index a853815c6a97a..ca44b1b399837 100644 --- a/build.gradle +++ b/build.gradle @@ -3518,7 +3518,6 @@ project(':connect:runtime') { implementation libs.log4j2Api implementation libs.log4j2Core implementation libs.log4j1Bridge2Api - implementation libs.spotbugs implementation libs.jose4j // for SASL/OAUTHBEARER JWT validation implementation 
libs.jacksonAnnotations implementation libs.jacksonJaxrsJsonProvider @@ -3535,6 +3534,7 @@ project(':connect:runtime') { implementation libs.swaggerAnnotations compileOnly libs.bndlib + compileOnly libs.spotbugs // We use this library to generate OpenAPI docs for the REST API, but we don't want or need it at compile // or run time. So, we add it to a separate configuration, which we use later on during docs generation diff --git a/core/src/main/scala/kafka/utils/Log4jController.scala b/core/src/main/scala/kafka/utils/Log4jController.scala index 50b31ee548e21..4bc022dadfeba 100755 --- a/core/src/main/scala/kafka/utils/Log4jController.scala +++ b/core/src/main/scala/kafka/utils/Log4jController.scala @@ -48,14 +48,14 @@ object Log4jController { // Loggers defined in the configuration val configured = logContext.getConfiguration.getLoggers.asScala .values - .filter(_.getName != LogManager.ROOT_LOGGER_NAME) + .filterNot(_.getName.equals(LogManager.ROOT_LOGGER_NAME)) .map { logger => logger.getName -> logger.getLevel.toString }.toMap // Loggers actually running val actual = logContext.getLoggers.asScala - .filter(_.getName != LogManager.ROOT_LOGGER_NAME) + .filterNot(_.getName.equals(LogManager.ROOT_LOGGER_NAME)) .map { logger => logger.getName -> logger.getLevel.toString }.toMap diff --git a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala index 9cf12f1a43f0e..c7568d111bb69 100644 --- a/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala +++ b/core/src/test/scala/integration/kafka/api/PlaintextAdminIntegrationTest.scala @@ -56,7 +56,6 @@ import org.apache.kafka.security.authorizer.AclEntry import org.apache.kafka.server.config.{QuotaConfig, ServerConfigs, ServerLogConfigs} import org.apache.kafka.storage.internals.log.{CleanerConfig, LogConfig, LogFileUtils} import org.apache.kafka.test.TestUtils.DEFAULT_MAX_WAIT_MS -import 
org.apache.log4j.PropertyConfigurator import org.apache.logging.log4j.core.config.Configurator import org.junit.jupiter.api.Assertions._ import org.junit.jupiter.api.{BeforeEach, TestInfo, Timeout} @@ -3779,17 +3778,4 @@ object PlaintextAdminIntegrationTest { assertEquals(LogConfig.DEFAULT_COMPRESSION_TYPE, configs.get(brokerResource).get(ServerConfigs.COMPRESSION_TYPE_CONFIG).value) } - - /** - * Resets the logging configuration after the test. - */ - def resetLogging(): Unit = { - org.apache.log4j.LogManager.resetConfiguration() - val stream = this.getClass.getResourceAsStream("/log4j.properties") - try { - PropertyConfigurator.configure(stream) - } finally { - stream.close() - } - } } diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py index c84a3ec43c31e..4780b5e714f1b 100644 --- a/tests/kafkatest/services/connect.py +++ b/tests/kafkatest/services/connect.py @@ -38,7 +38,7 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service): LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log") STDOUT_FILE = os.path.join(PERSISTENT_ROOT, "connect.stdout") STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties") + LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j2.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid") EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties") CONNECT_REST_PORT = 8083 @@ -364,7 +364,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', 
log_file=self.LOG_FILE)) remote_connector_configs = [] for idx, template in enumerate(self.connector_config_templates): target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties") @@ -421,7 +421,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE)) if self.connector_config_templates: raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API") diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index acfa5c7f6c2d9..0a968414cb8ce 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -874,7 +874,7 @@ def start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG, self.render('log4j.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + node.account.create_file(self.LOG4J_CONFIG, self.render('log4j2.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary From 5e1eaf5e4ba26455bf85b8784f93b51e7c72417f Mon Sep 17 00:00:00 2001 From: frankvicky Date: Tue, 19 Nov 2024 17:33:05 +0800 Subject: [PATCH 26/46] KAFKA-9366: Address comments --- LICENSE-binary | 7 +++++-- .../apache/kafka/tools/other/ReplicationQuotasTestRig.java | 4 ++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/LICENSE-binary b/LICENSE-binary index 
97e045fd7534e..6fad2bc439ee7 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -239,6 +239,11 @@ jetty-servlets-9.4.56.v20240826 jetty-util-9.4.56.v20240826 jetty-util-ajax-9.4.56.v20240826 jose4j-0.9.4 +log4j-api-2.24.1 +log4j-core-2.24.1 +log4j-core-test-2.24.1 +log4j-slf4j-impl-2.24.1 +log4j-1.2-api-2.24.1 lz4-java-1.8.0 maven-artifact-3.9.6 metrics-core-4.1.12.1 @@ -254,7 +259,6 @@ netty-transport-native-epoll-4.1.111.Final netty-transport-native-unix-common-4.1.111.Final opentelemetry-proto-1.0.0-alpha plexus-utils-3.5.1 -reload4j-1.2.25 rocksdbjni-7.9.2 scala-library-2.13.15 scala-logging_2.13-3.9.5 @@ -313,7 +317,6 @@ argparse4j-0.7.0, see: licenses/argparse-MIT classgraph-4.8.173, see: licenses/classgraph-MIT jopt-simple-5.0.4, see: licenses/jopt-simple-MIT slf4j-api-1.7.36, see: licenses/slf4j-MIT -slf4j-reload4j-1.7.36, see: licenses/slf4j-MIT pcollections-4.0.1, see: licenses/pcollections-MIT --------------------------------------- diff --git a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java index a6b2f13e3c45d..0dedf567c49d9 100644 --- a/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java +++ b/tools/src/test/java/org/apache/kafka/tools/other/ReplicationQuotasTestRig.java @@ -41,7 +41,7 @@ import org.apache.kafka.server.quota.QuotaType; import org.apache.kafka.tools.reassign.ReassignPartitionsCommand; -import org.apache.log4j.PropertyConfigurator; +import org.apache.logging.log4j.core.config.Configurator; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartFrame; import org.jfree.chart.JFreeChart; @@ -95,7 +95,7 @@ public class ReplicationQuotasTestRig { private static final String DIR; static { - PropertyConfigurator.configure("core/src/test/resources/log4j.properties"); + Configurator.reconfigure(); new File("Experiments").mkdir(); DIR = "Experiments/Run" + 
Long.valueOf(System.currentTimeMillis()).toString().substring(8); From a1c22948b255b4fdb3b723f4a671ad1aa2d7755c Mon Sep 17 00:00:00 2001 From: frankvicky Date: Wed, 20 Nov 2024 00:20:56 +0800 Subject: [PATCH 27/46] KAFKA-9366: migration to yaml --- LICENSE-binary | 1 + build.gradle | 18 ++ clients/src/test/resources/log4j2.properties | 36 --- clients/src/test/resources/log4j2.yaml | 38 +++ config/connect-log4j2.properties | 39 --- config/connect-log4j2.yaml | 44 +++ config/log4j2.properties | 158 ---------- config/log4j2.yaml | 158 ++++++++++ .../file/src/test/resources/log4j2.properties | 36 --- .../file/src/test/resources/log4j2.yaml | 35 +- .../src/test/resources/log4j2.properties | 43 --- .../mirror/src/test/resources/log4j2.yaml | 40 ++- .../src/test/resources/log4j2.properties | 49 --- .../runtime/src/test/resources/log4j2.yaml | 48 +++ .../src/test/resources/log4j2.yaml | 36 ++- gradle/dependencies.gradle | 1 + .../src/test/resources/log4j2.yaml | 31 ++ .../src/test/resources/log4j2.yaml | 36 ++- raft/config/kraft-log4j2.properties | 35 -- raft/config/kraft-log4j2.yaml | 39 +++ .../src/test/resources/log4j2.yaml | 36 ++- server-common/src/test/resources/log4j2.yaml | 35 ++ .../{log4j2.properties => log4j2.yaml} | 29 +- storage/src/test/resources/log4j2.properties | 53 ---- storage/src/test/resources/log4j2.yaml | 58 ++++ .../src/test/resources/log4j2.properties | 61 ---- .../src/test/resources/log4j2.yaml | 65 ++++ .../{log4j2.properties => log4j2.yaml} | 29 +- streams/src/test/resources/log4j2.properties | 61 ---- streams/src/test/resources/log4j2.yaml | 65 ++++ .../src/test/resources/log4j2.properties | 41 --- .../src/test/resources/log4j2.yaml | 41 +++ .../test-utils/src/test/resources/log4j2.yaml | 35 ++ test-common/src/main/resources/log4j2.yaml | 35 ++ .../kafka/templates/log4j2.properties | 298 ------------------ .../services/kafka/templates/log4j2.yaml | 289 +++++++++++++++++ .../services/templates/connect_log4j2.yaml | 43 +++ 
.../templates/tools_log4j2.properties | 38 --- .../services/templates/tools_log4j2.yaml | 45 +-- .../trogdor/templates/log4j2.properties | 45 --- .../services/trogdor/templates/log4j2.yaml | 56 ++++ .../templates/log4j2_template.properties | 38 --- .../streams/templates/log4j2_template.yaml} | 37 ++- 43 files changed, 1291 insertions(+), 1163 deletions(-) delete mode 100644 clients/src/test/resources/log4j2.properties create mode 100644 clients/src/test/resources/log4j2.yaml delete mode 100644 config/connect-log4j2.properties create mode 100644 config/connect-log4j2.yaml delete mode 100644 config/log4j2.properties create mode 100644 config/log4j2.yaml delete mode 100644 connect/file/src/test/resources/log4j2.properties rename group-coordinator/src/test/resources/log4j2.properties => connect/file/src/test/resources/log4j2.yaml (57%) delete mode 100644 connect/mirror/src/test/resources/log4j2.properties rename raft/src/test/resources/log4j2.properties => connect/mirror/src/test/resources/log4j2.yaml (52%) delete mode 100644 connect/runtime/src/test/resources/log4j2.properties create mode 100644 connect/runtime/src/test/resources/log4j2.yaml rename metadata/src/test/resources/log4j2.properties => core/src/test/resources/log4j2.yaml (56%) create mode 100644 group-coordinator/src/test/resources/log4j2.yaml rename server-common/src/test/resources/log4j2.properties => metadata/src/test/resources/log4j2.yaml (57%) delete mode 100644 raft/config/kraft-log4j2.properties create mode 100644 raft/config/kraft-log4j2.yaml rename streams/test-utils/src/test/resources/log4j2.properties => raft/src/test/resources/log4j2.yaml (54%) create mode 100644 server-common/src/test/resources/log4j2.yaml rename shell/src/test/resources/{log4j2.properties => log4j2.yaml} (61%) delete mode 100644 storage/src/test/resources/log4j2.properties create mode 100644 storage/src/test/resources/log4j2.yaml delete mode 100644 streams/integration-tests/src/test/resources/log4j2.properties create mode 
100644 streams/integration-tests/src/test/resources/log4j2.yaml rename streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/{log4j2.properties => log4j2.yaml} (58%) delete mode 100644 streams/src/test/resources/log4j2.properties create mode 100644 streams/src/test/resources/log4j2.yaml delete mode 100644 streams/streams-scala/src/test/resources/log4j2.properties create mode 100644 streams/streams-scala/src/test/resources/log4j2.yaml create mode 100644 streams/test-utils/src/test/resources/log4j2.yaml create mode 100644 test-common/src/main/resources/log4j2.yaml delete mode 100644 tests/kafkatest/services/kafka/templates/log4j2.properties create mode 100644 tests/kafkatest/services/kafka/templates/log4j2.yaml create mode 100644 tests/kafkatest/services/templates/connect_log4j2.yaml delete mode 100644 tests/kafkatest/services/templates/tools_log4j2.properties rename core/src/test/resources/log4j2.properties => tests/kafkatest/services/templates/tools_log4j2.yaml (50%) delete mode 100644 tests/kafkatest/services/trogdor/templates/log4j2.properties create mode 100644 tests/kafkatest/services/trogdor/templates/log4j2.yaml delete mode 100644 tests/kafkatest/tests/streams/templates/log4j2_template.properties rename tests/kafkatest/{services/templates/connect_log4j2.properties => tests/streams/templates/log4j2_template.yaml} (60%) diff --git a/LICENSE-binary b/LICENSE-binary index 6fad2bc439ee7..ce25eb1c24f5c 100644 --- a/LICENSE-binary +++ b/LICENSE-binary @@ -220,6 +220,7 @@ jackson-annotations-2.16.2 jackson-core-2.16.2 jackson-databind-2.16.2 jackson-dataformat-csv-2.16.2 +jackson-dataformat-yaml-2.16.2 jackson-datatype-jdk8-2.16.2 jackson-jaxrs-base-2.16.2 jackson-jaxrs-json-provider-2.16.2 diff --git a/build.gradle b/build.gradle index ca44b1b399837..9dd8ce6f0693f 100644 --- a/build.gradle +++ b/build.gradle @@ -173,6 +173,7 @@ allprojects { libs.scalaLibrary, libs.scalaReflect, libs.jacksonAnnotations, + libs.jacksonDatabindYaml, // be 
explicit about the Netty dependency version instead of relying on the version set by // ZooKeeper (potentially older and containing CVEs) libs.nettyHandler, @@ -1090,6 +1091,7 @@ project(':core') { implementation libs.log4j2Core implementation libs.log4j2Api implementation libs.log4j1Bridge2Api + implementation libs.jacksonDatabindYaml testImplementation project(':clients').sourceSets.test.output testImplementation project(':group-coordinator').sourceSets.test.output @@ -1366,6 +1368,7 @@ project(':metadata') { compileOnly libs.log4j2Api compileOnly libs.log4j2Core compileOnly libs.log4j1Bridge2Api + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.jqwik testImplementation libs.mockitoCore @@ -1494,6 +1497,7 @@ project(':group-coordinator') { testImplementation project(':clients').sourceSets.test.output testImplementation project(':server-common').sourceSets.test.output testImplementation project(':coordinator-common').sourceSets.test.output + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore @@ -1556,6 +1560,7 @@ project(':test-common') { implementation project(':storage') implementation project(':server-common') implementation libs.slf4jApi + implementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore @@ -1857,6 +1862,7 @@ project(':clients') { testImplementation libs.bcpkix testImplementation libs.jacksonJaxrsJsonProvider + testImplementation libs.jacksonDatabindYaml testImplementation libs.jose4j testImplementation libs.junitJupiter testImplementation libs.log4j2Api @@ -2025,11 +2031,13 @@ project(':raft') { implementation project(':clients') implementation libs.slf4jApi implementation libs.jacksonDatabind + implementation libs.jacksonDatabindYaml testImplementation project(':server-common') testImplementation project(':server-common').sourceSets.test.output testImplementation 
project(':clients') testImplementation project(':clients').sourceSets.test.output + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore testImplementation libs.jqwik @@ -2125,6 +2133,7 @@ project(':server-common') { testImplementation project(':clients') testImplementation project(':clients').sourceSets.test.output + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore @@ -2258,6 +2267,7 @@ project(':storage') { testImplementation project(':server-common') testImplementation project(':server-common').sourceSets.test.output testImplementation libs.hamcrest + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore testImplementation libs.bcpkix @@ -2566,6 +2576,7 @@ project(':shell') { testImplementation project(':core') testImplementation project(':server-common') testImplementation project(':server-common').sourceSets.test.output + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testRuntimeOnly runtimeTestLibs @@ -2617,6 +2628,7 @@ project(':streams') { testImplementation libs.log4j2Api testImplementation libs.log4j2Core testImplementation libs.log4j1Bridge2Api + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.bcpkix testImplementation libs.hamcrest @@ -2763,6 +2775,7 @@ project(':streams:streams-scala') { testImplementation project(':clients').sourceSets.test.output testImplementation project(':streams:test-utils') + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoJunitJupiter // supports MockitoExtension testRuntimeOnly runtimeTestLibs @@ -2819,6 +2832,7 @@ project(':streams:integration-tests') { testImplementation project(':transaction-coordinator') testImplementation libs.bcpkix testImplementation libs.hamcrest + 
testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.junitPlatformSuiteEngine // supports suite test testImplementation libs.mockitoCore @@ -2864,6 +2878,7 @@ project(':streams:test-utils') { implementation libs.slf4jApi testImplementation project(':clients').sourceSets.test.output + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore testImplementation libs.hamcrest @@ -3554,6 +3569,7 @@ project(':connect:runtime') { testImplementation project(':server-common').sourceSets.test.output testImplementation project(':test-common:test-common-api') + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore testImplementation libs.mockitoJunitJupiter @@ -3659,6 +3675,7 @@ project(':connect:file') { runtimeOnly libs.log4j2Core runtimeOnly libs.log4j1Bridge2Api + testImplementation libs.jacksonDatabindYaml testImplementation libs.junitJupiter testImplementation libs.mockitoCore @@ -3765,6 +3782,7 @@ project(':connect:mirror') { testImplementation libs.log4j2Core testImplementation libs.log4j1Bridge2Api testImplementation libs.bndlib + testImplementation libs.jacksonDatabindYaml testImplementation libs.mockitoCore testImplementation project(':clients').sourceSets.test.output testImplementation project(':connect:runtime').sourceSets.test.output diff --git a/clients/src/test/resources/log4j2.properties b/clients/src/test/resources/log4j2.properties deleted file mode 100644 index 34a7d635ecd04..0000000000000 --- a/clients/src/test/resources/log4j2.properties +++ /dev/null @@ -1,36 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -name=TestConfig -appenders=console - - -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n - -# Root logger configuration -rootLogger.level=OFF -rootLogger.appenderRefs=console -rootLogger.appenderRef.stdout.ref=STDOUT - -loggers=kafkaLogger,CommonNameLoggingTrustManagerFactoryWrapperLogger - -logger.kafkaLogger.name=org.apache.kafka -logger.kafkaLogger.level=ERROR - -# We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper -logger.CommonNameLoggingTrustManagerFactoryWrapperLogger.name=org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper -logger.CommonNameLoggingTrustManagerFactoryWrapperLogger.level=INFO diff --git a/clients/src/test/resources/log4j2.yaml b/clients/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..bfe8b3835a0c6 --- /dev/null +++ b/clients/src/test/resources/log4j2.yaml @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: OFF + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka + level: ERROR + # We are testing for a particular INFO log message in CommonNameLoggingTrustManagerFactoryWrapper + - name: org.apache.kafka.common.security.ssl.CommonNameLoggingTrustManagerFactoryWrapper + level: INFO diff --git a/config/connect-log4j2.properties b/config/connect-log4j2.properties deleted file mode 100644 index 6de453777423c..0000000000000 --- a/config/connect-log4j2.properties +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name=ConnectConfig - -appenders=console, file - -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n - -appender.file.type=RollingFile -appender.file.name=ConnectAppender -appender.file.fileName=${kafka.logs.dir}/connect.log -appender.file.filePattern=${kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log -appender.file.layout.type=PatternLayout -appender.file.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n -appender.file.policies.type=Policies -appender.file.policies.time.type=TimeBasedTriggeringPolicy -appender.file.policies.time.interval=1 -appender.file.policies.time.modulate=true - -rootLogger.level=INFO -rootLogger.appenderRefs=console,file -rootLogger.appenderRef.console.ref=STDOUT -rootLogger.appenderRef.file.ref=ConnectAppender diff --git a/config/connect-log4j2.yaml b/config/connect-log4j2.yaml new file mode 100644 index 0000000000000..89a9a96736587 --- /dev/null +++ b/config/connect-log4j2.yaml @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "kafka.logs.dir" + value: "." 
+ - name: "logPattern" + value: "[%d] %p %X{connector.context}%m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + RollingFile: + - name: ConnectAppender + fileName: "${sys:kafka.logs.dir}/connect.log" + filePattern: "${sys:kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log" + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + - ref: ConnectAppender diff --git a/config/log4j2.properties b/config/log4j2.properties deleted file mode 100644 index 3e8601d5f4486..0000000000000 --- a/config/log4j2.properties +++ /dev/null @@ -1,158 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -# Unspecified loggers and loggers with additivity=true output to server.log and stdout -# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise -name=LogConfig -appenders=stdout,kafkaAppender,stateChangeAppender,requestAppender,cleanerAppender,controllerAppender,authorizerAppender - -# Console appender (stdout) -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c)%n - -appender.kafkaAppender.type=RollingFile -appender.kafkaAppender.name=KafkaAppender -appender.kafkaAppender.fileName=${kafka.logs.dir}/server.log -appender.kafkaAppender.filePattern=${kafka.logs.dir}/server.log.%d{yyyy-MM-dd-HH} -appender.kafkaAppender.layout.type=PatternLayout -appender.kafkaAppender.layout.pattern=[%d] %p %m (%c)%n -appender.kafkaAppender.policies.type=TimeBasedTriggeringPolicy -appender.kafkaAppender.policies.interval=1 -appender.kafkaAppender.policies.modulate=true - -# State Change appender -appender.stateChangeAppender.type=RollingFile -appender.stateChangeAppender.name=StateChangeAppender -appender.stateChangeAppender.fileName=${kafka.logs.dir}/state-change.log -appender.stateChangeAppender.filePattern=${kafka.logs.dir}/state-change.log.%d{yyyy-MM-dd-HH} -appender.stateChangeAppender.layout.type=PatternLayout -appender.stateChangeAppender.layout.pattern=[%d] %p %m (%c)%n -appender.stateChangeAppender.policies.type=TimeBasedTriggeringPolicy -appender.stateChangeAppender.policies.interval=1 -appender.stateChangeAppender.policies.modulate=true - -# Request appender -appender.requestAppender.type=RollingFile -appender.requestAppender.name=RequestAppender -appender.requestAppender.fileName=${kafka.logs.dir}/kafka-request.log -appender.requestAppender.filePattern=${kafka.logs.dir}/kafka-request.log.%d{yyyy-MM-dd-HH} -appender.requestAppender.layout.type=PatternLayout -appender.requestAppender.layout.pattern=[%d] %p %m (%c)%n 
-appender.requestAppender.policies.type=TimeBasedTriggeringPolicy -appender.requestAppender.policies.interval=1 -appender.requestAppender.policies.modulate=true - -# Cleaner appender -appender.cleanerAppender.type=RollingFile -appender.cleanerAppender.name=CleanerAppender -appender.cleanerAppender.fileName=${kafka.logs.dir}/log-cleaner.log -appender.cleanerAppender.filePattern=${kafka.logs.dir}/log-cleaner.log.%d{yyyy-MM-dd-HH} -appender.cleanerAppender.layout.type=PatternLayout -appender.cleanerAppender.layout.pattern=[%d] %p %m (%c)%n -appender.cleanerAppender.policies.type=TimeBasedTriggeringPolicy -appender.cleanerAppender.policies.interval=1 -appender.cleanerAppender.policies.modulate=true - -# Controller appender -appender.controllerAppender.type=RollingFile -appender.controllerAppender.name=ControllerAppender -appender.controllerAppender.fileName=${kafka.logs.dir}/controller.log -appender.controllerAppender.filePattern=${kafka.logs.dir}/controller.log.%d{yyyy-MM-dd-HH} -appender.controllerAppender.layout.type=PatternLayout -appender.controllerAppender.layout.pattern=[%d] %p %m (%c)%n -appender.controllerAppender.policies.type=TimeBasedTriggeringPolicy -appender.controllerAppender.policies.interval=1 -appender.controllerAppender.policies.modulate=true - -# Authorizer appender -appender.authorizerAppender.type=RollingFile -appender.authorizerAppender.name=AuthorizerAppender -appender.authorizerAppender.fileName=${kafka.logs.dir}/kafka-authorizer.log -appender.authorizerAppender.filePattern=${kafka.logs.dir}/kafka-authorizer.log.%d{yyyy-MM-dd-HH} -appender.authorizerAppender.layout.type=PatternLayout -appender.authorizerAppender.layout.pattern=[%d] %p %m (%c)%n -appender.authorizerAppender.policies.type=TimeBasedTriggeringPolicy -appender.authorizerAppender.policies.interval=1 -appender.authorizerAppender.policies.modulate=true - -rootLogger.level=INFO -rootLogger.appenderRefs=stdout,kafkaAppender -rootLogger.appenderRef.stdout.ref=STDOUT 
-rootLogger.appenderRef.kafkaAppender.ref=KafkaAppender - -loggers=kafka,apacheKafka,requestLogger,networkRequestChannel,apacheKafkaController,kafkaController,logCleaner,stateChangeLogger,authorizerLogger - -# Kafka logger -logger.kafka.name=kafka -logger.kafka.level=INFO - -# Kafka org.apache logger -logger.apacheKafka.name=org.apache.kafka -logger.apacheKafka.level=INFO - -# Kafka request logger -logger.requestLogger.name=kafka.request.logger -logger.requestLogger.level=WARN -logger.requestLogger.additivity=false -logger.requestLogger.appenderRef.requestAppender.ref=RequestAppender - - -# Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output -# related to the handling of requests -#logger.networkProcessor.name=kafka.network.Processor -#logger.networkProcessor.level=TRACE -#logger.networkProcessor.appenderRef.requestAppender.ref=RequestAppender -#logger.serverKafkaApis.name=kafka.server.KafkaApis -#logger.serverKafkaApis.level=TRACE -#logger.serverKafkaApis.additivity=false -#logger.serverKafkaApis.appenderRef.requestAppender.ref=RequestAppender - -# Kafka network RequestChannel$ logger -logger.networkRequestChannel.name=kafka.network.RequestChannel$ -logger.networkRequestChannel.level=WARN -logger.networkRequestChannel.additivity=false -logger.networkRequestChannel.appenderRef.requestAppender.ref=RequestAppender - -# KRaft mode controller logger -logger.apacheKafkaController.name=org.apache.kafka.controller -logger.apacheKafkaController.level=INFO -logger.apacheKafkaController.additivity=false -logger.apacheKafkaController.appenderRef.controllerAppender.ref=ControllerAppender - -# ZK mode controller logger -logger.kafkaController.name=kafka.controller -logger.kafkaController.level=TRACE -logger.kafkaController.additivity=false -logger.kafkaController.appenderRef.controllerAppender.ref=ControllerAppender - -# LogCleaner logger -logger.logCleaner.name=kafka.log.LogCleaner -logger.logCleaner.level=INFO 
-logger.logCleaner.additivity=false -logger.logCleaner.appenderRef.cleanerAppender.ref=CleanerAppender - -# State change logger -logger.stateChangeLogger.name=state.change.logger -logger.stateChangeLogger.level=INFO -logger.stateChangeLogger.additivity=false -logger.stateChangeLogger.appenderRef.stateChangeAppender.ref=StateChangeAppender - -# Authorizer logger -logger.authorizerLogger.name=kafka.authorizer.logger -logger.authorizerLogger.level=INFO -logger.authorizerLogger.additivity=false -logger.authorizerLogger.appenderRef.authorizerAppender.ref=AuthorizerAppender diff --git a/config/log4j2.yaml b/config/log4j2.yaml new file mode 100644 index 0000000000000..2b000d407e2f2 --- /dev/null +++ b/config/log4j2.yaml @@ -0,0 +1,158 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Unspecified loggers and loggers with additivity=true output to server.log and stdout +# Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise +Configuration: + Properties: + Property: + # Fallback if the system property is not set + - name: "kafka.logs.dir" + value: "." 
+ - name: "logPattern" + value: "[%d] %p %m (%c)%n" + + # Appenders configuration + # See: https://logging.apache.org/log4j/2.x/manual/appenders.html + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + RollingFile: + - name: KafkaAppender + fileName: "${sys:kafka.logs.dir}/server.log" + filePattern: "${sys:kafka.logs.dir}/server.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + # State Change appender + - name: StateChangeAppender + fileName: "${sys:kafka.logs.dir}/state-change.log" + filePattern: "${sys:kafka.logs.dir}/stage-change.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + # Request appender + - name: RequestAppender + fileName: "${sys:kafka.logs.dir}/kafka-request.log" + filePattern: "${sys:kafka.logs.dir}/kafka-request.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + # Cleaner appender + - name: CleanerAppender + fileName: "${sys:kafka.logs.dir}/log-cleaner.log" + filePattern: "${sys:kafka.logs.dir}/log-cleaner.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + # Controller appender + - name: ControllerAppender + fileName: "${sys:kafka.logs.dir}/controller.log" + filePattern: "${sys:kafka.logs.dir}/controller.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + # Authorizer appender + - name: AuthorizerAppender + fileName: "${sys:kafka.logs.dir}/kafka-authorizer.log" + filePattern: "${sys:kafka.logs.dir}/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + + # Loggers configuration + # See: 
https://logging.apache.org/log4j/2.x/manual/configuration.html#configuring-loggers + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + - ref: KafkaAppender + Logger: + # Kafka logger + - name: kafka + level: INFO + # Kafka org.apache logger + - name: org.apache.kafka + level: INFO + # Kafka request logger + - name: kafka.request.logger + level: WARN + additivity: false + AppenderRef: + ref: RequestAppender + # Uncomment the lines below and change the kafka.network.RequestChannel$ logger to TRACE + # for additional output related to the handling of requests +# - name: kafka.network.Processor +# level: TRACE +# additivity: false +# AppenderRef: +# ref: RequestAppender +# - name: kafka.server.KafkaApis +# level: TRACE +# additivity: false +# AppenderRef: +# ref: RequestAppender + # Kafka network RequestChannel$ logger + - name: kafka.network.RequestChannel$ + level: WARN + additivity: false + AppenderRef: + ref: RequestAppender + # KRaft mode controller logger + - name: org.apache.kafka.controller + level: INFO + additivity: false + AppenderRef: + ref: ControllerAppender + # ZK mode controller logger + - name: kafka.controller + level: TRACE + additivity: false + AppenderRef: + ref: ControllerAppender + # LogCleaner logger + - name: kafka.log.LogCleaner + level: INFO + additivity: false + AppenderRef: + ref: CleanerAppender + # State change logger + - name: state.change.logger + level: INFO + additivity: false + AppenderRef: + ref: StateChangeAppender + # Authorizer logger + - name: kafka.authorizer.logger + level: INFO + additivity: false + AppenderRef: + ref: AuthorizerAppender \ No newline at end of file diff --git a/connect/file/src/test/resources/log4j2.properties b/connect/file/src/test/resources/log4j2.properties deleted file mode 100644 index 101b56d63c71d..0000000000000 --- a/connect/file/src/test/resources/log4j2.properties +++ /dev/null @@ -1,36 +0,0 @@ -## -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor 
license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -## -name=ConnectFileTestConfig - -appenders=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -# -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. -# -appender.stdout.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n - -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.stdout.ref=STDOUT - -loggers=kafka -logger.kafka.name=kafka -logger.kafka.level=WARN diff --git a/group-coordinator/src/test/resources/log4j2.properties b/connect/file/src/test/resources/log4j2.yaml similarity index 57% rename from group-coordinator/src/test/resources/log4j2.properties rename to connect/file/src/test/resources/log4j2.yaml index 0284fc8354190..1e9f550fa6d6d 100644 --- a/group-coordinator/src/test/resources/log4j2.properties +++ b/connect/file/src/test/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,19 +12,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -name=TestConfig -appenders=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %X{connector.context}%m (%c:%L)%n" -rootLogger.level=DEBUG -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" -loggers=kafka - -logger.kafka.name=org.apache.kafka -logger.kafka.level=DEBUG + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: kafka + level: WARN diff --git a/connect/mirror/src/test/resources/log4j2.properties b/connect/mirror/src/test/resources/log4j2.properties deleted file mode 100644 index aa5596ecd19b7..0000000000000 --- a/connect/mirror/src/test/resources/log4j2.properties +++ /dev/null @@ -1,43 +0,0 @@ -## -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. 
-# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -## -name=ConnectMirrorTestConfig - -appenders=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -# -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. 
-# -appender.console.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n - -loggers=kafka,stateChangeLogger,kafkaConnectLogger - -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT - -logger.kafka.name=kafka -logger.kafka.level=WARN - -logger.stateChangeLogger.name=state.change.logger -logger.stateChangeLogger.level=OFF - -logger.kafkaConnectLogger.name=org.apache.kafka.connect -logger.kafkaConnectLogger.level=DEBUG diff --git a/raft/src/test/resources/log4j2.properties b/connect/mirror/src/test/resources/log4j2.yaml similarity index 52% rename from raft/src/test/resources/log4j2.properties rename to connect/mirror/src/test/resources/log4j2.yaml index ed4c9a98da666..b63606d0ba56f 100644 --- a/raft/src/test/resources/log4j2.properties +++ b/connect/mirror/src/test/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,22 +12,30 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=TestConfig -appenders=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %X{connector.context}%m (%c:%L)%n" -rootLogger.level=OFF -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" -loggers=raft,snapshot + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: kafka + level: WARN -logger.raft.name=org.apache.kafka.raft -logger.raft.level=ERROR + - name: state.change.logger + level: "OFF" -logger.snapshot.name=org.apache.kafka.snapshot -logger.snapshot.level=ERROR + - name: org.apache.kafka.connect + level: DEBUG diff --git a/connect/runtime/src/test/resources/log4j2.properties b/connect/runtime/src/test/resources/log4j2.properties deleted file mode 100644 index 462b9d7e48132..0000000000000 --- a/connect/runtime/src/test/resources/log4j2.properties +++ /dev/null @@ -1,49 +0,0 @@ -## -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-## -name=ConnectRuntimeTestConfig -appenders=console - -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -# -# The `%X{connector.context}` parameter in the layout includes connector-specific and task-specific information -# in the log message, where appropriate. This makes it easier to identify those log messages that apply to a -# specific connector. Simply add this parameter to the log layout configuration below to include the contextual information. -# -appender.console.layout.pattern=[%d] %p %X{connector.context}%m (%c:%L)%n - -loggers=kafka,stateChangeLogger,kafkaConnect,kafkaConsumer,coordinatorGroup - -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT - -logger.kafka.name=kafka -logger.kafka.level=WARN - -logger.stateChangeLogger.name=state.change.logger -logger.stateChangeLogger.level=OFF - -logger.kafkaConnect.name=org.apache.kafka.connect -logger.kafkaConnect.level=DEBUG - -# Troubleshooting KAFKA-17493. -logger.kafkaConsumer.name=org.apache.kafka.consumer -logger.kafkaConsumer.level=DEBUG -logger.coordinatorGroup.name=org.apache.kafka.coordinator.group -logger.coordinatorGroup.level=DEBUG diff --git a/connect/runtime/src/test/resources/log4j2.yaml b/connect/runtime/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..45faa635378a9 --- /dev/null +++ b/connect/runtime/src/test/resources/log4j2.yaml @@ -0,0 +1,48 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %X{connector.context}%m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: kafka + level: WARN + + - name: state.change.logger + level: "OFF" + + - name: org.apache.kafka.connect + level: DEBUG + + # Troubleshooting KAFKA-17493. + - name: org.apache.kafka.consumer + level: DEBUG + + - name: org.apache.kafka.coordinator.group + level: DEBUG diff --git a/metadata/src/test/resources/log4j2.properties b/core/src/test/resources/log4j2.yaml similarity index 56% rename from metadata/src/test/resources/log4j2.properties rename to core/src/test/resources/log4j2.yaml index 0284fc8354190..016a542689b4e 100644 --- a/metadata/src/test/resources/log4j2.properties +++ b/core/src/test/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. 
You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,19 +12,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -name=TestConfig -appenders=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" -rootLogger.level=DEBUG -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" -loggers=kafka + Loggers: + Root: + level: OFF + AppenderRef: + - ref: STDOUT + Logger: + - name: kafka + level: WARN -logger.kafka.name=org.apache.kafka -logger.kafka.level=DEBUG + - name: org.apache.kafka + level: WARN diff --git a/gradle/dependencies.gradle b/gradle/dependencies.gradle index ff184f6909b44..7414e43aa4cc4 100644 --- a/gradle/dependencies.gradle +++ b/gradle/dependencies.gradle @@ -160,6 +160,7 @@ libs += [ commonsValidator: "commons-validator:commons-validator:$versions.commonsValidator", jacksonAnnotations: "com.fasterxml.jackson.core:jackson-annotations:$versions.jackson", jacksonDatabind: "com.fasterxml.jackson.core:jackson-databind:$versions.jackson", + jacksonDatabindYaml: "com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:$versions.jackson", jacksonDataformatCsv: "com.fasterxml.jackson.dataformat:jackson-dataformat-csv:$versions.jackson", jacksonModuleScala: "com.fasterxml.jackson.module:jackson-module-scala_$versions.baseScala:$versions.jackson", jacksonJDK8Datatypes: "com.fasterxml.jackson.datatype:jackson-datatype-jdk8:$versions.jackson", diff --git a/group-coordinator/src/test/resources/log4j2.yaml b/group-coordinator/src/test/resources/log4j2.yaml new file mode 100644 index 
0000000000000..59b02951909e6 --- /dev/null +++ b/group-coordinator/src/test/resources/log4j2.yaml @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: DEBUG + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka + level: DEBUG diff --git a/server-common/src/test/resources/log4j2.properties b/metadata/src/test/resources/log4j2.yaml similarity index 57% rename from server-common/src/test/resources/log4j2.properties rename to metadata/src/test/resources/log4j2.yaml index a12aced7a9c64..fd94a4974e2dc 100644 --- a/server-common/src/test/resources/log4j2.properties +++ b/metadata/src/test/resources/log4j2.yaml @@ -1,9 +1,10 @@ + # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. 
You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,19 +13,24 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -name=TestConfig -appenders=console - -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" -loggers=kafka + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" -logger.kafka.name=org.apache.kafka -logger.kafka.level=INFO + Loggers: + Root: + level: DEBUG + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka + level: DEBUG diff --git a/raft/config/kraft-log4j2.properties b/raft/config/kraft-log4j2.properties deleted file mode 100644 index 4da4ae4487695..0000000000000 --- a/raft/config/kraft-log4j2.properties +++ /dev/null @@ -1,35 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name=RaftConfig -appenders=console - -appender.console.type=Console -appender.console.name=STDERR -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c)%n -appender.console.target=SYSTEM_ERR - -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDERR - -loggers=raft,snapshot - -logger.raft.name=org.apache.kafka.raft -logger.raft.level=INFO - -logger.snapshot.name=org.apache.kafka.snapshot -logger.snapshot.level=INFO diff --git a/raft/config/kraft-log4j2.yaml b/raft/config/kraft-log4j2.yaml new file mode 100644 index 0000000000000..3bfd01ca5cfcf --- /dev/null +++ b/raft/config/kraft-log4j2.yaml @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c)%n" + + Appenders: + Console: + name: STDERR + target: SYSTEM_ERR + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDERR + Logger: + - name: org.apache.kafka.raft + level: INFO + + - name: org.apache.kafka.snapshot + level: INFO diff --git a/streams/test-utils/src/test/resources/log4j2.properties b/raft/src/test/resources/log4j2.yaml similarity index 54% rename from streams/test-utils/src/test/resources/log4j2.properties rename to raft/src/test/resources/log4j2.yaml index 51f3728b70e4a..50d9e781b8ec1 100644 --- a/streams/test-utils/src/test/resources/log4j2.properties +++ b/raft/src/test/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,19 +12,27 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=StreamsTestUtilsTestConfig -appenders=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" -loggers=kafka + Loggers: + Root: + level: OFF + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka.raft + level: ERROR -logger.kafka.name=org.apache.kafka -logger.kafka.level=INFO + - name: org.apache.kafka.snapshot + level: ERROR diff --git a/server-common/src/test/resources/log4j2.yaml b/server-common/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..be546a18b55e6 --- /dev/null +++ b/server-common/src/test/resources/log4j2.yaml @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka + level: INFO diff --git a/shell/src/test/resources/log4j2.properties b/shell/src/test/resources/log4j2.yaml similarity index 61% rename from shell/src/test/resources/log4j2.properties rename to shell/src/test/resources/log4j2.yaml index 2b0b46645f1cd..c229cbce316d1 100644 --- a/shell/src/test/resources/log4j2.properties +++ b/shell/src/test/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,14 +12,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=TestConfig -appenders=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" -rootLogger.level=DEBUG -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: DEBUG + AppenderRef: + - ref: STDOUT diff --git a/storage/src/test/resources/log4j2.properties b/storage/src/test/resources/log4j2.properties deleted file mode 100644 index 91db144ec94e9..0000000000000 --- a/storage/src/test/resources/log4j2.properties +++ /dev/null @@ -1,53 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-name=TestConfig -appenders=console,file - -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n - -appender.file.type=RollingFile -appender.file.name=FileAppender -appender.file.fileName=storage.log -appender.file.layout.type=PatternLayout -appender.file.layout.pattern=%d [%t] %-5p %c %x - %m%n - -appender.file.filePattern=storage-%d{yyyy-MM-dd}.log -appender.file.policies.type=Policies -appender.file.policies.time.type=TimeBasedTriggeringPolicy -appender.file.policies.time.interval=1 - -rootLogger.level=OFF -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT - -loggers=remoteStorage,remoteMetadataStorage,remote - -logger.remoteStorage.name=org.apache.kafka.server.log.remote.storage -logger.remoteStorage.level=INFO -logger.remoteStorage.appenderRefs=file -logger.remoteStorage.appenderRef.fileAppender.ref=FileAppender - -logger.remoteMetadataStorage.name=org.apache.kafka.server.log.remote.metadata.storage -logger.remoteMetadataStorage.level=INFO -logger.remoteMetadataStorage.appenderRefs=file -logger.remoteMetadataStorage.appenderRef.fileAppender.ref=FileAppender - -logger.remote.name=kafka.log.remote -logger.remote.level=INFO -logger.remote.appenderRefs=file -logger.remote.appenderRef.fileAppender.ref=FileAppender diff --git a/storage/src/test/resources/log4j2.yaml b/storage/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..4117e2f148611 --- /dev/null +++ b/storage/src/test/resources/log4j2.yaml @@ -0,0 +1,58 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + - name: "fileLogPattern" + value: "%d [%t] %-5p %c %x - %m%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + RollingFile: + - name: FileAppender + fileName: storage.log + filePattern: "storage-%d{yyyy-MM-dd}.log" + PatternLayout: + pattern: "${fileLogPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + + Loggers: + Root: + level: OFF + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka.server.log.remote.storage + level: INFO + AppenderRef: + - ref: FileAppender + + - name: org.apache.kafka.server.log.remote.metadata.storage + level: INFO + AppenderRef: + - ref: FileAppender + + - name: kafka.log.remote + level: INFO + AppenderRef: + - ref: FileAppender diff --git a/streams/integration-tests/src/test/resources/log4j2.properties b/streams/integration-tests/src/test/resources/log4j2.properties deleted file mode 100644 index aaf3b14f667e7..0000000000000 --- a/streams/integration-tests/src/test/resources/log4j2.properties +++ /dev/null @@ -1,61 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# Appender Configuration -name=TestConfig -appenders=console - -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n - -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.stdout.ref=STDOUT - -loggers=kafka,stateChangeLogger,apacheKafka,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig - -logger.kafka.name=kafka -logger.kafka.level=ERROR - -logger.stateChangeLogger.name=state.change.logger -logger.stateChangeLogger.level=ERROR - -logger.apacheKafka.name=org.apache.kafka -logger.apacheKafka.level=ERROR - -logger.kafkaClients.name=org.apache.kafka.clients -logger.kafkaClients.level=ERROR - -logger.consumer.name=org.apache.kafka.clients.consumer -logger.consumer.level=INFO - -logger.producer.name=org.apache.kafka.clients.producer -logger.producer.level=INFO - -logger.streams.name=org.apache.kafka.streams -logger.streams.level=INFO - -logger.producerConfig.name=org.apache.kafka.clients.producer.ProducerConfig -logger.producerConfig.level=ERROR - -logger.consumerConfig.name=org.apache.kafka.clients.consumer.ConsumerConfig -logger.consumerConfig.level=ERROR - -logger.adminClientConfig.name=org.apache.kafka.clients.admin.AdminClientConfig -logger.adminClientConfig.level=ERROR - -logger.streamsConfig.name=org.apache.kafka.streams.StreamsConfig -logger.streamsConfig.level=ERROR diff --git a/streams/integration-tests/src/test/resources/log4j2.yaml 
b/streams/integration-tests/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..0942036a33c80 --- /dev/null +++ b/streams/integration-tests/src/test/resources/log4j2.yaml @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: kafka + level: ERROR + + - name: state.change.logger + level: ERROR + + - name: org.apache.kafka + level: ERROR + + - name: org.apache.kafka.clients + level: ERROR + + - name: org.apache.kafka.clients.consumer + level: INFO + + - name: org.apache.kafka.clients.producer + level: INFO + + - name: org.apache.kafka.streams + level: INFO + + - name: org.apache.kafka.clients.producer.ProducerConfig + level: ERROR + + - name: org.apache.kafka.clients.consumer.ConsumerConfig + level: ERROR + + - name: org.apache.kafka.clients.admin.AdminClientConfig + level: ERROR + + - name: org.apache.kafka.streams.StreamsConfig + level: ERROR diff --git 
a/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.properties b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.yaml similarity index 58% rename from streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.properties rename to streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.yaml index f656fe5e7abdb..0c112dd06d6ec 100644 --- a/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.properties +++ b/streams/quickstart/java/src/main/resources/archetype-resources/src/main/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,14 +12,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-name=StreamsQuickstartConfig -appenders=console -appender.console.type=Console -appender.console.name=ConsoleAppender -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "%d{HH:mm:ss,SSS} %-5p %-60c %x - %m%n" -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=ConsoleAppender + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT diff --git a/streams/src/test/resources/log4j2.properties b/streams/src/test/resources/log4j2.properties deleted file mode 100644 index aaf3b14f667e7..0000000000000 --- a/streams/src/test/resources/log4j2.properties +++ /dev/null @@ -1,61 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# Appender Configuration -name=TestConfig -appenders=console - -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n - -rootLogger.level=INFO -rootLogger.appenderRefs=console -rootLogger.appenderRef.stdout.ref=STDOUT - -loggers=kafka,stateChangeLogger,apacheKafka,kafkaClients,consumer,producer,streams,producerConfig,consumerConfig,adminClientConfig,streamsConfig - -logger.kafka.name=kafka -logger.kafka.level=ERROR - -logger.stateChangeLogger.name=state.change.logger -logger.stateChangeLogger.level=ERROR - -logger.apacheKafka.name=org.apache.kafka -logger.apacheKafka.level=ERROR - -logger.kafkaClients.name=org.apache.kafka.clients -logger.kafkaClients.level=ERROR - -logger.consumer.name=org.apache.kafka.clients.consumer -logger.consumer.level=INFO - -logger.producer.name=org.apache.kafka.clients.producer -logger.producer.level=INFO - -logger.streams.name=org.apache.kafka.streams -logger.streams.level=INFO - -logger.producerConfig.name=org.apache.kafka.clients.producer.ProducerConfig -logger.producerConfig.level=ERROR - -logger.consumerConfig.name=org.apache.kafka.clients.consumer.ConsumerConfig -logger.consumerConfig.level=ERROR - -logger.adminClientConfig.name=org.apache.kafka.clients.admin.AdminClientConfig -logger.adminClientConfig.level=ERROR - -logger.streamsConfig.name=org.apache.kafka.streams.StreamsConfig -logger.streamsConfig.level=ERROR diff --git a/streams/src/test/resources/log4j2.yaml b/streams/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..0942036a33c80 --- /dev/null +++ b/streams/src/test/resources/log4j2.yaml @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: kafka + level: ERROR + + - name: state.change.logger + level: ERROR + + - name: org.apache.kafka + level: ERROR + + - name: org.apache.kafka.clients + level: ERROR + + - name: org.apache.kafka.clients.consumer + level: INFO + + - name: org.apache.kafka.clients.producer + level: INFO + + - name: org.apache.kafka.streams + level: INFO + + - name: org.apache.kafka.clients.producer.ProducerConfig + level: ERROR + + - name: org.apache.kafka.clients.consumer.ConsumerConfig + level: ERROR + + - name: org.apache.kafka.clients.admin.AdminClientConfig + level: ERROR + + - name: org.apache.kafka.streams.StreamsConfig + level: ERROR diff --git a/streams/streams-scala/src/test/resources/log4j2.properties b/streams/streams-scala/src/test/resources/log4j2.properties deleted file mode 100644 index 9284c02f92f8b..0000000000000 --- a/streams/streams-scala/src/test/resources/log4j2.properties +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright (C) 2018 Lightbend Inc. -# Copyright (C) 2017-2018 Alexis Seigneurin. -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -name=StreamsScalaTestConfig -appenders=console,rolling - -appender.console.type=Console -appender.console.name=A1 - -appender.rolling.type=RollingFile -appender.rolling.name=R -appender.rolling.fileName=logs/kafka-streams-scala.log -appender.rolling.filePattern=logs/kafka-streams-scala.log.%i -appender.rolling.layout.type=PatternLayout -appender.rolling.layout.pattern=%-4r [%t] %-5p %c %x - %m%n - -appender.rolling.policies.type=Policies -appender.rolling.policies.size.type=SizeBasedTriggeringPolicy -appender.rolling.policies.size.size=100KB - -appender.rolling.strategy.type=DefaultRolloverStrategy -appender.rolling.strategy.max=1 - -rootLogger.level=INFO -rootLogger.appenderRefs=R -rootLogger.appenderRef.R.ref=R diff --git a/streams/streams-scala/src/test/resources/log4j2.yaml b/streams/streams-scala/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..d34b13ba7038d --- /dev/null +++ b/streams/streams-scala/src/test/resources/log4j2.yaml @@ -0,0 +1,41 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "%-4r [%t] %-5p %c %x - %m%n" + + Appenders: + Console: + name: A1 + RollingFile: + - name: R + fileName: logs/kafka-streams-scala.log + filePattern: "logs/kafka-streams-scala.log.%i" + PatternLayout: + pattern: "${logPattern}" + Policies: + SizeBasedTriggeringPolicy: + size: "100KB" + DefaultRolloverStrategy: + max: 1 + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: R diff --git a/streams/test-utils/src/test/resources/log4j2.yaml b/streams/test-utils/src/test/resources/log4j2.yaml new file mode 100644 index 0000000000000..be546a18b55e6 --- /dev/null +++ b/streams/test-utils/src/test/resources/log4j2.yaml @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka + level: INFO diff --git a/test-common/src/main/resources/log4j2.yaml b/test-common/src/main/resources/log4j2.yaml new file mode 100644 index 0000000000000..be546a18b55e6 --- /dev/null +++ b/test-common/src/main/resources/log4j2.yaml @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + Logger: + - name: org.apache.kafka + level: INFO diff --git a/tests/kafkatest/services/kafka/templates/log4j2.properties b/tests/kafkatest/services/kafka/templates/log4j2.properties deleted file mode 100644 index 52934ed373634..0000000000000 --- a/tests/kafkatest/services/kafka/templates/log4j2.properties +++ /dev/null @@ -1,298 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -name=KafkaTestsServicesConfig -appenders=stdout,kafkaInfoAppender,kafkaDebugAppender,stateChangeInfoAppender,stateChangeDebugAppender,requestInfoAppender,requestDebugAppender,cleanerInfoAppender,cleanerDebugAppender,controllerInfoAppender,controllerDebugAppender,authorizerInfoAppender,authorizerDebugAppender - -# Console appender -appender.stdout.type=Console -appender.stdout.name=STDOUT -appender.stdout.layout.type=PatternLayout -appender.stdout.layout.pattern=[%d] %p %m (%c)%n - -# Kafka INFO level appender (RollingFileAppender) -appender.kafkaInfoAppender.type=RollingFile -appender.kafkaInfoAppender.name=KafkaInfoAppender -appender.kafkaInfoAppender.fileName={{ log_dir }}/info/server.log -appender.kafkaInfoAppender.filePattern={{ log_dir }}/info/server.log.%d{yyyy-MM-dd-HH} -appender.kafkaInfoAppender.layout.type=PatternLayout -appender.kafkaInfoAppender.layout.pattern=[%d] %p %m (%c)%n -appender.kafkaInfoAppender.policies.type=Policies -appender.kafkaInfoAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.kafkaInfoAppender.policies.time.interval=1 -appender.kafkaInfoAppender.filter.threshold.type=ThresholdFilter -appender.kafkaInfoAppender.filter.threshold.level=INFO - -# Kafka DEBUG level appender (RollingFileAppender) -appender.kafkaDebugAppender.type=RollingFile -appender.kafkaDebugAppender.name=KafkaDebugAppender -appender.kafkaDebugAppender.fileName={{ log_dir }}/debug/server.log -appender.kafkaDebugAppender.filePattern={{ log_dir }}/debug/server.log.%d{yyyy-MM-dd-HH} -appender.kafkaDebugAppender.layout.type=PatternLayout -appender.kafkaDebugAppender.layout.pattern=[%d] %p %m (%c)%n -appender.kafkaDebugAppender.policies.type=Policies -appender.kafkaDebugAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.kafkaDebugAppender.policies.time.interval=1 -appender.kafkaDebugAppender.filter.threshold.type=ThresholdFilter -appender.kafkaDebugAppender.filter.threshold.level=DEBUG - -# State Change INFO level appender 
-appender.stateChangeInfoAppender.type=RollingFile -appender.stateChangeInfoAppender.name=StateChangeInfoAppender -appender.stateChangeInfoAppender.fileName={{ log_dir }}/info/state-change.log -appender.stateChangeInfoAppender.filePattern={{ log_dir }}/info/state-change.log.%d{yyyy-MM-dd-HH} -appender.stateChangeInfoAppender.layout.type=PatternLayout -appender.stateChangeInfoAppender.layout.pattern=[%d] %p %m (%c)%n -appender.stateChangeInfoAppender.policies.type=Policies -appender.stateChangeInfoAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.stateChangeInfoAppender.policies.time.interval=1 -appender.stateChangeInfoAppender.filter.threshold.type=ThresholdFilter -appender.stateChangeInfoAppender.filter.threshold.level=INFO - -# State Change DEBUG level appender -appender.stateChangeDebugAppender.type=RollingFile -appender.stateChangeDebugAppender.name=StateChangeDebugAppender -appender.stateChangeDebugAppender.fileName={{ log_dir }}/debug/state-change.log -appender.stateChangeDebugAppender.filePattern={{ log_dir }}/debug/state-change.log.%d{yyyy-MM-dd-HH} -appender.stateChangeDebugAppender.layout.type=PatternLayout -appender.stateChangeDebugAppender.layout.pattern=[%d] %p %m (%c)%n -appender.stateChangeDebugAppender.policies.type=Policies -appender.stateChangeDebugAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.stateChangeDebugAppender.policies.time.interval=1 -appender.stateChangeDebugAppender.filter.threshold.type=ThresholdFilter -appender.stateChangeDebugAppender.filter.threshold.level=DEBUG - -# Request INFO level appender -appender.requestInfoAppender.type=RollingFile -appender.requestInfoAppender.name=RequestInfoAppender -appender.requestInfoAppender.fileName={{ log_dir }}/info/kafka-request.log -appender.requestInfoAppender.filePattern={{ log_dir }}/info/kafka-request.log.%d{yyyy-MM-dd-HH} -appender.requestInfoAppender.layout.type=PatternLayout -appender.requestInfoAppender.layout.pattern=[%d] %p %m (%c)%n 
-appender.requestInfoAppender.policies.type=Policies -appender.requestInfoAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.requestInfoAppender.policies.time.interval=1 -appender.requestInfoAppender.filter.threshold.type=ThresholdFilter -appender.requestInfoAppender.filter.threshold.level=INFO - -# Request DEBUG level appender -appender.requestDebugAppender.type=RollingFile -appender.requestDebugAppender.name=RequestDebugAppender -appender.requestDebugAppender.fileName={{ log_dir }}/debug/kafka-request.log -appender.requestDebugAppender.filePattern={{ log_dir }}/debug/kafka-request.log.%d{yyyy-MM-dd-HH} -appender.requestDebugAppender.layout.type=PatternLayout -appender.requestDebugAppender.layout.pattern=[%d] %p %m (%c)%n -appender.requestDebugAppender.policies.type=Policies -appender.requestDebugAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.requestDebugAppender.policies.time.interval=1 -appender.requestDebugAppender.filter.threshold.type=ThresholdFilter -appender.requestDebugAppender.filter.threshold.level=DEBUG - -# Cleaner INFO level appender -appender.cleanerInfoAppender.type=RollingFile -appender.cleanerInfoAppender.name=CleanerInfoAppender -appender.cleanerInfoAppender.fileName={{ log_dir }}/info/log-cleaner.log -appender.cleanerInfoAppender.filePattern={{ log_dir }}/info/log-cleaner.log.%d{yyyy-MM-dd-HH} -appender.cleanerInfoAppender.layout.type=PatternLayout -appender.cleanerInfoAppender.layout.pattern=[%d] %p %m (%c)%n -appender.cleanerInfoAppender.policies.type=Policies -appender.cleanerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.cleanerInfoAppender.policies.time.interval=1 -appender.cleanerInfoAppender.filter.threshold.type=ThresholdFilter -appender.cleanerInfoAppender.filter.threshold.level=INFO - -# Cleaner DEBUG level appender -appender.cleanerDebugAppender.type=RollingFile -appender.cleanerDebugAppender.name=CleanerDebugAppender -appender.cleanerDebugAppender.fileName={{ log_dir 
}}/debug/log-cleaner.log -appender.cleanerDebugAppender.filePattern={{ log_dir }}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH} -appender.cleanerDebugAppender.layout.type=PatternLayout -appender.cleanerDebugAppender.layout.pattern=[%d] %p %m (%c)%n -appender.cleanerDebugAppender.policies.type=Policies -appender.cleanerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.cleanerDebugAppender.policies.time.interval=1 -appender.cleanerDebugAppender.filter.threshold.type=ThresholdFilter -appender.cleanerDebugAppender.filter.threshold.level=DEBUG - -# Controller INFO level appender -appender.controllerInfoAppender.type=RollingFile -appender.controllerInfoAppender.name=ControllerInfoAppender -appender.controllerInfoAppender.fileName={{ log_dir }}/info/controller.log -appender.controllerInfoAppender.filePattern={{ log_dir }}/info/controller.log.%d{yyyy-MM-dd-HH} -appender.controllerInfoAppender.layout.type=PatternLayout -appender.controllerInfoAppender.layout.pattern=[%d] %p %m (%c)%n -appender.controllerInfoAppender.policies.type=Policies -appender.controllerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.controllerInfoAppender.policies.time.interval=1 -appender.controllerInfoAppender.filter.threshold.type=ThresholdFilter -appender.controllerInfoAppender.filter.threshold.level=INFO - -# Controller DEBUG level appender -appender.controllerDebugAppender.type=RollingFile -appender.controllerDebugAppender.name=ControllerDebugAppender -appender.controllerDebugAppender.fileName={{ log_dir }}/debug/controller.log -appender.controllerDebugAppender.filePattern={{ log_dir }}/debug/controller.log.%d{yyyy-MM-dd-HH} -appender.controllerDebugAppender.layout.type=PatternLayout -appender.controllerDebugAppender.layout.pattern=[%d] %p %m (%c)%n -appender.controllerDebugAppender.policies.type=Policies -appender.controllerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.controllerDebugAppender.policies.time.interval=1 
-appender.controllerDebugAppender.filter.threshold.type=ThresholdFilter -appender.controllerDebugAppender.filter.threshold.level=DEBUG - -# Authorizer INFO level appender -appender.authorizerInfoAppender.type=RollingFile -appender.authorizerInfoAppender.name=AuthorizerInfoAppender -appender.authorizerInfoAppender.fileName={{ log_dir }}/info/kafka-authorizer.log -appender.authorizerInfoAppender.filePattern={{ log_dir }}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH} -appender.authorizerInfoAppender.layout.type=PatternLayout -appender.authorizerInfoAppender.layout.pattern=[%d] %p %m (%c)%n -appender.authorizerInfoAppender.policies.type=Policies -appender.authorizerInfoAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.authorizerInfoAppender.policies.time.interval=1 -appender.authorizerInfoAppender.filter.threshold.type=ThresholdFilter -appender.authorizerInfoAppender.filter.threshold.level=INFO - -# Authorizer DEBUG level appender -appender.authorizerDebugAppender.type=RollingFile -appender.authorizerDebugAppender.name=AuthorizerDebugAppender -appender.authorizerDebugAppender.fileName={{ log_dir }}/debug/kafka-authorizer.log -appender.authorizerDebugAppender.filePattern={{ log_dir }}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH} -appender.authorizerDebugAppender.layout.type=PatternLayout -appender.authorizerDebugAppender.layout.pattern=[%d] %p %m (%c)%n -appender.authorizerDebugAppender.policies.type=Policies -appender.authorizerDebugAppender.policies.time.type=TimeBasedTriggeringPolicy -appender.authorizerDebugAppender.policies.time.interval=1 -appender.authorizerDebugAppender.filter.threshold.type=ThresholdFilter -appender.authorizerDebugAppender.filter.threshold.level=DEBUG - -# Root logger configuration -rootLogger.level={{ log_level|default("DEBUG") }} -rootLogger.appenderRefs=stdout -rootLogger.appenderRef.stdout.ref=STDOUT - -# Define loggers for specific packages and components 
-loggers=kafka.producer.async.DefaultEventHandler,kafka.client.ClientUtils,kafka.perf,kafka.perf.ProducerPerformance$ProducerThread,kafka,kafka.network.RequestChannel$,kafka.network.Processor,kafka.server.KafkaApis,kafka.request.logger,org.apache.kafka.raft,org.apache.kafka.controller,kafka.controller,kafka.log.LogCleaner,state.change.logger,kafka.authorizer.logger,org.apache.kafka.coordinator.group - -# Kafka loggers -logger.kafka.producer.async.DefaultEventHandler.name=kafka.producer.async.DefaultEventHandler -logger.kafka.producer.async.DefaultEventHandler.level={{ log_level|default("DEBUG") }} -logger.kafka.producer.async.DefaultEventHandler.appenderRefs=kafkaInfoAppender,kafkaDebugAppender -logger.kafka.producer.async.DefaultEventHandler.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender -logger.kafka.producer.async.DefaultEventHandler.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender - -logger.kafka.client.ClientUtils.name=kafka.client.ClientUtils -logger.kafka.client.ClientUtils.level={{ log_level|default("DEBUG") }} -logger.kafka.client.ClientUtils.appenderRefs=kafkaInfoAppender,kafkaDebugAppender -logger.kafka.client.ClientUtils.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender -logger.kafka.client.ClientUtils.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender - -logger.kafka.perf.name=kafka.perf -logger.kafka.perf.level={{ log_level|default("DEBUG") }} -logger.kafka.perf.appenderRefs=kafkaInfoAppender,kafkaDebugAppender -logger.kafka.perf.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender -logger.kafka.perf.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender - -logger.kafka.perf.ProducerPerformance$ProducerThread.name=kafka.perf.ProducerPerformance$ProducerThread -logger.kafka.perf.ProducerPerformance$ProducerThread.level={{ log_level|default("DEBUG") }} -logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRefs=kafkaInfoAppender,kafkaDebugAppender 
-logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender -logger.kafka.perf.ProducerPerformance$ProducerThread.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender - -logger.kafka.name=kafka -logger.kafka.level={{ log_level|default("DEBUG") }} -logger.kafka.appenderRefs=kafkaInfoAppender,kafkaDebugAppender -logger.kafka.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender -logger.kafka.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender - -# Request Channel and Server loggers -logger.kafka.network.RequestChannel$.name=kafka.network.RequestChannel$ -logger.kafka.network.RequestChannel$.level={{ log_level|default("DEBUG") }} -logger.kafka.network.RequestChannel$.appenderRefs=requestInfoAppender,requestDebugAppender -logger.kafka.network.RequestChannel$.appenderRef.requestInfoAppender.ref=RequestInfoAppender -logger.kafka.network.RequestChannel$.appenderRef.requestDebugAppender.ref=RequestDebugAppender -logger.kafka.network.RequestChannel$.additivity=false - -logger.kafka.network.Processor.name=kafka.network.Processor -logger.kafka.network.Processor.level={{ log_level|default("DEBUG") }} -logger.kafka.network.Processor.appenderRefs=requestInfoAppender,requestDebugAppender -logger.kafka.network.Processor.appenderRef.requestInfoAppender.ref=RequestInfoAppender -logger.kafka.network.Processor.appenderRef.requestDebugAppender.ref=RequestDebugAppender - -logger.kafka.server.KafkaApis.name=kafka.server.KafkaApis -logger.kafka.server.KafkaApis.level={{ log_level|default("DEBUG") }} -logger.kafka.server.KafkaApis.appenderRefs=requestInfoAppender,requestDebugAppender -logger.kafka.server.KafkaApis.appenderRef.requestInfoAppender.ref=RequestInfoAppender -logger.kafka.server.KafkaApis.appenderRef.requestDebugAppender.ref=RequestDebugAppender -logger.kafka.server.KafkaApis.additivity=false - -logger.kafka.request.logger.name=kafka.request.logger -logger.kafka.request.logger.level={{ log_level|default("DEBUG") }} 
-logger.kafka.request.logger.appenderRefs=requestInfoAppender,requestDebugAppender -logger.kafka.request.logger.appenderRef.requestInfoAppender.ref=RequestInfoAppender -logger.kafka.request.logger.appenderRef.requestDebugAppender.ref=RequestDebugAppender -logger.kafka.request.logger.additivity=false - -# Controller loggers -logger.org.apache.kafka.raft.name=org.apache.kafka.raft -logger.org.apache.kafka.raft.level={{ log_level|default("DEBUG") }} -logger.org.apache.kafka.raft.appenderRefs=controllerInfoAppender,controllerDebugAppender -logger.org.apache.kafka.raft.appenderRef.controllerInfoAppender.ref=ControllerInfoAppender -logger.org.apache.kafka.raft.appenderRef.controllerDebugAppender.ref=ControllerDebugAppender - -logger.org.apache.kafka.controller.name=org.apache.kafka.controller -logger.org.apache.kafka.controller.level={{ log_level|default("DEBUG") }} -logger.org.apache.kafka.controller.appenderRefs=controllerInfoAppender,controllerDebugAppender -logger.org.apache.kafka.controller.appenderRef.controllerInfoAppender.ref=ControllerInfoAppender -logger.org.apache.kafka.controller.appenderRef.controllerDebugAppender.ref=ControllerDebugAppender - -logger.kafka.controller.name=kafka.controller -logger.kafka.controller.level={{ log_level|default("DEBUG") }} -logger.kafka.controller.appenderRefs=controllerInfoAppender,controllerDebugAppender -logger.kafka.controller.appenderRef.controllerInfoAppender.ref=ControllerInfoAppender -logger.kafka.controller.appenderRef.controllerDebugAppender.ref=ControllerDebugAppender -logger.kafka.controller.additivity=false - -# Log Cleaner logger -logger.kafka.log.LogCleaner.name=kafka.log.LogCleaner -logger.kafka.log.LogCleaner.level={{ log_level|default("DEBUG") }} -logger.kafka.log.LogCleaner.appenderRefs=cleanerInfoAppender,cleanerDebugAppender -logger.kafka.log.LogCleaner.appenderRef.cleanerInfoAppender.ref=CleanerInfoAppender -logger.kafka.log.LogCleaner.appenderRef.cleanerDebugAppender.ref=CleanerDebugAppender 
-logger.kafka.log.LogCleaner.additivity=false - -# State change logger -logger.state.change.logger.name=state.change.logger -logger.state.change.logger.level={{ log_level|default("DEBUG") }} -logger.state.change.logger.appenderRefs=stateChangeInfoAppender,stateChangeDebugAppender -logger.state.change.logger.appenderRef.stateChangeInfoAppender.ref=StateChangeInfoAppender -logger.state.change.logger.appenderRef.stateChangeDebugAppender.ref=StateChangeDebugAppender -logger.state.change.logger.additivity=false - -# Authorizer logger -logger.kafka.authorizer.logger.name=kafka.authorizer.logger -logger.kafka.authorizer.logger.level={{ log_level|default("DEBUG") }} -logger.kafka.authorizer.logger.appenderRefs=authorizerInfoAppender,authorizerDebugAppender -logger.kafka.authorizer.logger.appenderRef.authorizerInfoAppender.ref=AuthorizerInfoAppender -logger.kafka.authorizer.logger.appenderRef.authorizerDebugAppender.ref=AuthorizerDebugAppender -logger.kafka.authorizer.logger.additivity=false - -# Coordinator logger -logger.org.apache.kafka.coordinator.group.name=org.apache.kafka.coordinator.group -logger.org.apache.kafka.coordinator.group.level={{ log_level|default("DEBUG") }} -logger.org.apache.kafka.coordinator.group.appenderRefs=kafkaInfoAppender,kafkaDebugAppender -logger.org.apache.kafka.coordinator.group.appenderRef.kafkaInfoAppender.ref=KafkaInfoAppender -logger.org.apache.kafka.coordinator.group.appenderRef.kafkaDebugAppender.ref=KafkaDebugAppender -logger.org.apache.kafka.coordinator.group.additivity=false diff --git a/tests/kafkatest/services/kafka/templates/log4j2.yaml b/tests/kafkatest/services/kafka/templates/log4j2.yaml new file mode 100644 index 0000000000000..d3aad72df9569 --- /dev/null +++ b/tests/kafkatest/services/kafka/templates/log4j2.yaml @@ -0,0 +1,289 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +Configuration: + Properties: + Property: + - name: "log_dir" + value: "/var/log/kafka" + - name: "log_level" + value: "DEBUG" + - name: "logPattern" + value: "[%d] %p %m (%c)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + RollingFile: + - name: KafkaInfoAppender + fileName: "${sys:log_dir}/info/server.log" + filePattern: "${sys:log_dir}/info/server.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: INFO + + - name: KafkaDebugAppender + fileName: "${sys:log_dir}/debug/server.log" + filePattern: "${sys:log_dir}/debug/server.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: DEBUG + + - name: StateChangeInfoAppender + fileName: "${sys:log_dir}/info/state-change.log" + filePattern: "${sys:log_dir}/info/state-change.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: INFO + + - name: StateChangeDebugAppender + fileName: "${sys:log_dir}/debug/state-change.log" + filePattern: "${sys:log_dir}/debug/state-change.log.%d{yyyy-MM-dd-HH}" 
+ PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: DEBUG + + - name: RequestInfoAppender + fileName: "${sys:log_dir}/info/kafka-request.log" + filePattern: "${sys:log_dir}/info/kafka-request.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: INFO + + - name: RequestDebugAppender + fileName: "${sys:log_dir}/debug/kafka-request.log" + filePattern: "${sys:log_dir}/debug/kafka-request.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: DEBUG + + - name: CleanerInfoAppender + fileName: "${sys:log_dir}/info/log-cleaner.log" + filePattern: "${sys:log_dir}/info/log-cleaner.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: INFO + + - name: CleanerDebugAppender + fileName: "${sys:log_dir}/debug/log-cleaner.log" + filePattern: "${sys:log_dir}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: DEBUG + + - name: ControllerInfoAppender + fileName: "${sys:log_dir}/info/controller.log" + filePattern: "${sys:log_dir}/info/controller.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: INFO + + - name: ControllerDebugAppender + fileName: "${sys:log_dir}/debug/controller.log" + filePattern: "${sys:log_dir}/debug/controller.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: DEBUG + + - name: AuthorizerInfoAppender + fileName: 
"${sys:log_dir}/info/kafka-authorizer.log" + filePattern: "${sys:log_dir}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: INFO + + - name: AuthorizerDebugAppender + fileName: "${sys:log_dir}/debug/kafka-authorizer.log" + filePattern: "${sys:log_dir}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Policies: + TimeBasedTriggeringPolicy: + interval: 1 + Filters: + ThresholdFilter: + level: DEBUG + + Loggers: + Root: + level: "${sys:log_level}" + AppenderRef: + - ref: STDOUT + + Logger: + - name: kafka.producer.async.DefaultEventHandler + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: KafkaInfoAppender + - ref: KafkaDebugAppender + + - name: kafka.client.ClientUtils + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: KafkaInfoAppender + - ref: KafkaDebugAppender + + - name: kafka.perf + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: KafkaInfoAppender + - ref: KafkaDebugAppender + + - name: kafka.perf.ProducerPerformance$ProducerThread + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: KafkaInfoAppender + - ref: KafkaDebugAppender + + - name: kafka + level: "${sys:log_level}" + AppenderRef: + - ref: KafkaInfoAppender + - ref: KafkaDebugAppender + + - name: kafka.network.RequestChannel$ + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: RequestInfoAppender + - ref: RequestDebugAppender + + - name: kafka.network.Processor + level: "${sys:log_level}" + AppenderRef: + - ref: RequestInfoAppender + - ref: RequestDebugAppender + + - name: kafka.server.KafkaApis + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: RequestInfoAppender + - ref: RequestDebugAppender + + - name: kafka.request.logger + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: 
RequestInfoAppender + - ref: RequestDebugAppender + + - name: org.apache.kafka.raft + level: "${sys:log_level}" + AppenderRef: + - ref: ControllerInfoAppender + - ref: ControllerDebugAppender + + - name: org.apache.kafka.controller + level: "${sys:log_level}" + AppenderRef: + - ref: ControllerInfoAppender + - ref: ControllerDebugAppender + + - name: kafka.controller + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: ControllerInfoAppender + - ref: ControllerDebugAppender + + - name: kafka.log.LogCleaner + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: CleanerInfoAppender + - ref: CleanerDebugAppender + + - name: state.change.logger + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: StateChangeInfoAppender + - ref: StateChangeDebugAppender + + - name: kafka.authorizer.logger + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: AuthorizerInfoAppender + - ref: AuthorizerDebugAppender + + - name: org.apache.kafka.coordinator.group + level: "${sys:log_level}" + additivity: false + AppenderRef: + - ref: KafkaInfoAppender + - ref: KafkaDebugAppender diff --git a/tests/kafkatest/services/templates/connect_log4j2.yaml b/tests/kafkatest/services/templates/connect_log4j2.yaml new file mode 100644 index 0000000000000..873fbd31ece65 --- /dev/null +++ b/tests/kafkatest/services/templates/connect_log4j2.yaml @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + RollingFile: + - name: FILE + fileName: {{ log_file }} + filePattern: "{{ log_file }}.%d{yyyy-MM-dd-HH}" + immediateFlush: true + PatternLayout: + pattern: "${logPattern}" + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 + + Loggers: + Root: + level: {{ log_level|default("INFO") }} + AppenderRef: + - ref: FILE diff --git a/tests/kafkatest/services/templates/tools_log4j2.properties b/tests/kafkatest/services/templates/tools_log4j2.properties deleted file mode 100644 index e906667582fc9..0000000000000 --- a/tests/kafkatest/services/templates/tools_log4j2.properties +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -# Define the root logger with appender file -name=KafkaTestsTemplatesToolConfig - -appenders=file - -appender.file.type=File -appender.file.name=FILE -appender.file.fileName={{ log_file }} -appender.file.append=true -appender.file.immediateFlush=true -appender.file.layout.type=PatternLayout -appender.file.layout.pattern=[%d] %p %m (%c)%n - -rootLogger.level={{ log_level|default("INFO") }} -rootLogger.appenderRefs=file -rootLogger.appenderRef.file.ref=FILE - -{% if loggers is defined %} -{% for logger, log_level in loggers.items() %} -logger.{{ logger }}.name={{ logger }} -logger.{{ logger }}.level={{ log_level }} -{% endfor %} -{% endif %} diff --git a/core/src/test/resources/log4j2.properties b/tests/kafkatest/services/templates/tools_log4j2.yaml similarity index 50% rename from core/src/test/resources/log4j2.properties rename to tests/kafkatest/services/templates/tools_log4j2.yaml index 18f04dd600475..081c0b53a282d 100644 --- a/core/src/test/resources/log4j2.properties +++ b/tests/kafkatest/services/templates/tools_log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,25 +12,28 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-# Root logger configuration -name=TestConfig -# Appender configuration -appender=console -appender.console.type=Console -appender.console.name=STDOUT -appender.console.layout.type=PatternLayout -appender.console.layout.pattern=[%d] %p %m (%c:%L)%n +Configuration: + Appenders: + File: + name: FILE + fileName: {{ log_file }} + append: true + immediateFlush: true + PatternLayout: + pattern: "[%d] %p %m (%c)%n" -rootLogger.level=OFF -rootLogger.appenderRefs=console -rootLogger.appenderRef.console.ref=STDOUT + Loggers: + Root: + level: {{ log_level|default("INFO") }} + AppenderRef: + - ref: FILE -loggers=kafka,apacheKafka - -# Logger configurations -logger.kafka.name=kafka -logger.kafka.level=WARN - -logger.apacheKafka.name=org.apache.kafka -logger.apacheKafka.level=WARN + Logger: + # Add additional loggers dynamically if defined + {% if loggers is defined %} + {% for logger, log_level in loggers.items() %} +    - name: {{ logger }} +      level: {{ log_level }} + {% endfor %} + {% endif %} \ No newline at end of file diff --git a/tests/kafkatest/services/trogdor/templates/log4j2.properties b/tests/kafkatest/services/trogdor/templates/log4j2.properties deleted file mode 100644 index c9978b28e3ac5..0000000000000 --- a/tests/kafkatest/services/trogdor/templates/log4j2.properties +++ /dev/null @@ -1,45 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and -# limitations under the License. - -name=KafkaTestsTrogdorConfig -appenders=mylogger - -appender.mylogger.type=File -appender.mylogger.name=MyFileLogger -appender.mylogger.fileName={{ log_path }} -appender.mylogger.layout.type=PatternLayout -appender.mylogger.layout.pattern=[%d] %p %m (%c)%n - -rootLogger.level=DEBUG -rootLogger.appenderRefs=mylogger -rootLogger.appenderRef.mylogger.ref=MyFileLogger - -logger.kafka.name=kafka -logger.kafka.level=DEBUG -logger.kafka.additivity=false -logger.kafka.appenderRefs=mylogger -logger.kafka.appenderRef.mylogger.ref=MyFileLogger - -logger.org.apache.kafka.name=org.apache.kafka -logger.org.apache.kafka.level=DEBUG -logger.org.apache.kafka.additivity=false -logger.org.apache.kafka.appenderRefs=mylogger -logger.org.apache.kafka.appenderRef.mylogger.ref=MyFileLogger - -logger.org.eclipse.name=org.eclipse -logger.org.eclipse.level=INFO -logger.org.eclipse.additivity=false -logger.org.eclipse.appenderRefs=mylogger -logger.org.eclipse.appenderRef.mylogger.ref=MyFileLogger diff --git a/tests/kafkatest/services/trogdor/templates/log4j2.yaml b/tests/kafkatest/services/trogdor/templates/log4j2.yaml new file mode 100644 index 0000000000000..b6367ff2774a6 --- /dev/null +++ b/tests/kafkatest/services/trogdor/templates/log4j2.yaml @@ -0,0 +1,56 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c)%n" + + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + RollingFile: + - name: FILE + fileName: {{ log_path }} + filePattern: "{{ log_path }}.%d{yyyy-MM-dd-HH}" + PatternLayout: + pattern: "${logPattern}" + Loggers: + Root: + level: DEBUG + AppenderRef: + - ref: FILE + Logger: + - name: kafka + level: DEBUG + additivity: false + AppenderRef: + - ref: FILE + + - name: org.apache.kafka + level: DEBUG + additivity: false + AppenderRef: + - ref: FILE + + - name: org.eclipse + level: INFO + additivity: false + AppenderRef: + - ref: FILE diff --git a/tests/kafkatest/tests/streams/templates/log4j2_template.properties b/tests/kafkatest/tests/streams/templates/log4j2_template.properties deleted file mode 100644 index 45dbbf0502c0d..0000000000000 --- a/tests/kafkatest/tests/streams/templates/log4j2_template.properties +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License.
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Define the root logger with appender file -name=KafkaTestsStreamsTemplateConfig - -appenders=file - -appender.file.type=File -appender.file.name=FILE -appender.file.fileName={{ log_file }} -appender.file.append=true -appender.file.immediateFlush=true -appender.file.layout.type=PatternLayout -appender.file.layout.pattern=[%d] %p %m (%c)%n - -rootLogger.level={{ log_level|default("INFO") }} -rootLogger.appenderRefs=file -rootLogger.appenderRef.file.ref=FILE - -{% if loggers is defined %} -{% for logger, log_level in loggers.items() %} -logger.{{ logger }}.name={{ logger }} -logger.{{ logger }}.level={{ log_level }} -{% endfor %} -{% endif %} diff --git a/tests/kafkatest/services/templates/connect_log4j2.properties b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml similarity index 60% rename from tests/kafkatest/services/templates/connect_log4j2.properties rename to tests/kafkatest/tests/streams/templates/log4j2_template.yaml index 2dd5ec2696bc9..757a4ec4421bb 100644 --- a/tests/kafkatest/services/templates/connect_log4j2.properties +++ b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml @@ -1,4 +1,3 @@ -## # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. @@ -13,21 +12,29 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-## # Define the root logger with appender file -name=TestsTemplatesConnectConfig +Configuration: + Appenders: + File: + name: FILE + fileName: {{ log_file }} + append: true + immediateFlush: true + PatternLayout: + pattern: "[%d] %p %m (%c)%n" -appenders=file + Loggers: + Root: + level: {{ log_level|default("INFO") }} + AppenderRef: + - ref: FILE -appender.file.type=File -appender.file.name=FILE -appender.file.fileName={{ log_file }} -appender.file.append=true -appender.file.immediateFlush=true -appender.file.layout.type=PatternLayout -appender.file.layout.pattern=[%d] %p %m (%c)%n - -rootLogger.level={{ log_level|default("INFO") }} -rootLogger.appenderRefs=file -rootLogger.appenderRef.file.ref=FILE + Logger: + # Dynamically generated logger section + {% if loggers is defined %} + {% for logger, log_level in loggers.items() %} + - name: {{ logger }} + level: {{ log_level }} + {% endfor %} + {% endif %} \ No newline at end of file From 7672fa5e2b0bdfbceb21d4783570e7ba64095a0e Mon Sep 17 00:00:00 2001 From: frankvicky Date: Wed, 20 Nov 2024 19:40:41 +0800 Subject: [PATCH 28/46] KAFKA-9366: Update script and readme --- README.md | 4 ++-- bin/connect-distributed.sh | 6 +++--- bin/connect-mirror-maker.sh | 6 +++--- bin/connect-standalone.sh | 6 +++--- bin/kafka-server-start.sh | 6 +++--- bin/windows/connect-distributed.bat | 6 +++--- bin/windows/connect-standalone.bat | 6 +++--- bin/windows/kafka-server-start.bat | 6 +++--- raft/bin/test-kraft-server-start.sh | 2 +- 9 files changed, 24 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index 12b637efd290d..585356ea5e2a3 100644 --- a/README.md +++ b/README.md @@ -52,9 +52,9 @@ Follow instructions in https://kafka.apache.org/quickstart ./gradlew clients:test --tests org.apache.kafka.clients.MetadataTest.testTimeToNextUpdate ### Running a particular unit/integration test with log4j output ### -By default, there will be only small number of logs output while testing.
You can adjust it by changing the `log4j2.properties` file in the module's `src/test/resources` directory. +By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.yml` file in the module's `src/test/resources` directory. -For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.properties#L32) in `clients/src/test/resources/log4j2.properties` +For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.yml#L35) in `clients/src/test/resources/log4j2.yml` to `log4j.logger.org.apache.kafka=INFO` and then run: ./gradlew cleanTest clients:test --tests NetworkClientTest diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh index 3cfc07928f1df..7d77fce737f45 100755 --- a/bin/connect-distributed.sh +++ b/bin/connect-distributed.sh @@ -25,10 +25,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" +elif [ -f "$base_dir/../config/connect-log4j2.yml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yml" elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh index 65062fabc0f8c..511509ea438fb 100755 --- a/bin/connect-mirror-maker.sh +++ b/bin/connect-mirror-maker.sh @@ -25,10 +25,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" +elif [ -f "$base_dir/../config/connect-log4j2.yml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yml" elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh index d2203426045c6..383f545dd57f8 100755 --- a/bin/connect-standalone.sh +++ b/bin/connect-standalone.sh @@ -25,10 +25,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.properties" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.properties" +elif [ -f "$base_dir/../config/connect-log4j2.yml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yml" elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh index 4fb367e1abf09..6dc290c0b826a 100755 --- a/bin/kafka-server-start.sh +++ b/bin/kafka-server-start.sh @@ -24,10 +24,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 + echo You can also use the \$KAFKA_HOME/config/log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" -elif [ -f "$base_dir/../config/log4j2.properties" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.properties" +elif [ -f "$base_dir/../config/log4j2.yml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.yml" elif [ -f "$base_dir/../config/log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.xml" fi diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat index efd1d671c0bec..be1dd014a8302 100644 --- a/bin/windows/connect-distributed.bat +++ b/bin/windows/connect-distributed.bat @@ -29,10 +29,10 @@ rem Log4j settings IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. - echo You can also use the %BASE_DIR%/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. + echo You can also use the %BASE_DIR%/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties -) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.properties" ( - set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.properties +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yml ) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml ) diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat index 0dacc4e6baf84..56ae686adf5c7 100644 --- a/bin/windows/connect-standalone.bat +++ b/bin/windows/connect-standalone.bat @@ -29,10 +29,10 @@ rem Log4j settings IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. - echo You can also use the %BASE_DIR%/config/connect-log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. + echo You can also use the %BASE_DIR%/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties -) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.properties" ( - set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.properties +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yml ) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml ) diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat index 61c89dfb7487e..594f9f4087358 100644 --- a/bin/windows/kafka-server-start.bat +++ b/bin/windows/kafka-server-start.bat @@ -23,10 +23,10 @@ SetLocal IF EXIST "%~dp0../../config/log4j.properties" ( echo DEPRECATED: Using Log4j 1.x configuration file %~dp0../../config/log4j.properties echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. - echo You can also use the %~dp0../../config/log4j2.properties file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. + echo You can also use the %~dp0../../config/log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties -) ELSE IF EXIST "%~dp0../../config/log4j2.properties" ( - set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.properties +) ELSE IF EXIST "%~dp0../../config/log4j2.yml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.yml ) ELSE IF EXIST "%~dp0../../config/log4j2.xml" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.xml ) diff --git a/raft/bin/test-kraft-server-start.sh b/raft/bin/test-kraft-server-start.sh index 2eefdd4912a6e..ad7d755752f72 100755 --- a/raft/bin/test-kraft-server-start.sh +++ b/raft/bin/test-kraft-server-start.sh @@ -17,7 +17,7 @@ base_dir=$(dirname $0) if [ "x$KAFKA_LOG4J_OPTS" = "x" ]; then - echo "DEPRECATED: using log4j 1.x configuration. To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/kraft-log4j2.properties\"'" + echo "DEPRECATED: using log4j 1.x configuration. 
To use log4j 2.x configuration, run with: 'export KAFKA_LOG4J_OPTS=\"-Dlog4j.configurationFile=file:$base_dir/../config/kraft-log4j2.yml\"'" export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/kraft-log4j.properties" fi From f3a68e1b9be3c24205e437e8b4231d119ec01f89 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Wed, 20 Nov 2024 20:48:28 +0800 Subject: [PATCH 29/46] KAFKA-9366: update e2e --- tests/kafkatest/services/connect.py | 9 ++++--- tests/kafkatest/services/kafka/kafka.py | 5 ++-- .../templates/connect_log4j.properties | 26 +++++++++++++++++++ 3 files changed, 35 insertions(+), 5 deletions(-) create mode 100644 tests/kafkatest/services/templates/connect_log4j.properties diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py index 4780b5e714f1b..d4078500754b5 100644 --- a/tests/kafkatest/services/connect.py +++ b/tests/kafkatest/services/connect.py @@ -26,6 +26,7 @@ from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin from kafkatest.services.kafka.util import fix_opts_for_new_jvm +from kafkatest.version import LATEST_3_9 class ConnectServiceBase(KafkaPathResolverMixin, Service): @@ -38,7 +39,7 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service): LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log") STDOUT_FILE = os.path.join(PERSISTENT_ROOT, "connect.stdout") STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j2.properties") + LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid") EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties") CONNECT_REST_PORT = 8083 @@ -364,7 +365,8 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) 
node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE)) + log4j_path = 'connect_log4j2.yaml' if node.version > LATEST_3_9 else 'connect_log4j.properties' + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(log4j_path, log_file=self.LOG_FILE)) remote_connector_configs = [] for idx, template in enumerate(self.connector_config_templates): target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties") @@ -421,7 +423,8 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('connect_log4j2.properties', log_file=self.LOG_FILE)) + log4j_path = 'connect_log4j2.yaml' if node.version > LATEST_3_9 else 'connect_log4j.properties' + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(log4j_path, log_file=self.LOG_FILE)) if self.connector_config_templates: raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API") diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index 0a968414cb8ce..796985c8fc822 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -31,7 +31,7 @@ from kafkatest.services.security.minikdc import MiniKdc from kafkatest.services.security.listener_security_config import ListenerSecurityConfig from kafkatest.services.security.security_config import SecurityConfig -from kafkatest.version import DEV_BRANCH +from kafkatest.version import DEV_BRANCH, LATEST_3_9 from kafkatest.version import KafkaVersion from kafkatest.services.kafka.util import fix_opts_for_new_jvm @@ -874,7 +874,8 @@ def 
start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG, self.render('log4j2.properties', log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + log4j_path = 'connect_log4j2.yaml' if node.version > LATEST_3_9 else 'connect_log4j.properties' + node.account.create_file(self.LOG4J_CONFIG, self.render(log4j_path, log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary diff --git a/tests/kafkatest/services/templates/connect_log4j.properties b/tests/kafkatest/services/templates/connect_log4j.properties new file mode 100644 index 0000000000000..0585cc5afacfb --- /dev/null +++ b/tests/kafkatest/services/templates/connect_log4j.properties @@ -0,0 +1,26 @@ +## +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+## + +# Define the root logger with appender file +log4j.rootLogger = {{ log_level|default("INFO") }}, FILE + +log4j.appender.FILE=org.apache.log4j.FileAppender +log4j.appender.FILE.File={{ log_file }} +log4j.appender.FILE.ImmediateFlush=true +log4j.appender.FILE.Append=true +log4j.appender.FILE.layout=org.apache.log4j.PatternLayout +log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n \ No newline at end of file From 9343fc3eb8dde956bab20242e312ca319f7e18c7 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Wed, 20 Nov 2024 23:13:42 +0800 Subject: [PATCH 30/46] KAFKA-9366: fix yaml --- config/connect-log4j2.yaml | 7 ++-- config/log4j2.yaml | 42 +++++++++++-------- .../src/test/resources/log4j2.yaml | 4 +- .../services/kafka/templates/log4j2.yaml | 2 +- tests/kafkatest/services/kafka/util.py | 3 +- .../templates/connect_log4j.properties | 2 +- .../services/templates/connect_log4j2.yaml | 8 ++-- .../services/templates/tools_log4j2.yaml | 12 +++--- .../streams/templates/log4j2_template.yaml | 2 +- 9 files changed, 46 insertions(+), 36 deletions(-) diff --git a/config/connect-log4j2.yaml b/config/connect-log4j2.yaml index 89a9a96736587..44e95c55fb537 100644 --- a/config/connect-log4j2.yaml +++ b/config/connect-log4j2.yaml @@ -33,9 +33,10 @@ Configuration: filePattern: "${sys:kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log" PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Policies: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 Loggers: Root: level: INFO diff --git a/config/log4j2.yaml b/config/log4j2.yaml index 2b000d407e2f2..031ca4e0dba7d 100644 --- a/config/log4j2.yaml +++ b/config/log4j2.yaml @@ -38,54 +38,60 @@ Configuration: filePattern: "${sys:kafka.logs.dir}/server.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Polices: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # State Change appender - name: 
StateChangeAppender fileName: "${sys:kafka.logs.dir}/state-change.log" filePattern: "${sys:kafka.logs.dir}/stage-change.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Polices: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Request appender - name: RequestAppender fileName: "${sys:kafka.logs.dir}/kafka-request.log" filePattern: "${sys:kafka.logs.dir}/kafka-request.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Polices: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Cleaner appender - name: CleanerAppender fileName: "${sys:kafka.logs.dir}/log-cleaner.log" filePattern: "${sys:kafka.logs.dir}/log-cleaner.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Polices: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Controller appender - name: ControllerAppender fileName: "${sys:kafka.logs.dir}/controller.log" filePattern: "${sys:kafka.logs.dir}/controller.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Polices: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Authorizer appender - name: AuthorizerAppender fileName: "${sys:kafka.logs.dir}/kafka-authorizer.log" filePattern: "${sys:kafka.logs.dir}/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Polices: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Loggers configuration # See: https://logging.apache.org/log4j/2.x/manual/configuration.html#configuring-loggers diff --git a/streams/streams-scala/src/test/resources/log4j2.yaml b/streams/streams-scala/src/test/resources/log4j2.yaml index d34b13ba7038d..708c85050d081 100644 --- 
a/streams/streams-scala/src/test/resources/log4j2.yaml +++ b/streams/streams-scala/src/test/resources/log4j2.yaml @@ -25,9 +25,9 @@ Configuration: RollingFile: - name: R fileName: logs/kafka-streams-scala.log - filePattern: "${logPattern}" + filePattern: "streams-scala-%d{yyyy-MM-dd}.log" PatternLayout: - pattern: "%-4r [%t] %-5p %c %x - %m%n" + pattern: "${logPattern}" Policies: SizeBasedTriggeringPolicy: size: "100KB" diff --git a/tests/kafkatest/services/kafka/templates/log4j2.yaml b/tests/kafkatest/services/kafka/templates/log4j2.yaml index d3aad72df9569..85a0b7b1d36a0 100644 --- a/tests/kafkatest/services/kafka/templates/log4j2.yaml +++ b/tests/kafkatest/services/kafka/templates/log4j2.yaml @@ -16,7 +16,7 @@ Configuration: Properties: Property: - name: "log_dir" - value: "/var/log/kafka" + value: {{ log_dir }} - name: "log_level" value: "DEBUG" - name: "logPattern" diff --git a/tests/kafkatest/services/kafka/util.py b/tests/kafkatest/services/kafka/util.py index 0965fd9d4e4c3..3da6b794acd53 100644 --- a/tests/kafkatest/services/kafka/util.py +++ b/tests/kafkatest/services/kafka/util.py @@ -30,4 +30,5 @@ def fix_opts_for_new_jvm(node): return "" - +# -D +# fileName diff --git a/tests/kafkatest/services/templates/connect_log4j.properties b/tests/kafkatest/services/templates/connect_log4j.properties index 0585cc5afacfb..9c90543ebe982 100644 --- a/tests/kafkatest/services/templates/connect_log4j.properties +++ b/tests/kafkatest/services/templates/connect_log4j.properties @@ -23,4 +23,4 @@ log4j.appender.FILE.File={{ log_file }} log4j.appender.FILE.ImmediateFlush=true log4j.appender.FILE.Append=true log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n \ No newline at end of file +log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n diff --git a/tests/kafkatest/services/templates/connect_log4j2.yaml b/tests/kafkatest/services/templates/connect_log4j2.yaml index 
873fbd31ece65..e05bc6b8d9a4c 100644 --- a/tests/kafkatest/services/templates/connect_log4j2.yaml +++ b/tests/kafkatest/services/templates/connect_log4j2.yaml @@ -28,13 +28,15 @@ Configuration: RollingFile: - name: FILE fileName: {{ log_file }} + filePattern: "test.log.%d{yyyy-MM-dd-HH}" append: true immediateFlush: true PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + Policies: + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 Loggers: Root: diff --git a/tests/kafkatest/services/templates/tools_log4j2.yaml b/tests/kafkatest/services/templates/tools_log4j2.yaml index 081c0b53a282d..ff3bbde954df9 100644 --- a/tests/kafkatest/services/templates/tools_log4j2.yaml +++ b/tests/kafkatest/services/templates/tools_log4j2.yaml @@ -31,9 +31,9 @@ Configuration: Logger: # Add additional loggers dynamically if defined - {% if loggers is defined %} - {% for logger, log_level in loggers.items() %} -- name: {{ logger }} - level: {{ log_level }} - {% endfor %} - {% endif %} \ No newline at end of file + {% if loggers is defined %} + {% for logger, log_level in loggers.items() %} + - name: {{ logger }} + level: {{ log_level }} + {% endfor %} + {% endif %} \ No newline at end of file diff --git a/tests/kafkatest/tests/streams/templates/log4j2_template.yaml b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml index 757a4ec4421bb..9ac96af9d1691 100644 --- a/tests/kafkatest/tests/streams/templates/log4j2_template.yaml +++ b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml @@ -19,6 +19,7 @@ Configuration: File: name: FILE fileName: {{ log_file }} + filePattern: "streams-templates-%d{yyyy-MM-dd}.log" append: true immediateFlush: true PatternLayout: @@ -31,7 +32,6 @@ Configuration: - ref: FILE Logger: - # 動態生成的 Logger 區域 {% if loggers is defined %} {% for logger, log_level in loggers.items() %} - name: {{ logger }} From 5761828af61a4103e69f381d8b7262d4f4a89abd Mon Sep 17 00:00:00 2001 From: frankvicky Date: 
Wed, 20 Nov 2024 23:31:24 +0800 Subject: [PATCH 31/46] KAFKA-9366: fix typo and compact the single policy --- config/connect-log4j2.yaml | 7 +-- config/log4j2.yaml | 42 ++++++------- storage/src/test/resources/log4j2.yaml | 5 +- .../src/test/resources/log4j2.yaml | 5 +- .../services/kafka/templates/log4j2.yaml | 60 ++++++++----------- .../services/templates/connect_log4j2.yaml | 7 +-- 6 files changed, 52 insertions(+), 74 deletions(-) diff --git a/config/connect-log4j2.yaml b/config/connect-log4j2.yaml index 44e95c55fb537..89a9a96736587 100644 --- a/config/connect-log4j2.yaml +++ b/config/connect-log4j2.yaml @@ -33,10 +33,9 @@ Configuration: filePattern: "${sys:kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 Loggers: Root: level: INFO diff --git a/config/log4j2.yaml b/config/log4j2.yaml index 031ca4e0dba7d..2b000d407e2f2 100644 --- a/config/log4j2.yaml +++ b/config/log4j2.yaml @@ -38,60 +38,54 @@ Configuration: filePattern: "${sys:kafka.logs.dir}/server.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Polices: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # State Change appender - name: StateChangeAppender fileName: "${sys:kafka.logs.dir}/state-change.log" filePattern: "${sys:kafka.logs.dir}/stage-change.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Polices: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Request appender - name: RequestAppender fileName: "${sys:kafka.logs.dir}/kafka-request.log" filePattern: "${sys:kafka.logs.dir}/kafka-request.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Polices: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: 
true + interval: 1 # Cleaner appender - name: CleanerAppender fileName: "${sys:kafka.logs.dir}/log-cleaner.log" filePattern: "${sys:kafka.logs.dir}/log-cleaner.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Polices: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Controller appender - name: ControllerAppender fileName: "${sys:kafka.logs.dir}/controller.log" filePattern: "${sys:kafka.logs.dir}/controller.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Polices: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Authorizer appender - name: AuthorizerAppender fileName: "${sys:kafka.logs.dir}/kafka-authorizer.log" filePattern: "${sys:kafka.logs.dir}/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Polices: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 # Loggers configuration # See: https://logging.apache.org/log4j/2.x/manual/configuration.html#configuring-loggers diff --git a/storage/src/test/resources/log4j2.yaml b/storage/src/test/resources/log4j2.yaml index 4117e2f148611..e2050ad723d29 100644 --- a/storage/src/test/resources/log4j2.yaml +++ b/storage/src/test/resources/log4j2.yaml @@ -32,9 +32,8 @@ Configuration: filePattern: "storage-%d{yyyy-MM-dd}.log" PatternLayout: pattern: "${fileLogPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Loggers: Root: diff --git a/streams/streams-scala/src/test/resources/log4j2.yaml b/streams/streams-scala/src/test/resources/log4j2.yaml index 708c85050d081..6e1d18834bed8 100644 --- a/streams/streams-scala/src/test/resources/log4j2.yaml +++ b/streams/streams-scala/src/test/resources/log4j2.yaml @@ -28,9 +28,8 @@ Configuration: filePattern: "streams-scala-%d{yyyy-MM-dd}.log" PatternLayout: 
pattern: "${logPattern}" - Policies: - SizeBasedTriggeringPolicy: - size: "100KB" + SizeBasedTriggeringPolicy: + size: "100KB" DefaultRolloverStrategy: max: 1 diff --git a/tests/kafkatest/services/kafka/templates/log4j2.yaml b/tests/kafkatest/services/kafka/templates/log4j2.yaml index 85a0b7b1d36a0..01a96705b1f32 100644 --- a/tests/kafkatest/services/kafka/templates/log4j2.yaml +++ b/tests/kafkatest/services/kafka/templates/log4j2.yaml @@ -34,9 +34,8 @@ Configuration: filePattern: "${sys:log_dir}/info/server.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: INFO @@ -46,9 +45,8 @@ Configuration: filePattern: "${sys:log_dir}/debug/server.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: DEBUG @@ -58,9 +56,8 @@ Configuration: filePattern: "${sys:log_dir}/info/state-change.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: INFO @@ -70,9 +67,8 @@ Configuration: filePattern: "${sys:log_dir}/debug/state-change.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: DEBUG @@ -82,9 +78,8 @@ Configuration: filePattern: "${sys:log_dir}/info/kafka-request.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: INFO @@ -94,9 +89,8 @@ Configuration: filePattern: "${sys:log_dir}/debug/kafka-request.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - 
TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: DEBUG @@ -106,9 +100,8 @@ Configuration: filePattern: "${sys:log_dir}/info/log-cleaner.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: INFO @@ -118,9 +111,8 @@ Configuration: filePattern: "${sys:log_dir}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: DEBUG @@ -130,9 +122,8 @@ Configuration: filePattern: "${sys:log_dir}/info/controller.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: INFO @@ -142,9 +133,8 @@ Configuration: filePattern: "${sys:log_dir}/debug/controller.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: DEBUG @@ -154,9 +144,8 @@ Configuration: filePattern: "${sys:log_dir}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: INFO @@ -166,9 +155,8 @@ Configuration: filePattern: "${sys:log_dir}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - interval: 1 + TimeBasedTriggeringPolicy: + interval: 1 Filters: ThresholdFilter: level: DEBUG diff --git a/tests/kafkatest/services/templates/connect_log4j2.yaml b/tests/kafkatest/services/templates/connect_log4j2.yaml index e05bc6b8d9a4c..5029452447e02 100644 --- 
a/tests/kafkatest/services/templates/connect_log4j2.yaml +++ b/tests/kafkatest/services/templates/connect_log4j2.yaml @@ -33,10 +33,9 @@ Configuration: immediateFlush: true PatternLayout: pattern: "${logPattern}" - Policies: - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 + TimeBasedTriggeringPolicy: + modulate: true + interval: 1 Loggers: Root: From 40a80f46ec4135cf775f389cc74cb1d24e612ad1 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 21 Nov 2024 14:26:09 +0800 Subject: [PATCH 32/46] KAFKA-9366: update e2e --- tests/kafkatest/services/connect.py | 13 +++----- tests/kafkatest/services/console_consumer.py | 7 +++-- tests/kafkatest/services/kafka/kafka.py | 9 +++--- tests/kafkatest/services/kafka/util.py | 14 +++++++-- .../performance/consumer_performance.py | 6 ++-- .../performance/end_to_end_latency.py | 7 ++--- .../performance/producer_performance.py | 6 ++-- tests/kafkatest/services/streams.py | 24 +++++++++----- .../services/templates/tools_log4j.properties | 2 +- .../services/transactional_message_copier.py | 7 +++-- tests/kafkatest/services/trogdor/trogdor.py | 7 +++-- .../kafkatest/services/verifiable_consumer.py | 5 +-- .../kafkatest/services/verifiable_producer.py | 6 ++-- .../streams/streams_relational_smoke_test.py | 10 ++++-- .../templates/log4j_template.properties | 31 +++++++++++++++++++ 15 files changed, 104 insertions(+), 50 deletions(-) create mode 100644 tests/kafkatest/tests/streams/templates/log4j_template.properties diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py index d4078500754b5..deb7cf0c7d761 100644 --- a/tests/kafkatest/services/connect.py +++ b/tests/kafkatest/services/connect.py @@ -25,8 +25,7 @@ from ducktape.utils.util import wait_until from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin -from kafkatest.services.kafka.util import fix_opts_for_new_jvm -from kafkatest.version import LATEST_3_9 +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, 
get_log4j_config_param, get_log4j_config_for_connect class ConnectServiceBase(KafkaPathResolverMixin, Service): @@ -341,7 +340,7 @@ def node(self): return self.nodes[0] def start_cmd(self, node, connector_configs): - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE + cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), self.LOG4J_CONFIG_FILE) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["connect_heap_dump_file"]["path"] other_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -365,8 +364,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - log4j_path = 'connect_log4j2.yaml' if node.version > LATEST_3_9 else 'connect_log4j.properties' - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(log4j_path, log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) remote_connector_configs = [] for idx, template in enumerate(self.connector_config_templates): target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties") @@ -402,7 +400,7 @@ def __init__(self, context, num_nodes, kafka, files, offsets_topic="connect-offs # connector_configs argument is intentionally ignored in distributed service. 
def start_cmd(self, node, connector_configs): - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG_FILE + cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), self.LOG4J_CONFIG_FILE) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["connect_heap_dump_file"]["path"] other_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -423,8 +421,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - log4j_path = 'connect_log4j2.yaml' if node.version > LATEST_3_9 else 'connect_log4j.properties' - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(log4j_path, log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) if self.connector_config_templates: raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API") diff --git a/tests/kafkatest/services/console_consumer.py b/tests/kafkatest/services/console_consumer.py index 3e65efc3483a5..5035c07728983 100644 --- a/tests/kafkatest/services/console_consumer.py +++ b/tests/kafkatest/services/console_consumer.py @@ -22,7 +22,7 @@ from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin from kafkatest.services.monitor.jmx import JmxMixin, JmxTool from kafkatest.version import DEV_BRANCH, LATEST_3_7 -from kafkatest.services.kafka.util import fix_opts_for_new_jvm +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools """ The console consumer is a tool that reads data from Kafka and outputs it to standard output. 
@@ -146,6 +146,7 @@ def start_cmd(self, node): args['stdout'] = ConsoleConsumer.STDOUT_CAPTURE args['stderr'] = ConsoleConsumer.STDERR_CAPTURE args['log_dir'] = ConsoleConsumer.LOG_DIR + args['log4j_param'] = get_log4j_config_param(node) args['log4j_config'] = ConsoleConsumer.LOG4J_CONFIG args['config_file'] = ConsoleConsumer.CONFIG_FILE args['stdout'] = ConsoleConsumer.STDOUT_CAPTURE @@ -160,7 +161,7 @@ def start_cmd(self, node): cmd = fix_opts_for_new_jvm(node) cmd += "export JMX_PORT=%(jmx_port)s; " \ "export LOG_DIR=%(log_dir)s; " \ - "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j_config)s\"; " \ + "export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j_config)s\"; " \ "export KAFKA_OPTS=%(kafka_opts)s; " \ "%(console_consumer)s " \ "--topic %(topic)s " \ @@ -226,7 +227,7 @@ def _worker(self, idx, node): node.account.create_file(ConsoleConsumer.CONFIG_FILE, prop_file) # Create and upload log properties - log_config = self.render('tools_log4j.properties', log_file=ConsoleConsumer.LOG_FILE) + log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsoleConsumer.LOG_FILE) node.account.create_file(ConsoleConsumer.LOG4J_CONFIG, log_config) # Run and capture output diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index 796985c8fc822..60e6811f54ef7 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -31,9 +31,9 @@ from kafkatest.services.security.minikdc import MiniKdc from kafkatest.services.security.listener_security_config import ListenerSecurityConfig from kafkatest.services.security.security_config import SecurityConfig -from kafkatest.version import DEV_BRANCH, LATEST_3_9 +from kafkatest.version import DEV_BRANCH from kafkatest.version import KafkaVersion -from kafkatest.services.kafka.util import fix_opts_for_new_jvm +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config class 
KafkaListener: @@ -805,7 +805,7 @@ def start_cmd(self, node): kafka_mode = self.context.globals.get("kafka_mode", "") cmd = f"export KAFKA_MODE={kafka_mode}; " cmd += "export JMX_PORT=%d; " % self.jmx_port - cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % self.LOG4J_CONFIG + cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), self.LOG4J_CONFIG) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["kafka_heap_dump_file"]["path"] security_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -874,8 +874,7 @@ def start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - log4j_path = 'connect_log4j2.yaml' if node.version > LATEST_3_9 else 'connect_log4j.properties' - node.account.create_file(self.LOG4J_CONFIG, self.render(log4j_path, log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + node.account.create_file(self.LOG4J_CONFIG, self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary diff --git a/tests/kafkatest/services/kafka/util.py b/tests/kafkatest/services/kafka/util.py index 3da6b794acd53..adadc0ed1e3af 100644 --- a/tests/kafkatest/services/kafka/util.py +++ b/tests/kafkatest/services/kafka/util.py @@ -16,6 +16,7 @@ from collections import namedtuple from kafkatest.utils.remote_account import java_version +from kafkatest.version import LATEST_4_0 TopicPartition = namedtuple('TopicPartition', ['topic', 'partition']) @@ -30,5 +31,14 @@ def fix_opts_for_new_jvm(node): return "" -# -D -# fileName +def get_log4j_config_param(node): + return '-Dlog4j2.configurationFile=' if node.version >= LATEST_4_0 else '-Dlog4j.configuration=file:' + +def get_log4j_config(node): + return 'log4j2.yaml' if node.version >= LATEST_4_0 else 'log4j.properties' + +def 
get_log4j_config_for_connect(node): + return 'connect_log4j2.yaml' if node.version >= LATEST_4_0 else 'connect_log4j.properties' + +def get_log4j_config_for_tools(node): + return 'tools_log4j2.yaml' if node.version >= LATEST_4_0 else 'tools_log4j.properties' diff --git a/tests/kafkatest/services/performance/consumer_performance.py b/tests/kafkatest/services/performance/consumer_performance.py index eea91cbfd90c5..7ad4f1ff0eef8 100644 --- a/tests/kafkatest/services/performance/consumer_performance.py +++ b/tests/kafkatest/services/performance/consumer_performance.py @@ -16,7 +16,7 @@ import os -from kafkatest.services.kafka.util import fix_opts_for_new_jvm +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools from kafkatest.services.performance import PerformanceService from kafkatest.version import V_2_5_0, DEV_BRANCH @@ -111,7 +111,7 @@ def start_cmd(self, node): cmd = fix_opts_for_new_jvm(node) cmd += "export LOG_DIR=%s;" % ConsumerPerformanceService.LOG_DIR cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts - cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\";" % ConsumerPerformanceService.LOG4J_CONFIG + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\";" % (get_log4j_config_param(node), ConsumerPerformanceService.LOG4J_CONFIG) cmd += " %s" % self.path.script("kafka-consumer-perf-test.sh", node) for key, value in self.args(node.version).items(): cmd += " --%s %s" % (key, value) @@ -128,7 +128,7 @@ def start_cmd(self, node): def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % ConsumerPerformanceService.PERSISTENT_ROOT, allow_fail=False) - log_config = self.render('tools_log4j.properties', log_file=ConsumerPerformanceService.LOG_FILE) + log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsumerPerformanceService.LOG_FILE) node.account.create_file(ConsumerPerformanceService.LOG4J_CONFIG, log_config) 
node.account.create_file(ConsumerPerformanceService.CONFIG_FILE, str(self.security_config)) self.security_config.setup_node(node) diff --git a/tests/kafkatest/services/performance/end_to_end_latency.py b/tests/kafkatest/services/performance/end_to_end_latency.py index e7e0100e5114c..6d5312a09b21a 100644 --- a/tests/kafkatest/services/performance/end_to_end_latency.py +++ b/tests/kafkatest/services/performance/end_to_end_latency.py @@ -15,9 +15,8 @@ import os -from kafkatest.services.kafka.util import fix_opts_for_new_jvm +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools from kafkatest.services.performance import PerformanceService -from kafkatest.services.security.security_config import SecurityConfig from kafkatest.version import get_version, V_3_4_0, DEV_BRANCH @@ -76,7 +75,7 @@ def start_cmd(self, node): }) cmd = fix_opts_for_new_jvm(node) - cmd += "export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % EndToEndLatencyService.LOG4J_CONFIG + cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), EndToEndLatencyService.LOG4J_CONFIG) cmd += "KAFKA_OPTS=%(kafka_opts)s %(kafka_run_class)s %(java_class_name)s " % args cmd += "%(bootstrap_servers)s %(topic)s %(num_records)d %(acks)d %(message_bytes)d %(config_file)s" % args @@ -88,7 +87,7 @@ def start_cmd(self, node): def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % EndToEndLatencyService.PERSISTENT_ROOT, allow_fail=False) - log_config = self.render('tools_log4j.properties', log_file=EndToEndLatencyService.LOG_FILE) + log_config = self.render(get_log4j_config_for_tools(node), log_file=EndToEndLatencyService.LOG_FILE) node.account.create_file(EndToEndLatencyService.LOG4J_CONFIG, log_config) client_config = str(self.security_config) diff --git a/tests/kafkatest/services/performance/producer_performance.py b/tests/kafkatest/services/performance/producer_performance.py index acb0aec865085..e11e6412b6c9c 100644 --- 
a/tests/kafkatest/services/performance/producer_performance.py +++ b/tests/kafkatest/services/performance/producer_performance.py @@ -19,7 +19,7 @@ from ducktape.cluster.remoteaccount import RemoteCommandError from kafkatest.directory_layout.kafka_path import TOOLS_JAR_NAME, TOOLS_DEPENDANT_TEST_LIBS_JAR_NAME -from kafkatest.services.kafka.util import fix_opts_for_new_jvm +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools from kafkatest.services.monitor.http import HttpMetricsCollector from kafkatest.services.performance import PerformanceService from kafkatest.services.security.security_config import SecurityConfig @@ -90,7 +90,7 @@ def start_cmd(self, node): cmd += "for file in %s; do CLASSPATH=$CLASSPATH:$file; done; " % jar cmd += "export CLASSPATH; " - cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % ProducerPerformanceService.LOG4J_CONFIG + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), ProducerPerformanceService.LOG4J_CONFIG) cmd += "KAFKA_OPTS=%(kafka_opts)s KAFKA_HEAP_OPTS=\"-XX:+HeapDumpOnOutOfMemoryError\" %(kafka_run_class)s org.apache.kafka.tools.ProducerPerformance " \ "--topic %(topic)s --num-records %(num_records)d --record-size %(record_size)d --throughput %(throughput)d --producer-props bootstrap.servers=%(bootstrap_servers)s client.id=%(client_id)s %(metrics_props)s" % args @@ -119,7 +119,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % ProducerPerformanceService.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', log_file=ProducerPerformanceService.LOG_FILE) + log_config = self.render(get_log4j_config_for_tools(node), log_file=ProducerPerformanceService.LOG_FILE) node.account.create_file(ProducerPerformanceService.LOG4J_CONFIG, log_config) cmd = self.start_cmd(node) diff --git a/tests/kafkatest/services/streams.py 
b/tests/kafkatest/services/streams.py index 3848fea686dd2..a455032f86dfe 100644 --- a/tests/kafkatest/services/streams.py +++ b/tests/kafkatest/services/streams.py @@ -22,6 +22,7 @@ from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin from kafkatest.services.kafka import KafkaConfig from kafkatest.services.monitor.jmx import JmxMixin +from .kafka.util import get_log4j_config_param, get_log4j_config_for_tools STATE_DIR = "state.dir" @@ -285,10 +286,11 @@ def start_cmd(self, node): args['stdout'] = self.STDOUT_FILE args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE + args['log4j_param'] = get_log4j_config_param(node) args['log4j'] = self.LOG4J_CONFIG_FILE args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ "INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \ " %(config_file)s %(user_test_args1)s %(user_test_args2)s %(user_test_args3)s" \ " %(user_test_args4)s & echo $! 
>&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args @@ -305,7 +307,7 @@ def start_node(self, node): node.account.mkdirs(self.PERSISTENT_ROOT) prop_file = self.prop_file() node.account.create_file(self.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render('tools_log4j.properties', log_file=self.LOG_FILE)) + node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(get_log4j_config_for_tools(node), log_file=self.LOG_FILE)) self.logger.info("Starting StreamsTest process on " + str(node.account)) with node.account.monitor_log(self.STDOUT_FILE) as monitor: @@ -363,11 +365,12 @@ def start_cmd(self, node): args['stdout'] = self.STDOUT_FILE args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE + args['log4j_param'] = get_log4j_config_param(node) args['log4j'] = self.LOG4J_CONFIG_FILE args['version'] = self.KAFKA_STREAMS_VERSION args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\";" \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ " INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s" \ " %(kafka_run_class)s %(streams_class_name)s" \ " %(config_file)s %(user_test_args1)s" \ @@ -419,11 +422,12 @@ def start_cmd(self, node): args['stdout'] = self.STDOUT_FILE args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE + args['log4j_param'] = get_log4j_config_param(node) args['log4j'] = self.LOG4J_CONFIG_FILE args['disable_auto_terminate'] = self.DISABLE_AUTO_TERMINATE args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ "INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \ " %(config_file)s %(user_test_args1)s %(disable_auto_terminate)s" \ " & echo $! 
>&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args @@ -496,10 +500,11 @@ def start_cmd(self, node): args['stdout'] = self.STDOUT_FILE args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE + args['log4j_param'] = get_log4j_config_param(node) args['log4j'] = self.LOG4J_CONFIG_FILE args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ "INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \ " %(config_file)s %(user_test_args1)s %(user_test_args2)s %(user_test_args3)s" \ " %(user_test_args4)s & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args @@ -535,12 +540,13 @@ def start_cmd(self, node): args['stdout'] = self.STDOUT_FILE args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE + args['log4j_param'] = get_log4j_config_param(node) args['log4j'] = self.LOG4J_CONFIG_FILE args['application.id'] = self.applicationId args['input.topics'] = self.topic args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "(export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ "%(kafka_run_class)s %(streams_class_name)s " \ "--bootstrap-server %(bootstrap.servers)s " \ "--force " \ @@ -630,11 +636,12 @@ def start_cmd(self, node): args['stdout'] = self.STDOUT_FILE args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE + args['log4j_param'] = get_log4j_config_param(node) args['log4j'] = self.LOG4J_CONFIG_FILE args['version'] = self.KAFKA_STREAMS_VERSION args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ "INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \ " 
%(kafka_run_class)s %(streams_class_name)s %(config_file)s " \ " & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args @@ -730,11 +737,12 @@ def start_cmd(self, node): args['stdout'] = self.STDOUT_FILE args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE + args['log4j_param'] = get_log4j_config_param(node) args['log4j'] = self.LOG4J_CONFIG_FILE args['version'] = self.KAFKA_STREAMS_VERSION args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ "INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \ " %(kafka_run_class)s %(streams_class_name)s %(config_file)s " \ " & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args diff --git a/tests/kafkatest/services/templates/tools_log4j.properties b/tests/kafkatest/services/templates/tools_log4j.properties index 117fc919f99bd..3f83b4220a1f5 100644 --- a/tests/kafkatest/services/templates/tools_log4j.properties +++ b/tests/kafkatest/services/templates/tools_log4j.properties @@ -28,4 +28,4 @@ log4j.appender.FILE.ImmediateFlush=true # Set the append to true log4j.appender.FILE.Append=true log4j.appender.FILE.layout=org.apache.log4j.PatternLayout -log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n \ No newline at end of file +log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n diff --git a/tests/kafkatest/services/transactional_message_copier.py b/tests/kafkatest/services/transactional_message_copier.py index 564a23fdcc389..75646dad52ed1 100644 --- a/tests/kafkatest/services/transactional_message_copier.py +++ b/tests/kafkatest/services/transactional_message_copier.py @@ -22,6 +22,9 @@ from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin from ducktape.cluster.remoteaccount import RemoteCommandError +from kafkatest.services.kafka.util import get_log4j_config_param, 
get_log4j_config_for_tools + + class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService): """This service wraps org.apache.kafka.tools.TransactionalMessageCopier for use in system testing. @@ -75,7 +78,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % TransactionalMessageCopier.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', + log_config = self.render(get_log4j_config_for_tools(node), log_file=TransactionalMessageCopier.LOG_FILE) node.account.create_file(TransactionalMessageCopier.LOG4J_CONFIG, log_config) # Configure security @@ -114,7 +117,7 @@ def _worker(self, idx, node): def start_cmd(self, node, idx): cmd = "export LOG_DIR=%s;" % TransactionalMessageCopier.LOG_DIR cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts - cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % TransactionalMessageCopier.LOG4J_CONFIG + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), TransactionalMessageCopier.LOG4J_CONFIG) cmd += self.path.script("kafka-run-class.sh", node) + " org.apache.kafka.tools." 
+ "TransactionalMessageCopier" cmd += " --broker-list %s" % self.kafka.bootstrap_servers(self.security_config.security_protocol) cmd += " --transactional-id %s" % self.transactional_id diff --git a/tests/kafkatest/services/trogdor/trogdor.py b/tests/kafkatest/services/trogdor/trogdor.py index 3b941fe9059eb..f796cdeeb5dec 100644 --- a/tests/kafkatest/services/trogdor/trogdor.py +++ b/tests/kafkatest/services/trogdor/trogdor.py @@ -22,6 +22,7 @@ from ducktape.services.service import Service from ducktape.utils.util import wait_until from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin +from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config class TrogdorService(KafkaPathResolverMixin, Service): @@ -142,7 +143,7 @@ def start_node(self, node): def _start_coordinator_node(self, node): node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES, - self.render('log4j.properties', + self.render(get_log4j_config(node), log_path=TrogdorService.COORDINATOR_LOG)) self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR, TrogdorService.COORDINATOR_LOG4J_PROPERTIES, @@ -151,7 +152,7 @@ def _start_coordinator_node(self, node): def _start_agent_node(self, node): node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES, - self.render('log4j.properties', + self.render(get_log4j_config(node), log_path=TrogdorService.AGENT_LOG)) self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR, TrogdorService.AGENT_LOG4J_PROPERTIES, @@ -160,7 +161,7 @@ def _start_agent_node(self, node): def _start_trogdor_daemon(self, daemon_name, stdout_stderr_capture_path, log4j_properties_path, log_path, node): - cmd = "export KAFKA_LOG4J_OPTS='-Dlog4j.configuration=file:%s'; " % log4j_properties_path + cmd = "export KAFKA_LOG4J_OPTS='%s%s'; " % (get_log4j_config_param(node), log4j_properties_path) cmd += "%s %s --%s.config %s --node-name %s 1>> %s 2>> %s &" % \ (self.path.script("trogdor.sh", node), 
daemon_name, diff --git a/tests/kafkatest/services/verifiable_consumer.py b/tests/kafkatest/services/verifiable_consumer.py index 7e81ca1f7ceea..101e507f7f564 100644 --- a/tests/kafkatest/services/verifiable_consumer.py +++ b/tests/kafkatest/services/verifiable_consumer.py @@ -20,6 +20,7 @@ from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin from kafkatest.services.kafka import TopicPartition, consumer_group +from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools from kafkatest.services.verifiable_client import VerifiableClientMixin from kafkatest.version import DEV_BRANCH, V_2_3_0, V_2_3_1, V_3_7_0, V_4_0_0 @@ -298,7 +299,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % VerifiableConsumer.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', log_file=VerifiableConsumer.LOG_FILE) + log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableConsumer.LOG_FILE) node.account.create_file(VerifiableConsumer.LOG4J_CONFIG, log_config) # Create and upload config file @@ -382,7 +383,7 @@ def start_cmd(self, node): cmd = "" cmd += "export LOG_DIR=%s;" % VerifiableConsumer.LOG_DIR cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts - cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % VerifiableConsumer.LOG4J_CONFIG + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), VerifiableConsumer.LOG4J_CONFIG) cmd += self.impl.exec_cmd(node) if self.on_record_consumed: cmd += " --verbose" diff --git a/tests/kafkatest/services/verifiable_producer.py b/tests/kafkatest/services/verifiable_producer.py index ea6292d57725e..d7a12c91f171d 100644 --- a/tests/kafkatest/services/verifiable_producer.py +++ b/tests/kafkatest/services/verifiable_producer.py @@ -24,7 +24,7 @@ from kafkatest.services.verifiable_client import VerifiableClientMixin from kafkatest.utils import 
is_int, is_int_with_prefix from kafkatest.version import get_version, V_2_5_0, DEV_BRANCH -from kafkatest.services.kafka.util import fix_opts_for_new_jvm +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, BackgroundThreadService): @@ -127,7 +127,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % VerifiableProducer.PERSISTENT_ROOT, allow_fail=False) # Create and upload log properties - log_config = self.render('tools_log4j.properties', log_file=VerifiableProducer.LOG_FILE) + log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableProducer.LOG_FILE) node.account.create_file(VerifiableProducer.LOG4J_CONFIG, log_config) # Configure security @@ -222,7 +222,7 @@ def start_cmd(self, node, idx): cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts cmd += fix_opts_for_new_jvm(node) - cmd += " export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%s\"; " % VerifiableProducer.LOG4J_CONFIG + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), VerifiableProducer.LOG4J_CONFIG) cmd += self.impl.exec_cmd(node) version = get_version(node) if version >= V_2_5_0: diff --git a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py index 0e7c4e921ffba..4488d24ac6521 100644 --- a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py +++ b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py @@ -18,8 +18,10 @@ from ducktape.mark.resource import cluster from ducktape.utils.util import wait_until from kafkatest.services.kafka import quorum +from kafkatest.services.kafka.util import get_log4j_config_param from kafkatest.services.streams import StreamsTestBaseService from kafkatest.tests.kafka_test import KafkaTest +from kafkatest.version import LATEST_4_0 class 
StreamsRelationalSmokeTestService(StreamsTestBaseService): @@ -33,13 +35,14 @@ def __init__(self, test_context, kafka, mode, nodeId, processing_guarantee): self.mode = mode self.nodeId = nodeId self.processing_guarantee = processing_guarantee - self.log4j_template = 'log4j2_template.properties' + self.log4j_template = "log4j2_template.yaml" if (self.node.version >= LATEST_4_0) else "log4j_template.properties" def start_cmd(self, node): - return "( export KAFKA_LOG4J_OPTS=\"-Dlog4j.configuration=file:%(log4j)s\"; " \ + return "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ "INCLUDE_TEST_JARS=true %(kafka_run_class)s org.apache.kafka.streams.tests.RelationalSmokeTest " \ " %(mode)s %(kafka)s %(nodeId)s %(processing_guarantee)s %(state_dir)s" \ " & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % { + "log4j_param": get_log4j_config_param(node), "log4j": self.LOG4J_CONFIG_FILE, "kafka_run_class": self.path.script("kafka-run-class.sh", node), "mode": self.mode, @@ -55,7 +58,8 @@ def start_cmd(self, node): def start_node(self, node): node.account.mkdirs(self.PERSISTENT_ROOT) node.account.create_file(self.LOG4J_CONFIG_FILE, - self.render("log4j2_template.properties", log_file=self.LOG_FILE)) + self.render("log4j2_template.yaml" if node.version >= LATEST_4_0 else "log4j_template.properties", + log_file=self.LOG_FILE)) self.logger.info("Starting process on " + str(node.account)) node.account.ssh(self.start_cmd(node)) diff --git a/tests/kafkatest/tests/streams/templates/log4j_template.properties b/tests/kafkatest/tests/streams/templates/log4j_template.properties new file mode 100644 index 0000000000000..3f83b4220a1f5 --- /dev/null +++ b/tests/kafkatest/tests/streams/templates/log4j_template.properties @@ -0,0 +1,31 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. 
+# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Define the root logger with appender file +log4j.rootLogger = {{ log_level|default("INFO") }}, FILE + +{% if loggers is defined %} +{% for logger, log_level in loggers.items() %} +log4j.logger.{{ logger }}={{ log_level }} +{% endfor %} +{% endif %} + +log4j.appender.FILE=org.apache.log4j.FileAppender +log4j.appender.FILE.File={{ log_file }} +log4j.appender.FILE.ImmediateFlush=true +# Set the append to true +log4j.appender.FILE.Append=true +log4j.appender.FILE.layout=org.apache.log4j.PatternLayout +log4j.appender.FILE.layout.conversionPattern=[%d] %p %m (%c)%n From a6fa0ace48efc0fbcdab98eefe28198a0566bf47 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 21 Nov 2024 16:04:11 +0800 Subject: [PATCH 33/46] KAFKA-9366: update script --- bin/connect-distributed.sh | 6 +++--- bin/connect-mirror-maker.sh | 6 +++--- bin/connect-standalone.sh | 6 +++--- bin/kafka-server-start.sh | 6 +++--- bin/windows/connect-distributed.bat | 6 +++--- bin/windows/connect-standalone.bat | 6 +++--- bin/windows/kafka-server-start.bat | 6 +++--- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh index 7d77fce737f45..5074ca2bef7bc 100755 --- a/bin/connect-distributed.sh +++ b/bin/connect-distributed.sh @@ -25,10 +25,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration 
file \$KAFKA_HOME/config/connect-log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.yml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yml" +elif [ -f "$base_dir/../config/connect-log4j2.yaml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh index 511509ea438fb..daf10e5a99b50 100755 --- a/bin/connect-mirror-maker.sh +++ b/bin/connect-mirror-maker.sh @@ -25,10 +25,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.yml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yml" +elif [ -f "$base_dir/../config/connect-log4j2.yaml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh index 383f545dd57f8..6787c9b4db672 100755 --- a/bin/connect-standalone.sh +++ b/bin/connect-standalone.sh @@ -25,10 +25,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/connect-log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 + echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.yml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yml" +elif [ -f "$base_dir/../config/connect-log4j2.yaml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh index 6dc290c0b826a..f0e5336e91b57 100755 --- a/bin/kafka-server-start.sh +++ b/bin/kafka-server-start.sh @@ -24,10 +24,10 @@ base_dir=$(dirname $0) if [ -f "$base_dir/../config/log4j.properties" ]; then echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/log4j.properties >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 - echo You can also use the \$KAFKA_HOME/config/log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2 + echo You can also use the \$KAFKA_HOME/config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties" -elif [ -f "$base_dir/../config/log4j2.yml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.yml" +elif [ -f "$base_dir/../config/log4j2.yaml" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.yaml" elif [ -f "$base_dir/../config/log4j2.xml" ]; then export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.xml" fi diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat index be1dd014a8302..92f9a9cce5391 100644 --- a/bin/windows/connect-distributed.bat +++ b/bin/windows/connect-distributed.bat @@ -29,10 +29,10 @@ rem Log4j settings IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. - echo You can also use the %BASE_DIR%/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. + echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties -) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yml" ( - set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yml +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yaml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml ) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml ) diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat index 56ae686adf5c7..eacb67f4322e7 100644 --- a/bin/windows/connect-standalone.bat +++ b/bin/windows/connect-standalone.bat @@ -29,10 +29,10 @@ rem Log4j settings IF EXIST "%BASE_DIR%/config/connect-log4j.properties" ( echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. - echo You can also use the %BASE_DIR%/config/connect-log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. + echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties -) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yml" ( - set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yml +) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yaml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml ) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml ) diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat index 594f9f4087358..f7e259514d8e5 100644 --- a/bin/windows/kafka-server-start.bat +++ b/bin/windows/kafka-server-start.bat @@ -23,10 +23,10 @@ SetLocal IF EXIST "%~dp0../../config/log4j.properties" ( echo DEPRECATED: Using Log4j 1.x configuration file %~dp0../../config/log4j.properties echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. - echo You can also use the %~dp0../../config/log4j2.yml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. + echo You can also use the %~dp0../../config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties -) ELSE IF EXIST "%~dp0../../config/log4j2.yml" ( - set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.yml +) ELSE IF EXIST "%~dp0../../config/log4j2.yaml" ( + set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.yaml ) ELSE IF EXIST "%~dp0../../config/log4j2.xml" ( set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.xml ) From 17cc7fc76837cecd26ca64a32c398edeb4ccc2ec Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 21 Nov 2024 16:10:45 +0800 Subject: [PATCH 34/46] KAFKA-9366: Fix typo --- tests/kafkatest/services/kafka/kafka.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index 60e6811f54ef7..6c92aec9d28b7 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -33,7 +33,7 @@ from kafkatest.services.security.security_config import SecurityConfig from kafkatest.version import DEV_BRANCH from kafkatest.version import KafkaVersion -from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_connect +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config class KafkaListener: @@ -874,7 +874,7 @@ def start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG, self.render(get_log4j_config_for_connect(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + node.account.create_file(self.LOG4J_CONFIG, self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary From 8fa5a7318217576a59456955db6b2a391ec62b66 Mon Sep 17 00:00:00 2001 
From: frankvicky Date: Thu, 21 Nov 2024 16:31:45 +0800 Subject: [PATCH 35/46] KAFKA-9366: update util.py --- tests/kafkatest/services/kafka/util.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/kafkatest/services/kafka/util.py b/tests/kafkatest/services/kafka/util.py index adadc0ed1e3af..0ed53aae41a3a 100644 --- a/tests/kafkatest/services/kafka/util.py +++ b/tests/kafkatest/services/kafka/util.py @@ -16,7 +16,7 @@ from collections import namedtuple from kafkatest.utils.remote_account import java_version -from kafkatest.version import LATEST_4_0 +from kafkatest.version import LATEST_4_0, get_version TopicPartition = namedtuple('TopicPartition', ['topic', 'partition']) @@ -32,13 +32,13 @@ def fix_opts_for_new_jvm(node): return "" def get_log4j_config_param(node): - return '-Dlog4j2.configurationFile=' if node.version >= LATEST_4_0 else '-Dlog4j.configuration=file:' + return '-Dlog4j2.configurationFile=' if get_version(node) >= LATEST_4_0 else '-Dlog4j.configuration=file:' def get_log4j_config(node): - return 'log4j2.yaml' if node.version >= LATEST_4_0 else 'log4j.properties' + return 'log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'log4j.properties' def get_log4j_config_for_connect(node): - return 'connect_log4j2.yaml' if node.version >= LATEST_4_0 else 'connect_log4j.properties' + return 'connect_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'connect_log4j.properties' def get_log4j_config_for_tools(node): - return 'tools_log4j2.yaml' if node.version >= LATEST_4_0 else 'tools_log4j.properties' + return 'tools_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'tools_log4j.properties' From 1d06e84da61635494b1c7b8293106c694ce21f87 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 21 Nov 2024 23:15:07 +0800 Subject: [PATCH 36/46] KAFKA-9366: Fix typo --- tests/kafkatest/services/trogdor/trogdor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/kafkatest/services/trogdor/trogdor.py 
b/tests/kafkatest/services/trogdor/trogdor.py index f796cdeeb5dec..3a54b663f02de 100644 --- a/tests/kafkatest/services/trogdor/trogdor.py +++ b/tests/kafkatest/services/trogdor/trogdor.py @@ -161,7 +161,7 @@ def _start_agent_node(self, node): def _start_trogdor_daemon(self, daemon_name, stdout_stderr_capture_path, log4j_properties_path, log_path, node): - cmd = "export KAFKA_LOG4J_OPTS='%s=file:%s'; " % (get_log4j_config_param(node), log4j_properties_path) + cmd = "export KAFKA_LOG4J_OPTS='%s%s'; " % (get_log4j_config_param(node), log4j_properties_path) cmd += "%s %s --%s.config %s --node-name %s 1>> %s 2>> %s &" % \ (self.path.script("trogdor.sh", node), daemon_name, From e488b0a33fab0c50aa0414c3cce84a59937a81ac Mon Sep 17 00:00:00 2001 From: frankvicky Date: Fri, 22 Nov 2024 15:42:11 +0800 Subject: [PATCH 37/46] KAFKA-9366: update e2e log4j2.yaml --- .../services/kafka/templates/log4j2.yaml | 40 ++++++++----------- .../services/templates/connect_log4j2.yaml | 6 +-- .../services/templates/tools_log4j2.yaml | 8 ++-- .../streams/templates/log4j2_template.yaml | 1 - 4 files changed, 22 insertions(+), 33 deletions(-) diff --git a/tests/kafkatest/services/kafka/templates/log4j2.yaml b/tests/kafkatest/services/kafka/templates/log4j2.yaml index 01a96705b1f32..75592aac1bdff 100644 --- a/tests/kafkatest/services/kafka/templates/log4j2.yaml +++ b/tests/kafkatest/services/kafka/templates/log4j2.yaml @@ -17,8 +17,6 @@ Configuration: Property: - name: "log_dir" value: {{ log_dir }} - - name: "log_level" - value: "DEBUG" - name: "logPattern" value: "[%d] %p %m (%c)%n" @@ -163,114 +161,110 @@ Configuration: Loggers: Root: - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: STDOUT Logger: - name: kafka.producer.async.DefaultEventHandler - level: "${sys:log_level}" - additivity: false + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: KafkaInfoAppender - ref: KafkaDebugAppender - name: kafka.client.ClientUtils - level: 
"${sys:log_level}" - additivity: false + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: KafkaInfoAppender - ref: KafkaDebugAppender - name: kafka.perf - level: "${sys:log_level}" - additivity: false + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: KafkaInfoAppender - ref: KafkaDebugAppender - name: kafka.perf.ProducerPerformance$ProducerThread - level: "${sys:log_level}" - additivity: false + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: KafkaInfoAppender - ref: KafkaDebugAppender - name: kafka - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: KafkaInfoAppender - ref: KafkaDebugAppender - name: kafka.network.RequestChannel$ - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: RequestInfoAppender - ref: RequestDebugAppender - name: kafka.network.Processor - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: RequestInfoAppender - ref: RequestDebugAppender - name: kafka.server.KafkaApis - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: RequestInfoAppender - ref: RequestDebugAppender - name: kafka.request.logger - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: RequestInfoAppender - ref: RequestDebugAppender - name: org.apache.kafka.raft - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: ControllerInfoAppender - ref: ControllerDebugAppender - name: org.apache.kafka.controller - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} AppenderRef: - ref: ControllerInfoAppender - ref: ControllerDebugAppender - name: kafka.controller - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: ControllerInfoAppender - ref: ControllerDebugAppender - name: kafka.log.LogCleaner - level: 
"${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: CleanerInfoAppender - ref: CleanerDebugAppender - name: state.change.logger - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: StateChangeInfoAppender - ref: StateChangeDebugAppender - name: kafka.authorizer.logger - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: AuthorizerInfoAppender - ref: AuthorizerDebugAppender - name: org.apache.kafka.coordinator.group - level: "${sys:log_level}" + level: {{ log_level|default("DEBUG") }} additivity: false AppenderRef: - ref: KafkaInfoAppender diff --git a/tests/kafkatest/services/templates/connect_log4j2.yaml b/tests/kafkatest/services/templates/connect_log4j2.yaml index 5029452447e02..c96551e2eed53 100644 --- a/tests/kafkatest/services/templates/connect_log4j2.yaml +++ b/tests/kafkatest/services/templates/connect_log4j2.yaml @@ -25,17 +25,13 @@ Configuration: PatternLayout: pattern: "${logPattern}" - RollingFile: + File: - name: FILE fileName: {{ log_file }} - filePattern: "test.log.%d{yyyy-MM-dd-HH}" append: true immediateFlush: true PatternLayout: pattern: "${logPattern}" - TimeBasedTriggeringPolicy: - modulate: true - interval: 1 Loggers: Root: diff --git a/tests/kafkatest/services/templates/tools_log4j2.yaml b/tests/kafkatest/services/templates/tools_log4j2.yaml index ff3bbde954df9..35ca4ed7bedd3 100644 --- a/tests/kafkatest/services/templates/tools_log4j2.yaml +++ b/tests/kafkatest/services/templates/tools_log4j2.yaml @@ -31,9 +31,9 @@ Configuration: Logger: # Add additional loggers dynamically if defined - {% if loggers is defined %} - {% for logger, log_level in loggers.items() %} + {% if loggers is defined %} + {% for logger, log_level in loggers.items() %} - name: {{ logger }} level: {{ log_level }} - {% endfor %} - {% endif %} \ No newline at end of file + {% endfor %} + {% endif %} \ No newline at end 
of file diff --git a/tests/kafkatest/tests/streams/templates/log4j2_template.yaml b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml index 9ac96af9d1691..f94e7d437a94a 100644 --- a/tests/kafkatest/tests/streams/templates/log4j2_template.yaml +++ b/tests/kafkatest/tests/streams/templates/log4j2_template.yaml @@ -19,7 +19,6 @@ Configuration: File: name: FILE fileName: {{ log_file }} - filePattern: "streams-templates-%d{yyyy-MM-dd}.log" append: true immediateFlush: true PatternLayout: From 81131662bf2bcc7538b7d042241775a730a65c68 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Fri, 22 Nov 2024 16:44:40 +0800 Subject: [PATCH 38/46] KAFKA-9366: Fix import-control --- checkstyle/import-control.xml | 2 +- .../org/apache/kafka/clients/consumer/KafkaConsumerTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/checkstyle/import-control.xml b/checkstyle/import-control.xml index 93d566f1431b9..b347ffa134ef2 100644 --- a/checkstyle/import-control.xml +++ b/checkstyle/import-control.xml @@ -225,7 +225,7 @@ - + diff --git a/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java b/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java index 7d122c2986c0d..5d308e5d1073e 100644 --- a/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java +++ b/clients/src/test/java/org/apache/kafka/clients/consumer/KafkaConsumerTest.java @@ -102,7 +102,7 @@ import org.apache.kafka.test.MockMetricsReporter; import org.apache.kafka.test.TestUtils; -import org.apache.log4j.Level; +import org.apache.logging.log4j.Level; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Timeout; import org.junit.jupiter.params.ParameterizedTest; From 2b749e844073c6dca0b1186f54dd5b644b796335 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Fri, 22 Nov 2024 17:43:17 +0800 Subject: [PATCH 39/46] KAFKA-9366: update e2e log4j2.yaml --- tests/kafkatest/services/kafka/templates/log4j2.yaml | 12 
++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/kafkatest/services/kafka/templates/log4j2.yaml b/tests/kafkatest/services/kafka/templates/log4j2.yaml index 75592aac1bdff..78f3961445b2c 100644 --- a/tests/kafkatest/services/kafka/templates/log4j2.yaml +++ b/tests/kafkatest/services/kafka/templates/log4j2.yaml @@ -37,6 +37,7 @@ Configuration: Filters: ThresholdFilter: level: INFO + onMatch: ACCEPT - name: KafkaDebugAppender fileName: "${sys:log_dir}/debug/server.log" @@ -48,6 +49,7 @@ Configuration: Filters: ThresholdFilter: level: DEBUG + onMatch: ACCEPT - name: StateChangeInfoAppender fileName: "${sys:log_dir}/info/state-change.log" @@ -59,6 +61,7 @@ Configuration: Filters: ThresholdFilter: level: INFO + onMatch: ACCEPT - name: StateChangeDebugAppender fileName: "${sys:log_dir}/debug/state-change.log" @@ -70,6 +73,7 @@ Configuration: Filters: ThresholdFilter: level: DEBUG + onMatch: ACCEPT - name: RequestInfoAppender fileName: "${sys:log_dir}/info/kafka-request.log" @@ -81,6 +85,7 @@ Configuration: Filters: ThresholdFilter: level: INFO + onMatch: ACCEPT - name: RequestDebugAppender fileName: "${sys:log_dir}/debug/kafka-request.log" @@ -92,6 +97,7 @@ Configuration: Filters: ThresholdFilter: level: DEBUG + onMatch: ACCEPT - name: CleanerInfoAppender fileName: "${sys:log_dir}/info/log-cleaner.log" @@ -103,6 +109,7 @@ Configuration: Filters: ThresholdFilter: level: INFO + onMatch: ACCEPT - name: CleanerDebugAppender fileName: "${sys:log_dir}/debug/log-cleaner.log" @@ -114,6 +121,7 @@ Configuration: Filters: ThresholdFilter: level: DEBUG + onMatch: ACCEPT - name: ControllerInfoAppender fileName: "${sys:log_dir}/info/controller.log" @@ -125,6 +133,7 @@ Configuration: Filters: ThresholdFilter: level: INFO + onMatch: ACCEPT - name: ControllerDebugAppender fileName: "${sys:log_dir}/debug/controller.log" @@ -136,6 +145,7 @@ Configuration: Filters: ThresholdFilter: level: DEBUG + onMatch: ACCEPT - name: AuthorizerInfoAppender fileName: 
"${sys:log_dir}/info/kafka-authorizer.log" @@ -147,6 +157,7 @@ Configuration: Filters: ThresholdFilter: level: INFO + onMatch: ACCEPT - name: AuthorizerDebugAppender fileName: "${sys:log_dir}/debug/kafka-authorizer.log" @@ -158,6 +169,7 @@ Configuration: Filters: ThresholdFilter: level: DEBUG + onMatch: ACCEPT Loggers: Root: From 07d1bb12af6811d64adb8ba7927a60b86bde4570 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sun, 24 Nov 2024 20:30:12 +0800 Subject: [PATCH 40/46] KAFKA-9366: revert uncessary changes --- tests/kafkatest/services/streams.py | 10 +++++----- tests/kafkatest/services/templates/connect_log4j2.yaml | 5 ----- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/tests/kafkatest/services/streams.py b/tests/kafkatest/services/streams.py index a455032f86dfe..d6d6b1708130b 100644 --- a/tests/kafkatest/services/streams.py +++ b/tests/kafkatest/services/streams.py @@ -427,7 +427,7 @@ def start_cmd(self, node): args['disable_auto_terminate'] = self.DISABLE_AUTO_TERMINATE args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ "INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \ " %(config_file)s %(user_test_args1)s %(disable_auto_terminate)s" \ " & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args @@ -504,7 +504,7 @@ def start_cmd(self, node): args['log4j'] = self.LOG4J_CONFIG_FILE args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ "INCLUDE_TEST_JARS=true %(kafka_run_class)s %(streams_class_name)s " \ " %(config_file)s %(user_test_args1)s %(user_test_args2)s %(user_test_args3)s" \ " %(user_test_args4)s & echo $! 
>&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args @@ -546,7 +546,7 @@ def start_cmd(self, node): args['input.topics'] = self.topic args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ + cmd = "(export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ "%(kafka_run_class)s %(streams_class_name)s " \ "--bootstrap-server %(bootstrap.servers)s " \ "--force " \ @@ -641,7 +641,7 @@ def start_cmd(self, node): args['version'] = self.KAFKA_STREAMS_VERSION args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ "INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \ " %(kafka_run_class)s %(streams_class_name)s %(config_file)s " \ " & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args @@ -742,7 +742,7 @@ def start_cmd(self, node): args['version'] = self.KAFKA_STREAMS_VERSION args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) - cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\";" \ + cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ "INCLUDE_TEST_JARS=true UPGRADE_KAFKA_STREAMS_TEST_VERSION=%(version)s " \ " %(kafka_run_class)s %(streams_class_name)s %(config_file)s " \ " & echo $! 
>&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % args diff --git a/tests/kafkatest/services/templates/connect_log4j2.yaml b/tests/kafkatest/services/templates/connect_log4j2.yaml index c96551e2eed53..71f9f0f39bd95 100644 --- a/tests/kafkatest/services/templates/connect_log4j2.yaml +++ b/tests/kafkatest/services/templates/connect_log4j2.yaml @@ -20,11 +20,6 @@ Configuration: value: "[%d] %p %m (%c)%n" Appenders: - Console: - name: STDOUT - PatternLayout: - pattern: "${logPattern}" - File: - name: FILE fileName: {{ log_file }} From 87fc72010aa1ec56ac5909af8957b07222e4035a Mon Sep 17 00:00:00 2001 From: frankvicky Date: Mon, 25 Nov 2024 17:56:53 +0800 Subject: [PATCH 41/46] KAFKA-9366: Update README and e2e log4j2.yaml --- README.md | 2 +- .../services/trogdor/templates/log4j2.yaml | 15 +++------------ 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 585356ea5e2a3..74b4c935452cb 100644 --- a/README.md +++ b/README.md @@ -55,7 +55,7 @@ Follow instructions in https://kafka.apache.org/quickstart By default, there will be only small number of logs output while testing. You can adjust it by changing the `log4j2.yml` file in the module's `src/test/resources` directory. 
For example, if you want to see more logs for clients project tests, you can modify [the line](https://github.com/apache/kafka/blob/trunk/clients/src/test/resources/log4j2.yml#L35) in `clients/src/test/resources/log4j2.yml` -to `log4j.logger.org.apache.kafka=INFO` and then run: +to `level: INFO` and then run: ./gradlew cleanTest clients:test --tests NetworkClientTest diff --git a/tests/kafkatest/services/trogdor/templates/log4j2.yaml b/tests/kafkatest/services/trogdor/templates/log4j2.yaml index b6367ff2774a6..3e92fead4a6b9 100644 --- a/tests/kafkatest/services/trogdor/templates/log4j2.yaml +++ b/tests/kafkatest/services/trogdor/templates/log4j2.yaml @@ -25,10 +25,9 @@ Configuration: PatternLayout: pattern: "${logPattern}" - RollingFile: + File: - name: FILE fileName: {{ log_file }} - filePattern: "${sys:kafka.logs.dir}/connect-%d{yyyy-MM-dd-HH}.log" PatternLayout: pattern: "${logPattern}" Loggers: @@ -36,21 +35,13 @@ Configuration: level: DEBUG AppenderRef: - ref: MyFileLogger + Logger: - name: kafka level: DEBUG - additivity: false - AppenderRef: - - ref: MyFileLogger - name: org.apache.kafka level: DEBUG - additivity: false - AppenderRef: - - ref: MyFileLogger - name: org.eclipse - level: DEBUG - additivity: false - AppenderRef: - - ref: MyFileLogger + level: INFO From 6d41bcd92758c7bc02c173b98dc4c53f88633ba0 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Thu, 28 Nov 2024 00:02:39 +0800 Subject: [PATCH 42/46] KAFKA-9366: Fix the bugs of Loggers#currentLoggers --- .../src/main/java/org/apache/kafka/connect/runtime/Loggers.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java index fec90d02379c7..0c16d0d6f0160 100644 --- a/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java +++ b/connect/runtime/src/main/java/org/apache/kafka/connect/runtime/Loggers.java @@ -189,7 +189,7 
@@ List currentLoggers() { .map(LoggerConfig::getName) .distinct() .map(LogManager::getLogger) - .collect(Collectors.toUnmodifiableList()); + .collect(Collectors.toCollection(ArrayList::new)); } // visible for testing From 064fe52b9ea66522b45d4f4af447ae4de43d71b2 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Fri, 29 Nov 2024 11:26:52 +0800 Subject: [PATCH 43/46] KAFKA-9366: Update log4j2.yaml of e2e --- .../services/kafka/templates/log4j2.yaml | 80 +++++++++---------- tests/kafkatest/services/kafka/util.py | 2 +- .../performance/templates/tools_log4j2.yaml | 30 +++++++ .../services/templates/tools_log4j2.yaml | 4 +- .../services/trogdor/templates/log4j2.yaml | 9 +-- 5 files changed, 75 insertions(+), 50 deletions(-) create mode 100644 tests/kafkatest/services/performance/templates/tools_log4j2.yaml diff --git a/tests/kafkatest/services/kafka/templates/log4j2.yaml b/tests/kafkatest/services/kafka/templates/log4j2.yaml index 78f3961445b2c..22e3f118f680d 100644 --- a/tests/kafkatest/services/kafka/templates/log4j2.yaml +++ b/tests/kafkatest/services/kafka/templates/log4j2.yaml @@ -28,8 +28,8 @@ Configuration: RollingFile: - name: KafkaInfoAppender - fileName: "${sys:log_dir}/info/server.log" - filePattern: "${sys:log_dir}/info/server.log.%d{yyyy-MM-dd-HH}" + fileName: "${log_dir}/info/server.log" + filePattern: "${log_dir}/info/server.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: @@ -39,21 +39,21 @@ Configuration: level: INFO onMatch: ACCEPT - - name: KafkaDebugAppender - fileName: "${sys:log_dir}/debug/server.log" - filePattern: "${sys:log_dir}/debug/server.log.%d{yyyy-MM-dd-HH}" + - name: StateChangeInfoAppender + fileName: "${log_dir}/info/state-change.log" + filePattern: "${log_dir}/info/state-change.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: interval: 1 Filters: ThresholdFilter: - level: DEBUG + level: INFO onMatch: ACCEPT - - name: StateChangeInfoAppender - fileName: 
"${sys:log_dir}/info/state-change.log" - filePattern: "${sys:log_dir}/info/state-change.log.%d{yyyy-MM-dd-HH}" + - name: RequestInfoAppender + fileName: "${log_dir}/info/kafka-request.log" + filePattern: "${log_dir}/info/kafka-request.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: @@ -63,21 +63,21 @@ Configuration: level: INFO onMatch: ACCEPT - - name: StateChangeDebugAppender - fileName: "${sys:log_dir}/debug/state-change.log" - filePattern: "${sys:log_dir}/debug/state-change.log.%d{yyyy-MM-dd-HH}" + - name: CleanerInfoAppender + fileName: "${log_dir}/info/log-cleaner.log" + filePattern: "${log_dir}/info/log-cleaner.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: interval: 1 Filters: ThresholdFilter: - level: DEBUG + level: INFO onMatch: ACCEPT - - name: RequestInfoAppender - fileName: "${sys:log_dir}/info/kafka-request.log" - filePattern: "${sys:log_dir}/info/kafka-request.log.%d{yyyy-MM-dd-HH}" + - name: ControllerInfoAppender + fileName: "${log_dir}/info/controller.log" + filePattern: "${log_dir}/info/controller.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: @@ -87,33 +87,33 @@ Configuration: level: INFO onMatch: ACCEPT - - name: RequestDebugAppender - fileName: "${sys:log_dir}/debug/kafka-request.log" - filePattern: "${sys:log_dir}/debug/kafka-request.log.%d{yyyy-MM-dd-HH}" + - name: AuthorizerInfoAppender + fileName: "${log_dir}/info/kafka-authorizer.log" + filePattern: "${log_dir}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: interval: 1 Filters: ThresholdFilter: - level: DEBUG + level: INFO onMatch: ACCEPT - - name: CleanerInfoAppender - fileName: "${sys:log_dir}/info/log-cleaner.log" - filePattern: "${sys:log_dir}/info/log-cleaner.log.%d{yyyy-MM-dd-HH}" + - name: KafkaDebugAppender + fileName: "${log_dir}/debug/server.log" + filePattern: 
"${log_dir}/debug/server.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: interval: 1 Filters: ThresholdFilter: - level: INFO + level: DEBUG onMatch: ACCEPT - - name: CleanerDebugAppender - fileName: "${sys:log_dir}/debug/log-cleaner.log" - filePattern: "${sys:log_dir}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH}" + - name: StateChangeDebugAppender + fileName: "${log_dir}/debug/state-change.log" + filePattern: "${log_dir}/debug/state-change.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: @@ -123,21 +123,21 @@ Configuration: level: DEBUG onMatch: ACCEPT - - name: ControllerInfoAppender - fileName: "${sys:log_dir}/info/controller.log" - filePattern: "${sys:log_dir}/info/controller.log.%d{yyyy-MM-dd-HH}" + - name: RequestDebugAppender + fileName: "${log_dir}/debug/kafka-request.log" + filePattern: "${log_dir}/debug/kafka-request.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: interval: 1 Filters: ThresholdFilter: - level: INFO + level: DEBUG onMatch: ACCEPT - - name: ControllerDebugAppender - fileName: "${sys:log_dir}/debug/controller.log" - filePattern: "${sys:log_dir}/debug/controller.log.%d{yyyy-MM-dd-HH}" + - name: CleanerDebugAppender + fileName: "${log_dir}/debug/log-cleaner.log" + filePattern: "${log_dir}/debug/log-cleaner.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: @@ -147,21 +147,21 @@ Configuration: level: DEBUG onMatch: ACCEPT - - name: AuthorizerInfoAppender - fileName: "${sys:log_dir}/info/kafka-authorizer.log" - filePattern: "${sys:log_dir}/info/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" + - name: ControllerDebugAppender + fileName: "${log_dir}/debug/controller.log" + filePattern: "${log_dir}/debug/controller.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: interval: 1 Filters: ThresholdFilter: - level: INFO + level: DEBUG onMatch: ACCEPT - name: 
AuthorizerDebugAppender - fileName: "${sys:log_dir}/debug/kafka-authorizer.log" - filePattern: "${sys:log_dir}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" + fileName: "${log_dir}/debug/kafka-authorizer.log" + filePattern: "${log_dir}/debug/kafka-authorizer.log.%d{yyyy-MM-dd-HH}" PatternLayout: pattern: "${logPattern}" TimeBasedTriggeringPolicy: diff --git a/tests/kafkatest/services/kafka/util.py b/tests/kafkatest/services/kafka/util.py index 0ed53aae41a3a..0b6f5f41dd17b 100644 --- a/tests/kafkatest/services/kafka/util.py +++ b/tests/kafkatest/services/kafka/util.py @@ -32,7 +32,7 @@ def fix_opts_for_new_jvm(node): return "" def get_log4j_config_param(node): - return '-Dlog4j2.configurationFile=' if get_version(node) >= LATEST_4_0 else '-Dlog4j.configuration=file:' + return '-Dlog4j2.configurationFile=file:' if get_version(node) >= LATEST_4_0 else '-Dlog4j.configuration=file:' def get_log4j_config(node): return 'log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'log4j.properties' diff --git a/tests/kafkatest/services/performance/templates/tools_log4j2.yaml b/tests/kafkatest/services/performance/templates/tools_log4j2.yaml new file mode 100644 index 0000000000000..5c5e1099f94fd --- /dev/null +++ b/tests/kafkatest/services/performance/templates/tools_log4j2.yaml @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +Configuration: + Appenders: + File: + name: FILE + fileName: {{ log_file }} + append: true + immediateFlush: true + PatternLayout: + pattern: "[%d] %p %m (%c)%n" + + Loggers: + Root: + level: {{ log_level|default("INFO") }} + AppenderRef: + - ref: FILE diff --git a/tests/kafkatest/services/templates/tools_log4j2.yaml b/tests/kafkatest/services/templates/tools_log4j2.yaml index 35ca4ed7bedd3..2f41025d4850d 100644 --- a/tests/kafkatest/services/templates/tools_log4j2.yaml +++ b/tests/kafkatest/services/templates/tools_log4j2.yaml @@ -29,11 +29,11 @@ Configuration: AppenderRef: - ref: FILE + {% if loggers is defined %} Logger: # Add additional loggers dynamically if defined - {% if loggers is defined %} {% for logger, log_level in loggers.items() %} - name: {{ logger }} level: {{ log_level }} {% endfor %} - {% endif %} \ No newline at end of file + {% endif %} \ No newline at end of file diff --git a/tests/kafkatest/services/trogdor/templates/log4j2.yaml b/tests/kafkatest/services/trogdor/templates/log4j2.yaml index 3e92fead4a6b9..42c1aa281e783 100644 --- a/tests/kafkatest/services/trogdor/templates/log4j2.yaml +++ b/tests/kafkatest/services/trogdor/templates/log4j2.yaml @@ -20,14 +20,9 @@ Configuration: value: "[%d] %p %m (%c)%n" Appenders: - Console: - name: STDOUT - PatternLayout: - pattern: "${logPattern}" - File: - - name: FILE - fileName: {{ log_file }} + - name: MyFileLogger + fileName: {{ log_path }} PatternLayout: pattern: "${logPattern}" Loggers: From 762a54d143369fe291f25700cabd3242f4ff05c5 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sat, 30 Nov 2024 09:11:22 +0800 Subject: [PATCH 44/46] KAFKA-9366: replace tools and trogdor log4j.properties with log4j2.yaml --- .../{log4j.properties => log4j2.yaml} | 33 ++++++++++++++----- .../{log4j.properties => log4j2.yaml} | 33 ++++++++++++++----- 2 files changed, 50 insertions(+), 16 deletions(-) rename 
tools/src/test/resources/{log4j.properties => log4j2.yaml} (55%) rename trogdor/src/test/resources/{log4j.properties => log4j2.yaml} (55%) diff --git a/tools/src/test/resources/log4j.properties b/tools/src/test/resources/log4j2.yaml similarity index 55% rename from tools/src/test/resources/log4j.properties rename to tools/src/test/resources/log4j2.yaml index 3aca07dc53016..aef7e5616287f 100644 --- a/tools/src/test/resources/log4j.properties +++ b/tools/src/test/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,11 +12,28 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-log4j.rootLogger=INFO, stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" -log4j.logger.org.apache.kafka=INFO -log4j.logger.org.eclipse.jetty=INFO + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: INFO + AppenderRef: + - ref: STDOUT + + Logger: + - name: org.apache.kafka + level: ERROR + + - name: org.eclipse.jetty + level: ERROR diff --git a/trogdor/src/test/resources/log4j.properties b/trogdor/src/test/resources/log4j2.yaml similarity index 55% rename from trogdor/src/test/resources/log4j.properties rename to trogdor/src/test/resources/log4j2.yaml index 5291604d49ae5..4c3355e307e86 100644 --- a/trogdor/src/test/resources/log4j.properties +++ b/trogdor/src/test/resources/log4j2.yaml @@ -1,9 +1,9 @@ # Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with +# contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at +# the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # @@ -12,11 +12,28 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-log4j.rootLogger=TRACE, stdout -log4j.appender.stdout=org.apache.log4j.ConsoleAppender -log4j.appender.stdout.layout=org.apache.log4j.PatternLayout -log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c:%L)%n +Configuration: + Properties: + Property: + - name: "logPattern" + value: "[%d] %p %m (%c:%L)%n" -log4j.logger.org.apache.kafka=INFO -log4j.logger.org.eclipse.jetty=INFO + Appenders: + Console: + name: STDOUT + PatternLayout: + pattern: "${logPattern}" + + Loggers: + Root: + level: TRACE + AppenderRef: + - ref: STDOUT + + Logger: + - name: org.apache.kafka + level: ERROR + + - name: org.eclipse.jetty + level: ERROR From 3eb5522c3bd054e9ba78b81e08c8b0a06525774e Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sat, 30 Nov 2024 10:29:39 +0800 Subject: [PATCH 45/46] KAFKA-9366: render log4j config according to node version --- tests/kafkatest/services/connect.py | 9 ++++----- tests/kafkatest/services/console_consumer.py | 7 +++---- tests/kafkatest/services/kafka/kafka.py | 8 ++++---- tests/kafkatest/services/kafka/util.py | 9 +++++++++ .../performance/consumer_performance.py | 5 ++--- .../services/performance/end_to_end_latency.py | 5 ++--- .../performance/producer_performance.py | 5 ++--- tests/kafkatest/services/streams.py | 17 ++++++++--------- .../services/transactional_message_copier.py | 5 ++--- tests/kafkatest/services/trogdor/trogdor.py | 13 ++++++------- tests/kafkatest/services/verifiable_consumer.py | 5 ++--- tests/kafkatest/services/verifiable_producer.py | 5 ++--- .../streams/streams_relational_smoke_test.py | 6 +++--- 13 files changed, 49 insertions(+), 50 deletions(-) diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py index deb7cf0c7d761..8fc9d361bb8e3 100644 --- a/tests/kafkatest/services/connect.py +++ b/tests/kafkatest/services/connect.py @@ -38,7 +38,6 @@ class ConnectServiceBase(KafkaPathResolverMixin, Service): LOG_FILE = os.path.join(PERSISTENT_ROOT, "connect.log") STDOUT_FILE = 
os.path.join(PERSISTENT_ROOT, "connect.stdout") STDERR_FILE = os.path.join(PERSISTENT_ROOT, "connect.stderr") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "connect-log4j.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "connect.pid") EXTERNAL_CONFIGS_FILE = os.path.join(PERSISTENT_ROOT, "connect-external-configs.properties") CONNECT_REST_PORT = 8083 @@ -340,7 +339,7 @@ def node(self): return self.nodes[0] def start_cmd(self, node, connector_configs): - cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), self.LOG4J_CONFIG_FILE) + cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_connect(node)) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["connect_heap_dump_file"]["path"] other_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -364,7 +363,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) + node.account.create_file(get_log4j_config_for_connect(node), self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) remote_connector_configs = [] for idx, template in enumerate(self.connector_config_templates): target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties") @@ -400,7 +399,7 @@ def __init__(self, context, num_nodes, kafka, files, offsets_topic="connect-offs # connector_configs argument is intentionally ignored in distributed service. 
def start_cmd(self, node, connector_configs): - cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), self.LOG4J_CONFIG_FILE) + cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_connect(node)) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["connect_heap_dump_file"]["path"] other_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -421,7 +420,7 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) + node.account.create_file(get_log4j_config_for_connect(node), self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) if self.connector_config_templates: raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API") diff --git a/tests/kafkatest/services/console_consumer.py b/tests/kafkatest/services/console_consumer.py index 5035c07728983..9755faa19696d 100644 --- a/tests/kafkatest/services/console_consumer.py +++ b/tests/kafkatest/services/console_consumer.py @@ -21,7 +21,7 @@ from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin from kafkatest.services.monitor.jmx import JmxMixin, JmxTool -from kafkatest.version import DEV_BRANCH, LATEST_3_7 +from kafkatest.version import DEV_BRANCH, LATEST_3_7, get_version, LATEST_4_0 from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config_for_tools """ @@ -36,7 +36,6 @@ class ConsoleConsumer(KafkaPathResolverMixin, JmxMixin, BackgroundThreadService) STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "console_consumer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") 
LOG_FILE = os.path.join(LOG_DIR, "console_consumer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "console_consumer.properties") JMX_TOOL_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.log") JMX_TOOL_ERROR_LOG = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log") @@ -147,7 +146,7 @@ def start_cmd(self, node): args['stderr'] = ConsoleConsumer.STDERR_CAPTURE args['log_dir'] = ConsoleConsumer.LOG_DIR args['log4j_param'] = get_log4j_config_param(node) - args['log4j_config'] = ConsoleConsumer.LOG4J_CONFIG + args['log4j_config'] = get_log4j_config_for_tools(node) args['config_file'] = ConsoleConsumer.CONFIG_FILE args['stdout'] = ConsoleConsumer.STDOUT_CAPTURE args['jmx_port'] = self.jmx_port @@ -228,7 +227,7 @@ def _worker(self, idx, node): # Create and upload log properties log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsoleConsumer.LOG_FILE) - node.account.create_file(ConsoleConsumer.LOG4J_CONFIG, log_config) + node.account.create_file(get_log4j_config_for_tools(node), log_config) # Run and capture output cmd = self.start_cmd(node) diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index 6c92aec9d28b7..e0d7febc19109 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -33,7 +33,8 @@ from kafkatest.services.security.security_config import SecurityConfig from kafkatest.version import DEV_BRANCH from kafkatest.version import KafkaVersion -from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config, \ + get_log4j_config_for_kafka class KafkaListener: @@ -145,7 +146,6 @@ class for details. 
""" PERSISTENT_ROOT = "/mnt/kafka" STDOUT_STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "server-start-stdout-stderr.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "kafka-log4j.properties") # Logs such as controller.log, server.log, etc all go here OPERATIONAL_LOG_DIR = os.path.join(PERSISTENT_ROOT, "kafka-operational-logs") OPERATIONAL_LOG_INFO_DIR = os.path.join(OPERATIONAL_LOG_DIR, "info") @@ -805,7 +805,7 @@ def start_cmd(self, node): kafka_mode = self.context.globals.get("kafka_mode", "") cmd = f"export KAFKA_MODE={kafka_mode}; " cmd += "export JMX_PORT=%d; " % self.jmx_port - cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), self.LOG4J_CONFIG) + cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_kafka(node)) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["kafka_heap_dump_file"]["path"] security_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -874,7 +874,7 @@ def start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG, self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + node.account.create_file(get_log4j_config_for_kafka(node), self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary diff --git a/tests/kafkatest/services/kafka/util.py b/tests/kafkatest/services/kafka/util.py index 0b6f5f41dd17b..695e5d62189e8 100644 --- a/tests/kafkatest/services/kafka/util.py +++ b/tests/kafkatest/services/kafka/util.py @@ -37,8 +37,17 @@ def get_log4j_config_param(node): def get_log4j_config(node): return 'log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'log4j.properties' +def get_log4j_config_for_kafka(node): + return 'kafka_log4j2.yaml' if 
get_version(node) >= LATEST_4_0 else 'kafka_log4j.properties' + def get_log4j_config_for_connect(node): return 'connect_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'connect_log4j.properties' def get_log4j_config_for_tools(node): return 'tools_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'tools_log4j.properties' + +def get_log4j_config_for_trogdor_coordinator(node): + return 'trogdor-coordinator-log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'trogdor-coordinator-log4j.properties' + +def get_log4j_config_for_trogdor_agent(node): + return 'trogdor-agent-log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'trogdor-agent-log4j.properties' diff --git a/tests/kafkatest/services/performance/consumer_performance.py b/tests/kafkatest/services/performance/consumer_performance.py index 7ad4f1ff0eef8..28086e8281887 100644 --- a/tests/kafkatest/services/performance/consumer_performance.py +++ b/tests/kafkatest/services/performance/consumer_performance.py @@ -49,7 +49,6 @@ class ConsumerPerformanceService(PerformanceService): STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stdout") STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "consumer_performance.stderr") LOG_FILE = os.path.join(LOG_DIR, "consumer_performance.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "consumer.properties") logs = { @@ -111,7 +110,7 @@ def start_cmd(self, node): cmd = fix_opts_for_new_jvm(node) cmd += "export LOG_DIR=%s;" % ConsumerPerformanceService.LOG_DIR cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts - cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\";" % (get_log4j_config_param(node), ConsumerPerformanceService.LOG4J_CONFIG) + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\";" % (get_log4j_config_param(node), get_log4j_config_for_tools(node)) cmd += " %s" % self.path.script("kafka-consumer-perf-test.sh", node) for key, value in self.args(node.version).items(): cmd += " --%s %s" 
% (key, value) @@ -129,7 +128,7 @@ def _worker(self, idx, node): node.account.ssh("mkdir -p %s" % ConsumerPerformanceService.PERSISTENT_ROOT, allow_fail=False) log_config = self.render(get_log4j_config_for_tools(node), log_file=ConsumerPerformanceService.LOG_FILE) - node.account.create_file(ConsumerPerformanceService.LOG4J_CONFIG, log_config) + node.account.create_file(get_log4j_config_for_tools(node), log_config) node.account.create_file(ConsumerPerformanceService.CONFIG_FILE, str(self.security_config)) self.security_config.setup_node(node) diff --git a/tests/kafkatest/services/performance/end_to_end_latency.py b/tests/kafkatest/services/performance/end_to_end_latency.py index 6d5312a09b21a..1591555770594 100644 --- a/tests/kafkatest/services/performance/end_to_end_latency.py +++ b/tests/kafkatest/services/performance/end_to_end_latency.py @@ -30,7 +30,6 @@ class EndToEndLatencyService(PerformanceService): STDOUT_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stdout") STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "end_to_end_latency.stderr") LOG_FILE = os.path.join(LOG_DIR, "end_to_end_latency.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "client.properties") logs = { @@ -75,7 +74,7 @@ def start_cmd(self, node): }) cmd = fix_opts_for_new_jvm(node) - cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), EndToEndLatencyService.LOG4J_CONFIG) + cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node)) cmd += "KAFKA_OPTS=%(kafka_opts)s %(kafka_run_class)s %(java_class_name)s " % args cmd += "%(bootstrap_servers)s %(topic)s %(num_records)d %(acks)d %(message_bytes)d %(config_file)s" % args @@ -89,7 +88,7 @@ def _worker(self, idx, node): log_config = self.render(get_log4j_config_for_tools(node), log_file=EndToEndLatencyService.LOG_FILE) - node.account.create_file(EndToEndLatencyService.LOG4J_CONFIG, 
log_config) + node.account.create_file(get_log4j_config_for_tools(node), log_config) client_config = str(self.security_config) client_config += "compression_type=%(compression_type)s" % self.args node.account.create_file(EndToEndLatencyService.CONFIG_FILE, client_config) diff --git a/tests/kafkatest/services/performance/producer_performance.py b/tests/kafkatest/services/performance/producer_performance.py index e11e6412b6c9c..acfe4790d731c 100644 --- a/tests/kafkatest/services/performance/producer_performance.py +++ b/tests/kafkatest/services/performance/producer_performance.py @@ -33,7 +33,6 @@ class ProducerPerformanceService(HttpMetricsCollector, PerformanceService): STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "producer_performance.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "producer_performance.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") def __init__(self, context, num_nodes, kafka, topic, num_records, record_size, throughput, version=DEV_BRANCH, settings=None, intermediate_stats=False, client_id="producer-performance"): @@ -90,7 +89,7 @@ def start_cmd(self, node): cmd += "for file in %s; do CLASSPATH=$CLASSPATH:$file; done; " % jar cmd += "export CLASSPATH; " - cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), ProducerPerformanceService.LOG4J_CONFIG) + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node)) cmd += "KAFKA_OPTS=%(kafka_opts)s KAFKA_HEAP_OPTS=\"-XX:+HeapDumpOnOutOfMemoryError\" %(kafka_run_class)s org.apache.kafka.tools.ProducerPerformance " \ "--topic %(topic)s --num-records %(num_records)d --record-size %(record_size)d --throughput %(throughput)d --producer-props bootstrap.servers=%(bootstrap_servers)s client.id=%(client_id)s %(metrics_props)s" % args @@ -120,7 +119,7 @@ def _worker(self, idx, node): # Create and upload log properties log_config = 
self.render(get_log4j_config_for_tools(node), log_file=ProducerPerformanceService.LOG_FILE) - node.account.create_file(ProducerPerformanceService.LOG4J_CONFIG, log_config) + node.account.create_file(get_log4j_config_for_tools(node), log_config) cmd = self.start_cmd(node) self.logger.debug("Producer performance %d command: %s", idx, cmd) diff --git a/tests/kafkatest/services/streams.py b/tests/kafkatest/services/streams.py index d6d6b1708130b..df8a0b3923073 100644 --- a/tests/kafkatest/services/streams.py +++ b/tests/kafkatest/services/streams.py @@ -38,7 +38,6 @@ class StreamsTestBaseService(KafkaPathResolverMixin, JmxMixin, Service): STDERR_FILE = os.path.join(PERSISTENT_ROOT, "streams.stderr") JMX_LOG_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.log") JMX_ERR_FILE = os.path.join(PERSISTENT_ROOT, "jmx_tool.err.log") - LOG4J_CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") PID_FILE = os.path.join(PERSISTENT_ROOT, "streams.pid") CLEAN_NODE_ENABLED = True @@ -287,7 +286,7 @@ def start_cmd(self, node): args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE args['log4j_param'] = get_log4j_config_param(node) - args['log4j'] = self.LOG4J_CONFIG_FILE + args['log4j'] = get_log4j_config_for_tools(node) args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ @@ -307,7 +306,7 @@ def start_node(self, node): node.account.mkdirs(self.PERSISTENT_ROOT) prop_file = self.prop_file() node.account.create_file(self.CONFIG_FILE, prop_file) - node.account.create_file(self.LOG4J_CONFIG_FILE, self.render(get_log4j_config_for_tools(node), log_file=self.LOG_FILE)) + node.account.create_file(get_log4j_config_for_tools(node), self.render(get_log4j_config_for_tools(node), log_file=self.LOG_FILE)) self.logger.info("Starting StreamsTest process on " + str(node.account)) with node.account.monitor_log(self.STDOUT_FILE) as monitor: @@ -366,7 +365,7 @@ def start_cmd(self, node): 
args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE args['log4j_param'] = get_log4j_config_param(node) - args['log4j'] = self.LOG4J_CONFIG_FILE + args['log4j'] = get_log4j_config_for_tools(node) args['version'] = self.KAFKA_STREAMS_VERSION args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) @@ -423,7 +422,7 @@ def start_cmd(self, node): args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE args['log4j_param'] = get_log4j_config_param(node) - args['log4j'] = self.LOG4J_CONFIG_FILE + args['log4j'] = get_log4j_config_for_tools(node) args['disable_auto_terminate'] = self.DISABLE_AUTO_TERMINATE args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) @@ -501,7 +500,7 @@ def start_cmd(self, node): args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE args['log4j_param'] = get_log4j_config_param(node) - args['log4j'] = self.LOG4J_CONFIG_FILE + args['log4j'] = get_log4j_config_for_tools(node) args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) cmd = "( export KAFKA_LOG4J_OPTS=\"%(log4j_param)s%(log4j)s\"; " \ @@ -541,7 +540,7 @@ def start_cmd(self, node): args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE args['log4j_param'] = get_log4j_config_param(node) - args['log4j'] = self.LOG4J_CONFIG_FILE + args['log4j'] = get_log4j_config_for_tools(node) args['application.id'] = self.applicationId args['input.topics'] = self.topic args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) @@ -637,7 +636,7 @@ def start_cmd(self, node): args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE args['log4j_param'] = get_log4j_config_param(node) - args['log4j'] = self.LOG4J_CONFIG_FILE + args['log4j'] = get_log4j_config_for_tools(node) args['version'] = self.KAFKA_STREAMS_VERSION args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node) @@ -738,7 +737,7 @@ def start_cmd(self, node): args['stderr'] = self.STDERR_FILE args['pidfile'] = self.PID_FILE 
args['log4j_param'] = get_log4j_config_param(node)
-        args['log4j'] = self.LOG4J_CONFIG_FILE
+        args['log4j'] = get_log4j_config_for_tools(node)
         args['version'] = self.KAFKA_STREAMS_VERSION
         args['kafka_run_class'] = self.path.script("kafka-run-class.sh", node)

diff --git a/tests/kafkatest/services/transactional_message_copier.py b/tests/kafkatest/services/transactional_message_copier.py
index 75646dad52ed1..d1f918cd8e115 100644
--- a/tests/kafkatest/services/transactional_message_copier.py
+++ b/tests/kafkatest/services/transactional_message_copier.py
@@ -34,7 +34,6 @@ class TransactionalMessageCopier(KafkaPathResolverMixin, BackgroundThreadService
     STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "transactional_message_copier.stderr")
     LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs")
     LOG_FILE = os.path.join(LOG_DIR, "transactional_message_copier.log")
-    LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties")

     logs = {
         "transactional_message_copier_stdout": {
@@ -80,7 +79,7 @@ def _worker(self, idx, node):
         # Create and upload log properties
         log_config = self.render(get_log4j_config_for_tools(node), log_file=TransactionalMessageCopier.LOG_FILE)
-        node.account.create_file(TransactionalMessageCopier.LOG4J_CONFIG, log_config)
+        node.account.create_file(get_log4j_config_for_tools(node), log_config)
         # Configure security
         self.security_config = self.kafka.security_config.client_config(node=node)
         self.security_config.setup_node(node)
@@ -117,7 +116,7 @@ def _worker(self, idx, node):
     def start_cmd(self, node, idx):
         cmd = "export LOG_DIR=%s;" % TransactionalMessageCopier.LOG_DIR
         cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts
-        cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), TransactionalMessageCopier.LOG4J_CONFIG)
+        cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node))
         cmd += self.path.script("kafka-run-class.sh", node) + " org.apache.kafka.tools."
+ "TransactionalMessageCopier" cmd += " --broker-list %s" % self.kafka.bootstrap_servers(self.security_config.security_protocol) cmd += " --transactional-id %s" % self.transactional_id diff --git a/tests/kafkatest/services/trogdor/trogdor.py b/tests/kafkatest/services/trogdor/trogdor.py index 3a54b663f02de..618c68d78513a 100644 --- a/tests/kafkatest/services/trogdor/trogdor.py +++ b/tests/kafkatest/services/trogdor/trogdor.py @@ -22,7 +22,8 @@ from ducktape.services.service import Service from ducktape.utils.util import wait_until from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin -from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config +from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config, \ + get_log4j_config_for_trogdor_coordinator, get_log4j_config_for_trogdor_agent class TrogdorService(KafkaPathResolverMixin, Service): @@ -49,8 +50,6 @@ class TrogdorService(KafkaPathResolverMixin, Service): AGENT_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-agent-stdout-stderr.log") COORDINATOR_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator.log") AGENT_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-agent.log") - COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j.properties") - AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j.properties") CONFIG_PATH = os.path.join(PERSISTENT_ROOT, "trogdor.conf") DEFAULT_AGENT_PORT=8888 DEFAULT_COORDINATOR_PORT=8889 @@ -142,20 +141,20 @@ def start_node(self, node): self._start_agent_node(node) def _start_coordinator_node(self, node): - node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES, + node.account.create_file(get_log4j_config_for_trogdor_coordinator(node), self.render(get_log4j_config(node), log_path=TrogdorService.COORDINATOR_LOG)) self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR, - TrogdorService.COORDINATOR_LOG4J_PROPERTIES, + 
get_log4j_config_for_trogdor_coordinator(node), TrogdorService.COORDINATOR_LOG, node) self.logger.info("Started trogdor coordinator on %s." % node.name) def _start_agent_node(self, node): - node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES, + node.account.create_file(get_log4j_config_for_trogdor_agent(node), self.render(get_log4j_config(node), log_path=TrogdorService.AGENT_LOG)) self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR, - TrogdorService.AGENT_LOG4J_PROPERTIES, + get_log4j_config_for_trogdor_agent(node), TrogdorService.AGENT_LOG, node) self.logger.info("Started trogdor agent on %s." % node.name) diff --git a/tests/kafkatest/services/verifiable_consumer.py b/tests/kafkatest/services/verifiable_consumer.py index 101e507f7f564..04500283d7831 100644 --- a/tests/kafkatest/services/verifiable_consumer.py +++ b/tests/kafkatest/services/verifiable_consumer.py @@ -216,7 +216,6 @@ class VerifiableConsumer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "verifiable_consumer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_consumer.properties") logs = { @@ -300,7 +299,7 @@ def _worker(self, idx, node): # Create and upload log properties log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableConsumer.LOG_FILE) - node.account.create_file(VerifiableConsumer.LOG4J_CONFIG, log_config) + node.account.create_file(get_log4j_config_for_tools(node), log_config) # Create and upload config file self.security_config = self.kafka.security_config.client_config(self.prop_file, node, @@ -383,7 +382,7 @@ def start_cmd(self, node): cmd = "" cmd += "export LOG_DIR=%s;" % VerifiableConsumer.LOG_DIR cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts - cmd += " export 
KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), VerifiableConsumer.LOG4J_CONFIG) + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node)) cmd += self.impl.exec_cmd(node) if self.on_record_consumed: cmd += " --verbose" diff --git a/tests/kafkatest/services/verifiable_producer.py b/tests/kafkatest/services/verifiable_producer.py index d7a12c91f171d..6f473d8bb13c0 100644 --- a/tests/kafkatest/services/verifiable_producer.py +++ b/tests/kafkatest/services/verifiable_producer.py @@ -41,7 +41,6 @@ class VerifiableProducer(KafkaPathResolverMixin, VerifiableClientMixin, Backgrou STDERR_CAPTURE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.stderr") LOG_DIR = os.path.join(PERSISTENT_ROOT, "logs") LOG_FILE = os.path.join(LOG_DIR, "verifiable_producer.log") - LOG4J_CONFIG = os.path.join(PERSISTENT_ROOT, "tools-log4j.properties") CONFIG_FILE = os.path.join(PERSISTENT_ROOT, "verifiable_producer.properties") logs = { @@ -128,7 +127,7 @@ def _worker(self, idx, node): # Create and upload log properties log_config = self.render(get_log4j_config_for_tools(node), log_file=VerifiableProducer.LOG_FILE) - node.account.create_file(VerifiableProducer.LOG4J_CONFIG, log_config) + node.account.create_file(get_log4j_config_for_tools(node), log_config) # Configure security self.security_config = self.kafka.security_config.client_config(node=node, @@ -222,7 +221,7 @@ def start_cmd(self, node, idx): cmd += " export KAFKA_OPTS=%s;" % self.security_config.kafka_opts cmd += fix_opts_for_new_jvm(node) - cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), VerifiableProducer.LOG4J_CONFIG) + cmd += " export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_tools(node)) cmd += self.impl.exec_cmd(node) version = get_version(node) if version >= V_2_5_0: diff --git a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py 
b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py index 4488d24ac6521..a879d95c418e1 100644 --- a/tests/kafkatest/tests/streams/streams_relational_smoke_test.py +++ b/tests/kafkatest/tests/streams/streams_relational_smoke_test.py @@ -18,7 +18,7 @@ from ducktape.mark.resource import cluster from ducktape.utils.util import wait_until from kafkatest.services.kafka import quorum -from kafkatest.services.kafka.util import get_log4j_config_param +from kafkatest.services.kafka.util import get_log4j_config_param, get_log4j_config_for_tools from kafkatest.services.streams import StreamsTestBaseService from kafkatest.tests.kafka_test import KafkaTest from kafkatest.version import LATEST_4_0 @@ -43,7 +43,7 @@ def start_cmd(self, node): " %(mode)s %(kafka)s %(nodeId)s %(processing_guarantee)s %(state_dir)s" \ " & echo $! >&3 ) 1>> %(stdout)s 2>> %(stderr)s 3> %(pidfile)s" % { "log4j_param": get_log4j_config_param(node), - "log4j": self.LOG4J_CONFIG_FILE, + "log4j": get_log4j_config_for_tools(node), "kafka_run_class": self.path.script("kafka-run-class.sh", node), "mode": self.mode, "kafka": self.kafka.bootstrap_servers(), @@ -57,7 +57,7 @@ def start_cmd(self, node): def start_node(self, node): node.account.mkdirs(self.PERSISTENT_ROOT) - node.account.create_file(self.LOG4J_CONFIG_FILE, + node.account.create_file(get_log4j_config_for_tools(node), self.render("log4j2_template.yaml" if node.version >= LATEST_4_0 else "log4j_template.properties", log_file=self.LOG_FILE)) From 99cc467593d00ff8a4215dde3b59b0b7f29d8cd9 Mon Sep 17 00:00:00 2001 From: frankvicky Date: Sun, 1 Dec 2024 09:07:15 +0800 Subject: [PATCH 46/46] KAFKA-9366: Update script file and e2e test --- bin/connect-distributed.sh | 11 ++++------- bin/connect-mirror-maker.sh | 11 ++++------- bin/connect-standalone.sh | 11 ++++------- bin/kafka-server-start.sh | 11 ++++------- bin/windows/connect-distributed.bat | 18 +++++++++--------- bin/windows/connect-standalone.bat | 18 +++++++++--------- 
bin/windows/kafka-server-start.bat | 16 ++++++++-------- tests/kafkatest/services/connect.py | 12 ++++++++---- tests/kafkatest/services/kafka/kafka.py | 8 ++++---- tests/kafkatest/services/kafka/util.py | 3 --- 10 files changed, 54 insertions(+), 65 deletions(-) diff --git a/bin/connect-distributed.sh b/bin/connect-distributed.sh index 5074ca2bef7bc..a9d185493ef68 100755 --- a/bin/connect-distributed.sh +++ b/bin/connect-distributed.sh @@ -22,15 +22,12 @@ fi base_dir=$(dirname $0) -if [ -f "$base_dir/../config/connect-log4j.properties" ]; then - echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 +if [ -z "$KAFKA_LOG4J_OPTS" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" +elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then + echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.yaml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" -elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-mirror-maker.sh b/bin/connect-mirror-maker.sh index daf10e5a99b50..9030575477252 100755 --- a/bin/connect-mirror-maker.sh +++ b/bin/connect-mirror-maker.sh @@ -22,15 +22,12 @@ fi base_dir=$(dirname $0) -if [ -f "$base_dir/../config/connect-log4j.properties" ]; then - echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 +if [ -z "$KAFKA_LOG4J_OPTS" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" +elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then + echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.yaml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" -elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/connect-standalone.sh b/bin/connect-standalone.sh index 6787c9b4db672..92e8dc9c8ee94 100755 --- a/bin/connect-standalone.sh +++ b/bin/connect-standalone.sh @@ -22,15 +22,12 @@ fi base_dir=$(dirname $0) -if [ -f "$base_dir/../config/connect-log4j.properties" ]; then - echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/connect-log4j.properties >&2 +if [ -z "$KAFKA_LOG4J_OPTS" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" +elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then + echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 echo You can also use the \$KAFKA_HOME/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2 - export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/connect-log4j.properties" -elif [ -f "$base_dir/../config/connect-log4j2.yaml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.yaml" -elif [ -f "$base_dir/../config/connect-log4j2.xml" ]; then - export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/connect-log4j2.xml" fi if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then diff --git a/bin/kafka-server-start.sh b/bin/kafka-server-start.sh index f0e5336e91b57..6539746160f4c 100755 --- a/bin/kafka-server-start.sh +++ b/bin/kafka-server-start.sh @@ -21,15 +21,12 @@ then fi base_dir=$(dirname $0) -if [ -f "$base_dir/../config/log4j.properties" ]; then - echo DEPRECATED: Using Log4j 1.x configuration file \$KAFKA_HOME/config/log4j.properties >&2 +if [ -z "$KAFKA_LOG4J_OPTS" ]; then + export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.yaml" +elif echo "$KAFKA_LOG4J_OPTS" | grep -qE "log4j\.[^[:space:]]+$"; then + echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2 echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2 echo You can also use the \$KAFKA_HOME/config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. 
>&2
-  export KAFKA_LOG4J_OPTS="-Dlog4j.configuration=file:$base_dir/../config/log4j.properties"
-elif [ -f "$base_dir/../config/log4j2.yaml" ]; then
-  export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.yaml"
-elif [ -f "$base_dir/../config/log4j2.xml" ]; then
-  export KAFKA_LOG4J_OPTS="-Dlog4j2.configurationFile=$base_dir/../config/log4j2.xml"
 fi

 if [ "x$KAFKA_HEAP_OPTS" = "x" ]; then
diff --git a/bin/windows/connect-distributed.bat b/bin/windows/connect-distributed.bat
index 92f9a9cce5391..43c338fc26a06 100644
--- a/bin/windows/connect-distributed.bat
+++ b/bin/windows/connect-distributed.bat
@@ -26,15 +26,15 @@ set BASE_DIR=%CD%
 popd

 rem Log4j settings
-IF EXIST "%BASE_DIR%/config/connect-log4j.properties" (
-    echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties
-    echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration.
-    echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration.
-    set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties
-) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yaml" (
-    set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
-) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" (
-    set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml
+IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
+    set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
+) ELSE (
+    echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
+    IF NOT ERRORLEVEL 1 (
+        echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended. >&2
+        echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration. >&2
+        echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. >&2
+    )
+)

 "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectDistributed %*
diff --git a/bin/windows/connect-standalone.bat b/bin/windows/connect-standalone.bat
index eacb67f4322e7..bac8bbd1291b6 100644
--- a/bin/windows/connect-standalone.bat
+++ b/bin/windows/connect-standalone.bat
@@ -26,15 +26,15 @@ set BASE_DIR=%CD%
 popd

 rem Log4j settings
-IF EXIST "%BASE_DIR%/config/connect-log4j.properties" (
-    echo DEPRECATED: Using Log4j 1.x configuration file %BASE_DIR%/config/connect-log4j.properties
-    echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration.
-    echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration.
-    set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%BASE_DIR%/config/connect-log4j.properties
-) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.yaml" (
-    set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
-) ELSE IF EXIST "%BASE_DIR%/config/connect-log4j2.xml" (
-    set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.xml
+IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
+    set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%BASE_DIR%/config/connect-log4j2.yaml
+) ELSE (
+    echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
+    IF NOT ERRORLEVEL 1 (
+        echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended.
+        echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration.
+        echo You can also use the %BASE_DIR%/config/connect-log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration.
+    )
+)

 "%~dp0kafka-run-class.bat" org.apache.kafka.connect.cli.ConnectStandalone %*
diff --git a/bin/windows/kafka-server-start.bat b/bin/windows/kafka-server-start.bat
index f7e259514d8e5..ff29321d00068 100644
--- a/bin/windows/kafka-server-start.bat
+++ b/bin/windows/kafka-server-start.bat
@@ -20,15 +20,15 @@ IF [%1] EQU [] (
 )
 SetLocal

-IF EXIST "%~dp0../../config/log4j.properties" (
-    echo DEPRECATED: Using Log4j 1.x configuration file %~dp0../../config/log4j.properties
-    echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration.
-    echo You can also use the %~dp0../../config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration.
-    set KAFKA_LOG4J_OPTS=-Dlog4j.configuration=file:%~dp0../../config/log4j.properties
-) ELSE IF EXIST "%~dp0../../config/log4j2.yaml" (
+IF ["%KAFKA_LOG4J_OPTS%"] EQU [""] (
     set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.yaml
-) ELSE IF EXIST "%~dp0../../config/log4j2.xml" (
-    set KAFKA_LOG4J_OPTS=-Dlog4j2.configurationFile=%~dp0../../config/log4j2.xml
+) ELSE (
+    echo %KAFKA_LOG4J_OPTS% | findstr /r /c:"log4j\.[^ ]*$" >nul
+    IF NOT ERRORLEVEL 1 (
+        echo DEPRECATED: A Log4j 1.x configuration file has been detected, which is no longer recommended.
+        echo To use a Log4j 2.x configuration, please see https://logging.apache.org/log4j/2.x/migrate-from-log4j1.html#Log4j2ConfigurationFormat for details about Log4j configuration file migration.
+ echo You can also use the %~dp0../../config/log4j2.yaml file as a starting point. Make sure to remove the Log4j 1.x configuration after completing the migration. + ) ) IF ["%KAFKA_HEAP_OPTS%"] EQU [""] ( rem detect OS architecture diff --git a/tests/kafkatest/services/connect.py b/tests/kafkatest/services/connect.py index 8fc9d361bb8e3..e09eba30b3e03 100644 --- a/tests/kafkatest/services/connect.py +++ b/tests/kafkatest/services/connect.py @@ -339,7 +339,8 @@ def node(self): return self.nodes[0] def start_cmd(self, node, connector_configs): - cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_connect(node)) + cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % \ + (get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node))) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["connect_heap_dump_file"]["path"] other_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -363,7 +364,8 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(get_log4j_config_for_connect(node), self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) + node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)), + self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) remote_connector_configs = [] for idx, template in enumerate(self.connector_config_templates): target_file = os.path.join(self.PERSISTENT_ROOT, "connect-connector-" + str(idx) + ".properties") @@ -399,7 +401,8 @@ def __init__(self, context, num_nodes, kafka, files, offsets_topic="connect-offs # connector_configs argument is intentionally ignored in distributed service. 
def start_cmd(self, node, connector_configs): - cmd = "( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_connect(node)) + cmd = ("( export KAFKA_LOG4J_OPTS=\"%s%s\"; " % + (get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)))) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["connect_heap_dump_file"]["path"] other_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -420,7 +423,8 @@ def start_node(self, node, **kwargs): if self.external_config_template_func: node.account.create_file(self.EXTERNAL_CONFIGS_FILE, self.external_config_template_func(node)) node.account.create_file(self.CONFIG_FILE, self.config_template_func(node)) - node.account.create_file(get_log4j_config_for_connect(node), self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) + node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config_for_connect(node)), + self.render(get_log4j_config_for_connect(node), log_file=self.LOG_FILE)) if self.connector_config_templates: raise DucktapeError("Config files are not valid in distributed mode, submit connectors via the REST API") diff --git a/tests/kafkatest/services/kafka/kafka.py b/tests/kafkatest/services/kafka/kafka.py index e0d7febc19109..b713aacb04e55 100644 --- a/tests/kafkatest/services/kafka/kafka.py +++ b/tests/kafkatest/services/kafka/kafka.py @@ -33,8 +33,7 @@ from kafkatest.services.security.security_config import SecurityConfig from kafkatest.version import DEV_BRANCH from kafkatest.version import KafkaVersion -from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config, \ - get_log4j_config_for_kafka +from kafkatest.services.kafka.util import fix_opts_for_new_jvm, get_log4j_config_param, get_log4j_config class KafkaListener: @@ -805,7 +804,7 @@ def start_cmd(self, node): kafka_mode = self.context.globals.get("kafka_mode", "") cmd = 
f"export KAFKA_MODE={kafka_mode}; " cmd += "export JMX_PORT=%d; " % self.jmx_port - cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), get_log4j_config_for_kafka(node)) + cmd += "export KAFKA_LOG4J_OPTS=\"%s%s\"; " % (get_log4j_config_param(node), os.path.join(self.PERSISTENT_ROOT, get_log4j_config(node))) heap_kafka_opts = "-XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=%s" % \ self.logs["kafka_heap_dump_file"]["path"] security_kafka_opts = self.security_config.kafka_opts.strip('\"') @@ -874,7 +873,8 @@ def start_node(self, node, timeout_sec=60, **kwargs): self.logger.info("kafka.properties:") self.logger.info(prop_file) node.account.create_file(KafkaService.CONFIG_FILE, prop_file) - node.account.create_file(get_log4j_config_for_kafka(node), self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR)) + node.account.create_file(os.path.join(self.PERSISTENT_ROOT, get_log4j_config(node)), + self.render(get_log4j_config(node), log_dir=KafkaService.OPERATIONAL_LOG_DIR)) if self.quorum_info.using_kraft: # format log directories if necessary diff --git a/tests/kafkatest/services/kafka/util.py b/tests/kafkatest/services/kafka/util.py index 695e5d62189e8..a2e22ac32b7c3 100644 --- a/tests/kafkatest/services/kafka/util.py +++ b/tests/kafkatest/services/kafka/util.py @@ -37,9 +37,6 @@ def get_log4j_config_param(node): def get_log4j_config(node): return 'log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'log4j.properties' -def get_log4j_config_for_kafka(node): - return 'kafka_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'kafka_log4j.properties' - def get_log4j_config_for_connect(node): return 'connect_log4j2.yaml' if get_version(node) >= LATEST_4_0 else 'connect_log4j.properties'