diff --git a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
index 9742a702ad43..fb316a1c441e 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-3_2-12/CHANGELOG.md
@@ -7,12 +7,19 @@
#### Breaking Changes
#### Bugs Fixed
-* Added log4j-core to the list of shaded packages to avoid conflicts when customers use log4j in a different version. - See [PR 45924](https://github.com/Azure/azure-sdk-for-java/pull/46451)
-
-#### Other Changes
+* Reverted the log4j shading change (introduced in 4.38.1) that caused a known issue. - See [PR 46546](https://github.com/Azure/azure-sdk-for-java/pull/46546)
* Added change feed performance monitoring which is used to improve end lsn calculation in `CosmosPartitionPlanner`. - See [PR 46320](https://github.com/Azure/azure-sdk-for-java/pull/46320)
* Added `spark.cosmos.auth.aad.audience` as a valid configuration option to allow using AAD tokens with custom audiences. - See [PR 46554](https://github.com/Azure/azure-sdk-for-java/pull/46554)
+#### Other Changes
+
+### 4.38.1 (2025-08-22)
+
+**NOTE: This version has a known issue due to shading log4j - Please use a more recent version (>= 4.38.2) or 4.38.0 instead**
+
+#### Other Changes
+* Added log4j-core to the list of shaded packages to avoid conflicts when customers use log4j in a different version. **NOTE: This change caused a known issue - Please use a more recent version instead** - See [PR 46451](https://github.com/Azure/azure-sdk-for-java/pull/46451)
+
### 4.38.0 (2025-07-31)
#### Features Added
diff --git a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
index e107292a9b6f..4d5a374a9a3f 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-4_2-12/CHANGELOG.md
@@ -7,12 +7,19 @@
#### Breaking Changes
#### Bugs Fixed
-* Added log4j-core to the list of shaded packages to avoid conflicts when customers use log4j in a different version. - See [PR 45924](https://github.com/Azure/azure-sdk-for-java/pull/46451)
-
-#### Other Changes
+* Reverted the log4j shading change (introduced in 4.38.1) that caused a known issue. - See [PR 46546](https://github.com/Azure/azure-sdk-for-java/pull/46546)
* Added change feed performance monitoring which is used to improve end lsn calculation in `CosmosPartitionPlanner`. - See [PR 46320](https://github.com/Azure/azure-sdk-for-java/pull/46320)
* Added `spark.cosmos.auth.aad.audience` as a valid configuration option to allow using AAD tokens with custom audiences. - See [PR 46554](https://github.com/Azure/azure-sdk-for-java/pull/46554)
+#### Other Changes
+
+### 4.38.1 (2025-08-22)
+
+**NOTE: This version has a known issue due to shading log4j - Please use a more recent version (>= 4.38.2) or 4.38.0 instead**
+
+#### Other Changes
+* Added log4j-core to the list of shaded packages to avoid conflicts when customers use log4j in a different version. **NOTE: This change caused a known issue - Please use a more recent version instead** - See [PR 46451](https://github.com/Azure/azure-sdk-for-java/pull/46451)
+
### 4.38.0 (2025-07-31)
#### Features Added
diff --git a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
index 16af86a9910c..9ee8c557c861 100644
--- a/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
+++ b/sdk/cosmos/azure-cosmos-spark_3-5_2-12/CHANGELOG.md
@@ -7,12 +7,19 @@
#### Breaking Changes
#### Bugs Fixed
-* Added log4j-core to the list of shaded packages to avoid conflicts when customers use log4j in a different version. - See [PR 45924](https://github.com/Azure/azure-sdk-for-java/pull/46451)
-
-#### Other Changes
+* Reverted the log4j shading change (introduced in 4.38.1) that caused a known issue. - See [PR 46546](https://github.com/Azure/azure-sdk-for-java/pull/46546)
* Added change feed performance monitoring which is used to improve end lsn calculation in `CosmosPartitionPlanner`. - See [PR 46320](https://github.com/Azure/azure-sdk-for-java/pull/46320)
* Added `spark.cosmos.auth.aad.audience` as a valid configuration option to allow using AAD tokens with custom audiences. - See [PR 46554](https://github.com/Azure/azure-sdk-for-java/pull/46554)
+#### Other Changes
+
+### 4.38.1 (2025-08-22)
+
+**NOTE: This version has a known issue due to shading log4j - Please use a more recent version (>= 4.38.2) or 4.38.0 instead**
+
+#### Other Changes
+* Added log4j-core to the list of shaded packages to avoid conflicts when customers use log4j in a different version. **NOTE: This change caused a known issue - Please use a more recent version instead** - See [PR 46451](https://github.com/Azure/azure-sdk-for-java/pull/46451)
+
### 4.38.0 (2025-07-31)
#### Features Added
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/pom.xml b/sdk/cosmos/azure-cosmos-spark_3_2-12/pom.xml
index 923b58c4e86d..d7f89b45ff9c 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/pom.xml
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/pom.xml
@@ -589,10 +589,6 @@
net.jcip
${shadingPrefix}.net.jcip
-
- org.apache.logging.log4j
- ${shadingPrefix}.org.apache.logging.log4j
-
@@ -613,18 +609,6 @@
**
-
- org.apache.logging.log4j:log4j-core
-
- **
-
-
-
- org.apache.logging.log4j:log4j-slf4j-impl
-
- **
-
-
*:*
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala
index 8b0c66517120..fdf051f6c9af 100644
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala
+++ b/sdk/cosmos/azure-cosmos-spark_3_2-12/src/main/scala/com/azure/cosmos/spark/CosmosClientCache.scala
@@ -499,7 +499,7 @@ private[spark] object CosmosClientCache extends BasicLoggingTrait {
val azMonConfig = cosmosClientConfiguration.azureMonitorConfig.get
val openTelemetry = configureOpenTelemetrySdk(cosmosClientConfiguration, azMonConfig)
- if (azMonConfig.logLevel != Level.OFF) {
+ if (azMonConfig.enabled && azMonConfig.logLevel != Level.OFF) {
configureLoggingForAzureMonitor(cosmosClientConfiguration, azMonConfig, openTelemetry)
}
@@ -647,7 +647,6 @@ private[spark] object CosmosClientCache extends BasicLoggingTrait {
telemetryConfig = telemetryConfig
.tracingOptions(tracingOptions)
- .enableTransportLevelTracing()
} else {
logInfo("Azure Monitor traces/logs disabled.")
}
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/test-databricks/notebooks/basicScenarioAad.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/test-databricks/notebooks/basicScenarioAad.scala
deleted file mode 100644
index 92923af8228f..000000000000
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/test-databricks/notebooks/basicScenarioAad.scala
+++ /dev/null
@@ -1,114 +0,0 @@
-// Databricks notebook source
-// val cosmosEndpoint = ""
-// val subscriptionId = ""
-// val tenantId = ""
-// val resourceGroupName = ""
-// val clientId = ""
-// val clientSecret = ""
-
-println("SCENARIO: basicScenarioAad")
-
-val authType = "ServicePrincipal"
-val cosmosEndpoint = dbutils.widgets.get("cosmosEndpoint")
-val subscriptionId = dbutils.widgets.get("subscriptionId")
-val tenantId = dbutils.widgets.get("tenantId")
-val resourceGroupName = dbutils.widgets.get("resourceGroupName")
-val clientId = dbutils.widgets.get("clientId")
-val clientSecret = dbutils.widgets.get("clientSecret")
-val cosmosContainerName = dbutils.widgets.get("cosmosContainerName")
-val cosmosDatabaseName = dbutils.widgets.get("cosmosDatabaseName")
-
-val cfg = Map("spark.cosmos.accountEndpoint" -> cosmosEndpoint,
- "spark.cosmos.auth.type" -> authType,
- "spark.cosmos.account.subscriptionId" -> subscriptionId,
- "spark.cosmos.account.tenantId" -> tenantId,
- "spark.cosmos.account.resourceGroupName" -> resourceGroupName,
- "spark.cosmos.account.azureEnvironment" -> "Custom",
- "spark.cosmos.account.azureEnvironment.management" -> "https://management.azure.com/",
- "spark.cosmos.account.azureEnvironment.aad" -> "https://login.microsoftonline.com/",
- "spark.cosmos.auth.aad.clientId" -> clientId,
- "spark.cosmos.auth.aad.clientSecret" -> clientSecret,
- "spark.cosmos.database" -> cosmosDatabaseName,
- "spark.cosmos.container" -> cosmosContainerName,
- "spark.cosmos.enforceNativeTransport" -> "true"
-)
-
-val cfgWithAutoSchemaInference = Map("spark.cosmos.accountEndpoint" -> cosmosEndpoint,
- "spark.cosmos.auth.type" -> authType,
- "spark.cosmos.account.subscriptionId" -> subscriptionId,
- "spark.cosmos.account.tenantId" -> tenantId,
- "spark.cosmos.account.resourceGroupName" -> resourceGroupName,
- "spark.cosmos.auth.aad.clientId" -> clientId,
- "spark.cosmos.auth.aad.clientSecret" -> clientSecret,
- "spark.cosmos.database" -> cosmosDatabaseName,
- "spark.cosmos.container" -> cosmosContainerName,
- "spark.cosmos.read.inferSchema.enabled" -> "true",
- "spark.cosmos.enforceNativeTransport" -> "true",
- "spark.cosmos.account.azureEnvironment" -> "Custom",
- "spark.cosmos.account.azureEnvironment.management" -> "https://management.azure.com/",
- "spark.cosmos.account.azureEnvironment.aad" -> "https://login.microsoftonline.com/"
-)
-
-// COMMAND ----------
-
-// create Cosmos Database and Cosmos Container using Catalog APIs
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn", "com.azure.cosmos.spark.CosmosCatalog")
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn.spark.cosmos.accountEndpoint", cosmosEndpoint)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn.spark.cosmos.auth.type", authType)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn.spark.cosmos.account.subscriptionId", subscriptionId)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn.spark.cosmos.account.tenantId", tenantId)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn.spark.cosmos.account.resourceGroupName", resourceGroupName)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn.spark.cosmos.auth.aad.clientId", clientId)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpn.spark.cosmos.auth.aad.clientSecret", clientSecret)
-
-// create a cosmos database
-spark.sql(s"CREATE DATABASE IF NOT EXISTS cosmosCatalogSpn.${cosmosDatabaseName};")
-
-// create a cosmos container
-spark.sql(s"CREATE TABLE IF NOT EXISTS cosmosCatalogSpn.${cosmosDatabaseName}.${cosmosContainerName} using cosmos.oltp " +
- s"TBLPROPERTIES(partitionKeyPath = '/id', manualThroughput = '400')")
-
-// update the throughput
-spark.sql(s"ALTER TABLE cosmosCatalogSpn.${cosmosDatabaseName}.${cosmosContainerName} " +
- s"SET TBLPROPERTIES('manualThroughput' = '1100')")
-
-// COMMAND ----------
-
-// ingestion
-spark.createDataFrame(Seq(("cat-alive", "Schrodinger cat", 2, true), ("cat-dead", "Schrodinger cat", 2, false)))
- .toDF("id","name","age","isAlive")
- .write
- .format("cosmos.oltp")
- .options(cfg)
- .mode("APPEND")
- .save()
-
-// COMMAND ----------
-
-// Show the schema of the table and data without auto schema inference
-val df = spark.read.format("cosmos.oltp").options(cfg).load()
-df.printSchema()
-
-df.show()
-
-// COMMAND ----------
-
-// Show the schema of the table and data with auto schema inference
-val df = spark.read.format("cosmos.oltp").options(cfgWithAutoSchemaInference).load()
-df.printSchema()
-
-df.show()
-
-// COMMAND ----------
-
-import org.apache.spark.sql.functions.col
-
-// Query to find the live cat and increment age of the alive cat
-df.filter(col("isAlive") === true)
- .withColumn("age", col("age") + 1)
- .show()
-
-// COMMAND ----------
-
-// cleanup
-spark.sql(s"DROP TABLE cosmosCatalogSpn.${cosmosDatabaseName}.${cosmosContainerName};")
diff --git a/sdk/cosmos/azure-cosmos-spark_3_2-12/test-databricks/notebooks/basicScenarioAadMisspelled.scala b/sdk/cosmos/azure-cosmos-spark_3_2-12/test-databricks/notebooks/basicScenarioAadMisspelled.scala
deleted file mode 100644
index 43dec100f5e3..000000000000
--- a/sdk/cosmos/azure-cosmos-spark_3_2-12/test-databricks/notebooks/basicScenarioAadMisspelled.scala
+++ /dev/null
@@ -1,106 +0,0 @@
-// Databricks notebook source
-// val cosmosEndpoint = ""
-// val subscriptionId = ""
-// val tenantId = ""
-// val resourceGroupName = ""
-// val clientId = ""
-// val clientSecret = ""
-
-println("SCENARIO: basicScenarioAadMisspelled")
-
-val authType = "ServicePrinciple"
-val cosmosEndpoint = dbutils.widgets.get("cosmosEndpoint")
-val subscriptionId = dbutils.widgets.get("subscriptionId")
-val tenantId = dbutils.widgets.get("tenantId")
-val resourceGroupName = dbutils.widgets.get("resourceGroupName")
-val clientId = dbutils.widgets.get("clientId")
-val clientSecret = dbutils.widgets.get("clientSecret")
-val cosmosContainerName = dbutils.widgets.get("cosmosContainerName")
-val cosmosDatabaseName = dbutils.widgets.get("cosmosDatabaseName")
-
-val cfg = Map("spark.cosmos.accountEndpoint" -> cosmosEndpoint,
- "spark.cosmos.auth.type" -> authType,
- "spark.cosmos.account.subscriptionId" -> subscriptionId,
- "spark.cosmos.account.tenantId" -> tenantId,
- "spark.cosmos.account.resourceGroupName" -> resourceGroupName,
- "spark.cosmos.auth.aad.clientId" -> clientId,
- "spark.cosmos.auth.aad.clientSecret" -> clientSecret,
- "spark.cosmos.database" -> cosmosDatabaseName,
- "spark.cosmos.container" -> cosmosContainerName
-)
-
-val cfgWithAutoSchemaInference = Map("spark.cosmos.accountEndpoint" -> cosmosEndpoint,
- "spark.cosmos.auth.type" -> authType,
- "spark.cosmos.account.subscriptionId" -> subscriptionId,
- "spark.cosmos.account.tenantId" -> tenantId,
- "spark.cosmos.account.resourceGroupName" -> resourceGroupName,
- "spark.cosmos.auth.aad.clientId" -> clientId,
- "spark.cosmos.auth.aad.clientSecret" -> clientSecret,
- "spark.cosmos.database" -> cosmosDatabaseName,
- "spark.cosmos.container" -> cosmosContainerName,
- "spark.cosmos.read.inferSchema.enabled" -> "true"
-)
-
-// COMMAND ----------
-
-// create Cosmos Database and Cosmos Container using Catalog APIs
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld", "com.azure.cosmos.spark.CosmosCatalog")
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld.spark.cosmos.accountEndpoint", cosmosEndpoint)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld.spark.cosmos.auth.type", authType)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld.spark.cosmos.account.subscriptionId", subscriptionId)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld.spark.cosmos.account.tenantId", tenantId)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld.spark.cosmos.account.resourceGroupName", resourceGroupName)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld.spark.cosmos.auth.aad.clientId", clientId)
-spark.conf.set(s"spark.sql.catalog.cosmosCatalogSpnOld.spark.cosmos.auth.aad.clientSecret", clientSecret)
-
-// create a cosmos database
-spark.sql(s"CREATE DATABASE IF NOT EXISTS cosmosCatalogSpnOld.${cosmosDatabaseName};")
-
-// create a cosmos container
-spark.sql(s"CREATE TABLE IF NOT EXISTS cosmosCatalogSpnOld.${cosmosDatabaseName}.${cosmosContainerName} using cosmos.oltp " +
- s"TBLPROPERTIES(partitionKeyPath = '/id', manualThroughput = '400')")
-
-// update the throughput
-spark.sql(s"ALTER TABLE cosmosCatalogSpnOld.${cosmosDatabaseName}.${cosmosContainerName} " +
- s"SET TBLPROPERTIES('manualThroughput' = '1100')")
-
-// COMMAND ----------
-
-// ingestion
-spark.createDataFrame(Seq(("cat-alive", "Schrodinger cat", 2, true), ("cat-dead", "Schrodinger cat", 2, false)))
- .toDF("id","name","age","isAlive")
- .write
- .format("cosmos.oltp")
- .options(cfg)
- .mode("APPEND")
- .save()
-
-// COMMAND ----------
-
-// Show the schema of the table and data without auto schema inference
-val df = spark.read.format("cosmos.oltp").options(cfg).load()
-df.printSchema()
-
-df.show()
-
-// COMMAND ----------
-
-// Show the schema of the table and data with auto schema inference
-val df = spark.read.format("cosmos.oltp").options(cfgWithAutoSchemaInference).load()
-df.printSchema()
-
-df.show()
-
-// COMMAND ----------
-
-import org.apache.spark.sql.functions.col
-
-// Query to find the live cat and increment age of the alive cat
-df.filter(col("isAlive") === true)
- .withColumn("age", col("age") + 1)
- .show()
-
-// COMMAND ----------
-
-// cleanup
-spark.sql(s"DROP TABLE cosmosCatalogSpnOld.${cosmosDatabaseName}.${cosmosContainerName};")