From 085a0f9ba086bc3ee9c8b66ba037e634f40b4647 Mon Sep 17 00:00:00 2001 From: Eduard Tudenhoefner Date: Thu, 2 Oct 2025 16:22:14 +0200 Subject: [PATCH 1/2] Build: Bump Jetty to 12.1.5 --- .../iceberg/aws/s3/signer/TestS3RestSigner.java | 5 +++-- .../iceberg/rest/TestBaseWithRESTServer.java | 5 +++-- .../org/apache/iceberg/rest/TestRESTCatalog.java | 14 ++++++++++++-- .../apache/iceberg/rest/TestRESTViewCatalog.java | 5 +++-- .../TestRESTViewCatalogWithAssumedViewSupport.java | 5 +++-- gradle/libs.versions.toml | 4 ++-- .../rest/RESTCompatibilityKitCatalogTests.java | 9 +++++++++ .../org/apache/iceberg/rest/RESTCatalogServer.java | 5 +++-- 8 files changed, 38 insertions(+), 14 deletions(-) diff --git a/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java b/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java index b51d97cc611a..f09360915725 100644 --- a/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java +++ b/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java @@ -37,10 +37,10 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.rest.auth.OAuth2Properties; import org.apache.iceberg.util.ThreadPools; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; @@ -181,6 +181,7 @@ public void before() throws Exception { CreateMultipartUploadRequest.builder().bucket(BUCKET).key("random/multipart-key").build()); } + @SuppressWarnings("removal") private static Server initHttpServer() throws Exception { S3SignerServlet.SignRequestValidator 
deleteObjectsWithBody = new S3SignerServlet.SignRequestValidator( diff --git a/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java b/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java index f1a172a4237c..b799b49cd618 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java @@ -34,10 +34,10 @@ import org.apache.iceberg.inmemory.InMemoryCatalog; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.rest.responses.ErrorResponse; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.io.TempDir; @@ -61,6 +61,7 @@ public abstract class TestBaseWithRESTServer { @TempDir private Path temp; + @SuppressWarnings("removal") @BeforeEach public void before() throws Exception { File warehouse = temp.toFile(); diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java index d03c5f9b8896..0ba2065a6e95 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java @@ -113,10 +113,10 @@ import org.apache.iceberg.util.Pair; import org.assertj.core.api.InstanceOfAssertFactories; import org.awaitility.Awaitility; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import 
org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -264,6 +264,7 @@ protected T execute( private Server httpServer; private HeaderValidatingAdapter adapterForRESTServer; + @SuppressWarnings("removal") @BeforeEach public void createCatalog() throws Exception { File warehouse = temp.toFile(); @@ -409,6 +410,15 @@ protected boolean requiresNamespaceCreate() { return true; } + @Override + protected boolean supportsNamesWithSlashes() { + // names with slashes are rejected and considered as suspicious characters after upgrading Jetty + // and the Servlet API. See also + // https://jakarta.ee/specifications/servlet/6.0/jakarta-servlet-spec-6.0.html#uri-path-canonicalization + // for additional details + return false; + } + /* RESTCatalog specific tests */ @Test diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java index fd2faf55087c..f02ab2b9bbd4 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java @@ -57,10 +57,10 @@ import org.apache.iceberg.rest.responses.LoadViewResponse; import org.apache.iceberg.view.ViewCatalogTests; import org.apache.iceberg.view.ViewMetadata; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -78,6 +78,7 @@ public class TestRESTViewCatalog extends ViewCatalogTests { protected InMemoryCatalog backendCatalog; 
protected Server httpServer; + @SuppressWarnings("removal") @BeforeEach public void createCatalog() throws Exception { File warehouse = temp.toFile(); diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java index 1ba340cc56c2..fa999e803325 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java @@ -31,14 +31,15 @@ import org.apache.iceberg.inmemory.InMemoryCatalog; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.rest.responses.ConfigResponse; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.BeforeEach; public class TestRESTViewCatalogWithAssumedViewSupport extends TestRESTViewCatalog { + @SuppressWarnings("removal") @BeforeEach public void createCatalog() throws Exception { File warehouse = temp.toFile(); diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 8cd9e566b367..59cdaac33c5a 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -66,7 +66,7 @@ jakarta-el-api = "3.0.3" jakarta-servlet-api = "6.1.0" jaxb-api = "2.3.1" jaxb-runtime = "2.3.9" -jetty = "11.0.26" +jetty = "12.1.5" junit = "5.14.3" junit-platform = "1.14.3" junit-pioneer = "2.3.0" @@ -204,7 +204,7 @@ guava-testlib = { module = "com.google.guava:guava-testlib", version.ref = "guav jakarta-el-api = { module = "jakarta.el:jakarta.el-api", version.ref = "jakarta-el-api" } jakarta-servlet = {module = "jakarta.servlet:jakarta.servlet-api", 
version.ref = "jakarta-servlet-api"} jetty-server = { module = "org.eclipse.jetty:jetty-server", version.ref = "jetty" } -jetty-servlet = { module = "org.eclipse.jetty:jetty-servlet", version.ref = "jetty" } +jetty-servlet = { module = "org.eclipse.jetty.ee10:jetty-ee10-servlet", version.ref = "jetty" } junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit" } junit-jupiter-engine = { module = "org.junit.jupiter:junit-jupiter-engine", version.ref = "junit" } junit-pioneer = { module = "org.junit-pioneer:junit-pioneer", version.ref = "junit-pioneer" } diff --git a/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java b/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java index 87ec90663db2..9a1f86706db6 100644 --- a/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java +++ b/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java @@ -97,4 +97,13 @@ protected boolean supportsNamesWithDot() { return PropertyUtil.propertyAsBoolean( restCatalog.properties(), RESTCompatibilityKitSuite.RCK_SUPPORTS_NAMES_WITH_DOT, false); } + + @Override + protected boolean supportsNamesWithSlashes() { + // names with slashes are rejected and considered as suspicious characters after upgrading Jetty + // and the Servlet API. 
See also + // https://jakarta.ee/specifications/servlet/6.0/jakarta-servlet-spec-6.0.html#uri-path-canonicalization + // for additional details + return false; + } } diff --git a/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java b/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java index 5f0f89d92646..34d8761a902b 100644 --- a/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java +++ b/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java @@ -28,12 +28,12 @@ import org.apache.iceberg.jdbc.JdbcCatalog; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.util.PropertyUtil; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -106,6 +106,7 @@ private CatalogContext initializeBackendCatalog() throws IOException { catalogProperties); } + @SuppressWarnings("removal") public void start(boolean join) throws Exception { CatalogContext catalogContext = initializeBackendCatalog(); From a025f5388b92889b8b1d9940de3e604ee11ed32e Mon Sep 17 00:00:00 2001 From: manuzhang Date: Fri, 13 Feb 2026 16:16:31 +0100 Subject: [PATCH 2/2] Spark: Disable UI configs to avoid dependency conflicts with Spark's Jetty version --- spark/v3.4/build.gradle | 21 +------ .../iceberg/DeleteFileIndexBenchmark.java | 3 +- .../spark/MergeCardinalityCheckBenchmark.java | 2 +- .../iceberg/spark/PlanningBenchmark.java | 2 +- .../spark/TaskGroupPlanningBenchmark.java | 2 +- .../spark/UpdateProjectionBenchmark.java | 2 +- 
.../spark/extensions/ExtensionsTestBase.java | 1 + .../action/DeleteOrphanFilesBenchmark.java | 2 + .../IcebergSortCompactionBenchmark.java | 2 + .../spark/source/IcebergSourceBenchmark.java | 3 +- .../SparkDistributedDataScanTestBase.java | 2 + .../TestSparkDistributedDataScanDeletes.java | 2 + ...stSparkDistributedDataScanFilterFiles.java | 2 + ...TestSparkDistributedDataScanReporting.java | 2 + .../iceberg/spark/DummyMetricsServlet.java | 62 +++++++++++++++++++ .../org/apache/iceberg/spark/TestBase.java | 8 +++ .../iceberg/spark/source/ScanTestBase.java | 2 + .../spark/source/TestFilteredScan.java | 2 + .../source/TestForwardCompatibility.java | 2 + .../spark/source/TestIcebergSpark.java | 2 + .../spark/source/TestPartitionPruning.java | 2 + .../spark/source/TestPartitionValues.java | 2 + .../spark/source/TestSnapshotSelection.java | 2 + .../spark/source/TestSparkDataFile.java | 2 + .../spark/source/TestSparkDataWrite.java | 2 + .../spark/source/TestSparkReadProjection.java | 2 + .../spark/source/TestSparkReaderDeletes.java | 2 + .../TestSparkReaderWithBloomFilter.java | 2 + .../spark/source/TestStructuredStreaming.java | 2 + .../spark/source/TestWriteMetricsConfig.java | 2 + .../spark/sql/TestAggregatePushDown.java | 1 + spark/v3.5/build.gradle | 21 +------ .../iceberg/DeleteFileIndexBenchmark.java | 3 +- .../spark/MergeCardinalityCheckBenchmark.java | 2 +- .../iceberg/spark/PlanningBenchmark.java | 2 +- .../spark/TaskGroupPlanningBenchmark.java | 2 +- .../spark/UpdateProjectionBenchmark.java | 2 +- .../iceberg/spark/TestExtendedParser.java | 7 ++- .../spark/extensions/ExtensionsTestBase.java | 1 + .../action/DeleteOrphanFilesBenchmark.java | 2 + .../IcebergSortCompactionBenchmark.java | 2 + .../spark/source/DVReaderBenchmark.java | 3 +- .../spark/source/DVWriterBenchmark.java | 3 +- .../spark/source/IcebergSourceBenchmark.java | 3 +- .../SparkDistributedDataScanTestBase.java | 2 + .../TestSparkDistributedDataScanDeletes.java | 2 + 
...stSparkDistributedDataScanFilterFiles.java | 2 + ...TestSparkDistributedDataScanReporting.java | 2 + .../iceberg/spark/DummyMetricsServlet.java | 62 +++++++++++++++++++ .../org/apache/iceberg/spark/TestBase.java | 8 +++ ...rquetDictionaryEncodedVectorizedReads.java | 2 + .../iceberg/spark/source/ScanTestBase.java | 2 + .../spark/source/TestFilteredScan.java | 2 + .../source/TestForwardCompatibility.java | 2 + .../spark/source/TestIcebergSpark.java | 2 + .../spark/source/TestPartitionPruning.java | 2 + .../spark/source/TestPartitionValues.java | 2 + .../spark/source/TestSnapshotSelection.java | 2 + .../spark/source/TestSparkDataFile.java | 2 + .../spark/source/TestSparkDataWrite.java | 2 + .../spark/source/TestSparkReadProjection.java | 2 + .../spark/source/TestSparkReaderDeletes.java | 2 + .../TestSparkReaderWithBloomFilter.java | 2 + .../spark/source/TestStructuredStreaming.java | 2 + .../spark/source/TestWriteMetricsConfig.java | 2 + .../spark/sql/TestAggregatePushDown.java | 1 + spark/v4.0/build.gradle | 21 +------ .../iceberg/DeleteFileIndexBenchmark.java | 3 +- .../spark/MergeCardinalityCheckBenchmark.java | 2 +- .../iceberg/spark/PlanningBenchmark.java | 2 +- .../spark/TaskGroupPlanningBenchmark.java | 2 +- .../spark/UpdateProjectionBenchmark.java | 2 +- .../iceberg/spark/TestExtendedParser.java | 7 ++- .../spark/extensions/ExtensionsTestBase.java | 1 + .../action/DeleteOrphanFilesBenchmark.java | 2 + .../IcebergSortCompactionBenchmark.java | 2 + .../spark/source/DVReaderBenchmark.java | 3 +- .../spark/source/DVWriterBenchmark.java | 3 +- .../spark/source/IcebergSourceBenchmark.java | 3 +- .../SparkDistributedDataScanTestBase.java | 2 + .../TestSparkDistributedDataScanDeletes.java | 2 + ...stSparkDistributedDataScanFilterFiles.java | 2 + ...TestSparkDistributedDataScanReporting.java | 2 + .../iceberg/spark/DummyMetricsServlet.java | 62 +++++++++++++++++++ .../org/apache/iceberg/spark/TestBase.java | 8 +++ ...rquetDictionaryEncodedVectorizedReads.java | 2 + 
.../iceberg/spark/source/ScanTestBase.java | 2 + .../spark/source/TestFilteredScan.java | 2 + .../source/TestForwardCompatibility.java | 2 + .../spark/source/TestIcebergSpark.java | 2 + .../spark/source/TestPartitionPruning.java | 2 + .../spark/source/TestPartitionValues.java | 2 + .../spark/source/TestSnapshotSelection.java | 2 + .../spark/source/TestSparkDataFile.java | 2 + .../spark/source/TestSparkDataWrite.java | 2 + .../spark/source/TestSparkReadProjection.java | 2 + .../spark/source/TestSparkReaderDeletes.java | 2 + .../TestSparkReaderWithBloomFilter.java | 2 + .../spark/source/TestStructuredStreaming.java | 2 + .../spark/source/TestWriteMetricsConfig.java | 2 + .../spark/sql/TestAggregatePushDown.java | 1 + spark/v4.1/build.gradle | 21 +------ .../iceberg/DeleteFileIndexBenchmark.java | 3 +- .../spark/MergeCardinalityCheckBenchmark.java | 2 +- .../iceberg/spark/PlanningBenchmark.java | 2 +- .../spark/TaskGroupPlanningBenchmark.java | 2 +- .../spark/UpdateProjectionBenchmark.java | 2 +- .../iceberg/spark/TestExtendedParser.java | 7 ++- .../spark/extensions/ExtensionsTestBase.java | 1 + .../TestPartitionedWritesToWapBranch.java | 1 + .../action/DeleteOrphanFilesBenchmark.java | 2 + .../IcebergSortCompactionBenchmark.java | 2 + .../spark/source/DVReaderBenchmark.java | 3 +- .../spark/source/DVWriterBenchmark.java | 3 +- .../spark/source/IcebergSourceBenchmark.java | 3 +- .../SparkDistributedDataScanTestBase.java | 2 + .../TestSparkDistributedDataScanDeletes.java | 2 + ...stSparkDistributedDataScanFilterFiles.java | 2 + ...TestSparkDistributedDataScanReporting.java | 2 + .../iceberg/spark/DummyMetricsServlet.java | 62 +++++++++++++++++++ .../org/apache/iceberg/spark/TestBase.java | 9 +++ ...rquetDictionaryEncodedVectorizedReads.java | 2 + .../iceberg/spark/source/ScanTestBase.java | 2 + .../spark/source/TestFilteredScan.java | 2 + .../source/TestForwardCompatibility.java | 2 + .../spark/source/TestIcebergSpark.java | 2 + 
.../spark/source/TestPartitionPruning.java | 2 + .../spark/source/TestPartitionValues.java | 2 + .../spark/source/TestSnapshotSelection.java | 2 + .../spark/source/TestSparkDataFile.java | 2 + .../spark/source/TestSparkDataWrite.java | 2 + .../spark/source/TestSparkReadProjection.java | 2 + .../spark/source/TestSparkReaderDeletes.java | 2 + .../TestSparkReaderWithBloomFilter.java | 2 + .../spark/source/TestStructuredStreaming.java | 2 + .../spark/source/TestWriteMetricsConfig.java | 2 + .../spark/sql/TestAggregatePushDown.java | 1 + 137 files changed, 530 insertions(+), 105 deletions(-) create mode 100644 spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java create mode 100644 spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java create mode 100644 spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java create mode 100644 spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle index 714ee703b842..73702448ef48 100644 --- a/spark/v3.4/build.gradle +++ b/spark/v3.4/build.gradle @@ -107,13 +107,8 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - testImplementation libs.sqlite.jdbc + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.awaitility - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet } test { @@ -174,13 +169,7 @@ 
project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet - testRuntimeOnly libs.sqlite.jdbc - + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.avro.avro testImplementation libs.parquet.hadoop testImplementation libs.awaitility @@ -272,11 +261,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet - integrationRuntimeOnly libs.sqlite.jdbc + integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) // Not allowed on our classpath, only the runtime jar is allowed integrationCompileOnly project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVersion}") diff --git a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java index f48e39e500c0..86f3f19de937 100644 --- 
a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java +++ b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java @@ -31,6 +31,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions; import org.apache.iceberg.util.ThreadPools; import org.apache.spark.sql.SparkSession; @@ -157,7 +158,7 @@ private void initDataAndDeletes() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java index dc625d240769..97e6b86dabce 100644 --- a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java +++ b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java @@ -155,7 +155,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git 
a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java index 2b189c242867..eec32d315fc8 100644 --- a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java +++ b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java @@ -156,7 +156,7 @@ public void localPlanningWithoutFilterWithStats(Blackhole blackhole) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java index 42b367af521d..6138d8de967c 100644 --- a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java +++ b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java @@ -240,7 +240,7 @@ private Dataset randomDataDF(Schema schema, int numRows) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java index 
d917eae5eb0f..caa23625fc44 100644 --- a/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java +++ b/spark/v3.4/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java @@ -138,7 +138,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 4c1a5095916c..834640e24328 100644 --- a/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -59,6 +59,7 @@ public static void startMetastoreAndSpark() { .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java b/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java index 68406a20e725..317bd96e7df1 100644 --- a/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java +++ b/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java @@ -37,6 +37,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; 
import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -179,6 +180,7 @@ private void setupSpark() { .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", catalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local"); spark = builder.getOrCreate(); } diff --git a/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java b/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java index 2ac7c26992e3..073e8c9327df 100644 --- a/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java +++ b/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java @@ -41,6 +41,7 @@ import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; @@ -393,6 +394,7 @@ protected void setupSpark() { "spark.sql.catalog.spark_catalog", "org.apache.iceberg.spark.SparkSessionCatalog") .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", getCatalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local[*]"); spark = builder.getOrCreate(); Configuration sparkHadoopConf = spark.sessionState().newHadoopConf(); diff --git a/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java 
b/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java index 68c537e34a4a..debe37866ff7 100644 --- a/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java +++ b/spark/v3.4/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java @@ -30,6 +30,7 @@ import org.apache.iceberg.UpdateProperties; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -94,7 +95,7 @@ protected void cleanupFiles() throws IOException { } protected void setupSpark(boolean enableDictionaryEncoding) { - SparkSession.Builder builder = SparkSession.builder().config("spark.ui.enabled", false); + SparkSession.Builder builder = SparkSession.builder().config(TestBase.DISABLE_UI); if (!enableDictionaryEncoding) { builder .config("parquet.dictionary.page.size", "1") diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index 404ba7284606..9b08d6f7ab1e 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.BeforeEach; @@ -90,6 +91,7 @@ protected static SparkSession initSpark(String serializer) { .master("local[2]") .config("spark.serializer", serializer) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + 
.config(TestBase.DISABLE_UI) .getOrCreate(); } } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 9361c63176e0..8eeb55171dbe 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -69,6 +70,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index a218f965ea65..eae640528f9e 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -23,6 +23,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -62,6 +63,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") 
.config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index acd4688440d1..6ad0907fffed 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -59,6 +60,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index 3c32b4693684..b89109174d90 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,13 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + // disable Spark UI and use dummy servlet to avoid dependency conflicts with Spark's Jetty version + public static final Map DISABLE_UI = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -79,6 +86,7 @@ public static void startMetastoreAndSpark() { .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 6647a1b483e0..c368c4a815fe 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -37,6 +37,7 @@ import org.apache.iceberg.TableProperties; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; @@ -62,6 +63,7 @@ public static void startSpark() { SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .master("local[2]") + .config(TestBase.DISABLE_UI) .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 308b1bd2c646..cfc38ed66fac 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -58,6 +58,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import 
org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; @@ -125,6 +126,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); // define UDFs used by partition tests diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index c03f7b94eca9..dcd9c2897e08 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java @@ -46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -99,6 +100,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..a637b975fe2b 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import 
org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index e1402396fa7f..b72fcf82730c 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -59,6 +59,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -118,6 +119,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index b0ad930487b1..82575a720236 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; 
+import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 11865db7fce5..fe754f4a02ba 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -91,6 +92,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 833a0b6b5eb0..4b7598508c65 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -59,6 +59,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import 
org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -125,6 +126,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 4f4c0c7f7aea..6e8a6c6edbd7 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..de6a5e59029c 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import 
org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); ImmutableMap config = ImmutableMap.of( diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index eff032743e3b..33b5a1d6e600 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -71,6 +71,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -132,6 +133,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..cb2f866fab10 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index dc4fc7e187fb..a974b58a9714 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -69,6 +70,7 @@ public static void startSpark() { .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .config("spark.sql.shuffle.partitions", 4) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index c3fac70dd3fc..45ff9184566b 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -41,6 +41,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.ByteBuffers; @@ -84,6 +85,7 @@ public static void startSpark() { 
SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index 5ce56b4feca7..946456fe2be8 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -63,6 +63,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.sql.iceberg.aggregate_pushdown", "true") + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.5/build.gradle b/spark/v3.5/build.gradle index 20a9cfb007bd..5da182bcce92 100644 --- a/spark/v3.5/build.gradle +++ b/spark/v3.5/build.gradle @@ -107,14 +107,9 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - testImplementation libs.sqlite.jdbc + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.awaitility testImplementation(testFixtures(project(':iceberg-parquet'))) - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet } test { @@ -175,13 +170,7 @@ 
project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet - testRuntimeOnly libs.sqlite.jdbc - + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.avro.avro testImplementation libs.parquet.hadoop testImplementation libs.awaitility @@ -274,11 +263,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet - integrationRuntimeOnly libs.sqlite.jdbc + integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) // Not allowed on our classpath, only the runtime jar is allowed integrationCompileOnly project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVersion}") diff --git a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java index 8b0b05911f66..242ef7439a39 100644 --- 
a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java +++ b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java @@ -31,6 +31,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions; import org.apache.iceberg.util.ThreadPools; import org.apache.spark.sql.SparkSession; @@ -205,7 +206,7 @@ private void initDataAndDVs() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java index d7f285288004..03e0410c0adc 100644 --- a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java +++ b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java @@ -155,7 +155,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git 
a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java index 1d51350487c4..5cd8143f17bf 100644 --- a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java +++ b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java @@ -215,7 +215,7 @@ public void localPlanningViaDistributedScanWithoutFilterWithStats(Blackhole blac private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.driver.maxResultSize", "8G") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) diff --git a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java index ad78205ce98c..a77c130ee17a 100644 --- a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java +++ b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java @@ -199,7 +199,7 @@ private void initDataAndDeletes() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java index d917eae5eb0f..caa23625fc44 100644 --- 
a/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java +++ b/spark/v3.5/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java @@ -138,7 +138,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java index bfcb5af235d3..ef4f0090292c 100644 --- a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java +++ b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java @@ -49,7 +49,12 @@ public class TestExtendedParser { @BeforeAll public static void before() { - spark = SparkSession.builder().master("local").appName("TestExtendedParser").getOrCreate(); + spark = + SparkSession.builder() + .master("local") + .appName("TestExtendedParser") + .config(TestBase.DISABLE_UI) + .getOrCreate(); } @AfterAll diff --git a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 4c1a5095916c..834640e24328 100644 --- a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -59,6 +59,7 @@ public static void startMetastoreAndSpark() { 
.config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java index 64edb1002e99..47fe46558d7e 100644 --- a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java +++ b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java @@ -37,6 +37,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -179,6 +180,7 @@ private void setupSpark() { .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", catalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local"); spark = builder.getOrCreate(); } diff --git a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java index 4978961be641..683f6bb46d05 100644 --- a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java +++ b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java @@ -41,6 +41,7 @@ import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkSessionCatalog; 
+import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; @@ -394,6 +395,7 @@ protected void setupSpark() { "spark.sql.catalog.spark_catalog", "org.apache.iceberg.spark.SparkSessionCatalog") .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", getCatalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local[*]"); spark = builder.getOrCreate(); Configuration sparkHadoopConf = spark.sessionState().newHadoopConf(); diff --git a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java index c6794e43c636..3f242ce228ca 100644 --- a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java +++ b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java @@ -49,6 +49,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.util.ContentFileUtil; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.catalyst.InternalRow; @@ -234,7 +235,7 @@ private String generateDataFilePath() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java index ac74fb5a109c..db5789724056 100644 --- 
a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java +++ b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.catalyst.InternalRow; import org.apache.spark.sql.catalyst.analysis.NoSuchTableException; @@ -218,7 +219,7 @@ private String generateDataFilePath() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java index 68c537e34a4a..debe37866ff7 100644 --- a/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java +++ b/spark/v3.5/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java @@ -30,6 +30,7 @@ import org.apache.iceberg.UpdateProperties; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -94,7 +95,7 @@ protected void cleanupFiles() throws IOException { } protected void setupSpark(boolean enableDictionaryEncoding) { - SparkSession.Builder builder = SparkSession.builder().config("spark.ui.enabled", false); + SparkSession.Builder builder = 
SparkSession.builder().config(TestBase.DISABLE_UI); if (!enableDictionaryEncoding) { builder .config("parquet.dictionary.page.size", "1") diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index 404ba7284606..9b08d6f7ab1e 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.BeforeEach; @@ -90,6 +91,7 @@ protected static SparkSession initSpark(String serializer) { .master("local[2]") .config("spark.serializer", serializer) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 659507e4c5e3..e28603c0b43a 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -73,6 +74,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", 
"org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index a218f965ea65..2967f0e22cec 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -23,6 +23,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -61,6 +62,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") + .config(TestBase.DISABLE_UI) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index 2665d7ba8d3b..4f789d2c5ae9 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -63,6 +64,7 @@ public static void startSpark() { 
.master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index daf4e29ac075..5e7e1a1f6193 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,13 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + // disable Spark UI and use dummy servlet to avoid dependency conflicts with Spark's Jetty version + public static final Map DISABLE_UI = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -79,6 +86,7 @@ public static void startMetastoreAndSpark() { .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java index 284fa0b0552f..b61ecfa2f442 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.spark.data.vectorized.VectorizedSparkParquetReaders; import org.apache.iceberg.types.Types; @@ -65,6 +66,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 1c5905744a75..39ea25ae6f54 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -37,6 +37,7 @@ import org.apache.iceberg.TableProperties; import org.apache.iceberg.hadoop.HadoopTables; import 
org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; @@ -62,6 +63,7 @@ public static void startSpark() { SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .master("local[2]") + .config(TestBase.DISABLE_UI) .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 61d6501a6847..26c2b6ab70cb 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -54,6 +54,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; @@ -116,6 +117,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index 153564f7d129..0ba3f0d35fd7 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java 
@@ -46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -98,6 +99,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..a637b975fe2b 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index e1402396fa7f..b72fcf82730c 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -59,6 +59,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; 
import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -118,6 +119,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index 0b6ab2052b66..9b5b22a73f36 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 11865db7fce5..fe754f4a02ba 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ 
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -91,6 +92,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 3051e27d7202..a5581978feed 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -59,6 +59,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -125,6 +126,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 94547c2cf8fb..3d27487f137d 100644 --- 
a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..de6a5e59029c 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); ImmutableMap config = ImmutableMap.of( diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java 
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index 076c476ac8ec..7a66bd86a9cd 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -76,6 +76,7 @@ import org.apache.iceberg.spark.ParquetReaderType; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -138,6 +139,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." + METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..cb2f866fab10 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index 54048bbf218a..a6900399acad 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -69,6 +70,7 @@ public static void startSpark() { .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .config("spark.sql.shuffle.partitions", 4) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index c3fac70dd3fc..45ff9184566b 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -41,6 +41,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.ByteBuffers; @@ -84,6 +85,7 @@ public static void startSpark() { 
SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index 5ce56b4feca7..946456fe2be8 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -63,6 +63,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.sql.iceberg.aggregate_pushdown", "true") + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.0/build.gradle b/spark/v4.0/build.gradle index fb9df4e0e0f2..a071c825f74c 100644 --- a/spark/v4.0/build.gradle +++ b/spark/v4.0/build.gradle @@ -116,14 +116,9 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - testImplementation libs.sqlite.jdbc + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.awaitility testImplementation(testFixtures(project(':iceberg-parquet'))) - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet } test { @@ -184,13 +179,7 @@ 
project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet - testRuntimeOnly libs.sqlite.jdbc - + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.avro.avro testImplementation libs.parquet.hadoop testImplementation libs.awaitility @@ -283,11 +272,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet - integrationRuntimeOnly libs.sqlite.jdbc + integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) // Not allowed on our classpath, only the runtime jar is allowed integrationCompileOnly project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVersion}") diff --git a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java index 9375ca3a4f46..5287ccd514ab 100644 --- 
a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java +++ b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java @@ -31,6 +31,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions; import org.apache.iceberg.util.ThreadPools; import org.apache.spark.sql.SparkSession; @@ -205,7 +206,7 @@ private void initDataAndDVs() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java index 963daa2c364c..ea31b98f1ac9 100644 --- a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java +++ b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java @@ -155,7 +155,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git 
a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java index 34d9d70e6ccb..f9558240f8cb 100644 --- a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java +++ b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java @@ -215,7 +215,7 @@ public void localPlanningViaDistributedScanWithoutFilterWithStats(Blackhole blac private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.driver.maxResultSize", "8G") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) diff --git a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java index 7c2def237874..e9c563b9b0ef 100644 --- a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java +++ b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java @@ -199,7 +199,7 @@ private void initDataAndDeletes() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java index d917eae5eb0f..caa23625fc44 100644 --- 
a/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java +++ b/spark/v4.0/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java @@ -138,7 +138,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java index bfcb5af235d3..ef4f0090292c 100644 --- a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java +++ b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java @@ -49,7 +49,12 @@ public class TestExtendedParser { @BeforeAll public static void before() { - spark = SparkSession.builder().master("local").appName("TestExtendedParser").getOrCreate(); + spark = + SparkSession.builder() + .master("local") + .appName("TestExtendedParser") + .config(TestBase.DISABLE_UI) + .getOrCreate(); } @AfterAll diff --git a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 796c47b545cc..f23a5d9db3ad 100644 --- a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -59,6 +59,7 @@ public static void startMetastoreAndSpark() { 
.config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java index e1d9ac18dac1..ad4c0f3e67e4 100644 --- a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java +++ b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java @@ -37,6 +37,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -179,6 +180,7 @@ private void setupSpark() { .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", catalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local"); spark = builder.getOrCreate(); } diff --git a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java index 4978961be641..683f6bb46d05 100644 --- a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java +++ b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java @@ -41,6 +41,7 @@ import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkSessionCatalog; 
+import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; @@ -394,6 +395,7 @@ protected void setupSpark() { "spark.sql.catalog.spark_catalog", "org.apache.iceberg.spark.SparkSessionCatalog") .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", getCatalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local[*]"); spark = builder.getOrCreate(); Configuration sparkHadoopConf = spark.sessionState().newHadoopConf(); diff --git a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java index c6794e43c636..3f242ce228ca 100644 --- a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java +++ b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java @@ -49,6 +49,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.util.ContentFileUtil; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.catalyst.InternalRow; @@ -234,7 +235,7 @@ private String generateDataFilePath() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java index ac74fb5a109c..db5789724056 100644 --- 
a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java +++ b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.catalyst.InternalRow; import org.apache.spark.sql.catalyst.analysis.NoSuchTableException; @@ -218,7 +219,7 @@ private String generateDataFilePath() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java index 68c537e34a4a..debe37866ff7 100644 --- a/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java +++ b/spark/v4.0/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java @@ -30,6 +30,7 @@ import org.apache.iceberg.UpdateProperties; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -94,7 +95,7 @@ protected void cleanupFiles() throws IOException { } protected void setupSpark(boolean enableDictionaryEncoding) { - SparkSession.Builder builder = SparkSession.builder().config("spark.ui.enabled", false); + SparkSession.Builder builder = 
SparkSession.builder().config(TestBase.DISABLE_UI); if (!enableDictionaryEncoding) { builder .config("parquet.dictionary.page.size", "1") diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index 404ba7284606..9b08d6f7ab1e 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.BeforeEach; @@ -90,6 +91,7 @@ protected static SparkSession initSpark(String serializer) { .master("local[2]") .config("spark.serializer", serializer) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 659507e4c5e3..e28603c0b43a 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -73,6 +74,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", 
"org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index a218f965ea65..eae640528f9e 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -23,6 +23,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -62,6 +63,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index 2665d7ba8d3b..4f789d2c5ae9 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -25,6 +25,7 @@ import java.util.List; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -63,6 +64,7 @@ public static void startSpark() { .master("local[2]") 
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index daf4e29ac075..5e7e1a1f6193 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,13 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + // disable Spark UI and use dummy servlet to avoid dependency conflicts with Spark's Jetty version + public static final Map DISABLE_UI = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -79,6 +86,7 @@ public static void startMetastoreAndSpark() { .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java index 284fa0b0552f..b61ecfa2f442 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.spark.data.vectorized.VectorizedSparkParquetReaders; import org.apache.iceberg.types.Types; @@ -65,6 +66,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 33c842d94be1..da9cd639218f 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -38,6 +38,7 @@ import org.apache.iceberg.data.Record; import org.apache.iceberg.hadoop.HadoopTables; import 
org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.TypeUtil; @@ -62,6 +63,7 @@ public static void startSpark() { SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .master("local[2]") + .config(TestBase.DISABLE_UI) .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 61d6501a6847..26c2b6ab70cb 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -54,6 +54,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; @@ -116,6 +117,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index c4e0d26c1c31..d7d8756a43b4 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java @@ 
-46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -98,6 +99,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..a637b975fe2b 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index b31880e8e958..d24b84d31437 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -60,6 +60,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import 
org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -119,6 +120,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index 0b6ab2052b66..9b5b22a73f36 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 11865db7fce5..fe754f4a02ba 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ 
b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -91,6 +92,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 35a837b6b88b..c6972cd9ec23 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -60,6 +60,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -126,6 +127,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 94547c2cf8fb..3d27487f137d 100644 --- 
a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..de6a5e59029c 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); ImmutableMap config = ImmutableMap.of( diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java 
b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index 076c476ac8ec..7a66bd86a9cd 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -76,6 +76,7 @@ import org.apache.iceberg.spark.ParquetReaderType; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -138,6 +139,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." + METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..cb2f866fab10 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index 54048bbf218a..a6900399acad 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -69,6 +70,7 @@ public static void startSpark() { .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .config("spark.sql.shuffle.partitions", 4) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index e2b5d8920e9f..ab2479d61058 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.ByteBuffers; @@ -85,6 +86,7 @@ public static void startSpark() { 
SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index ce0a0f26a096..e1d2b19f890c 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -63,6 +63,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.sql.iceberg.aggregate_pushdown", "true") + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/build.gradle b/spark/v4.1/build.gradle index a342a0b59c92..86f6433b4971 100644 --- a/spark/v4.1/build.gradle +++ b/spark/v4.1/build.gradle @@ -117,14 +117,9 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - testImplementation libs.sqlite.jdbc + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.awaitility testImplementation(testFixtures(project(':iceberg-parquet'))) - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet } test { @@ -185,13 +180,7 @@ 
project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet - testRuntimeOnly libs.sqlite.jdbc - + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.avro.avro testImplementation libs.parquet.hadoop testImplementation libs.awaitility @@ -285,11 +274,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet - integrationRuntimeOnly libs.sqlite.jdbc + integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) // Not allowed on our classpath, only the runtime jar is allowed integrationCompileOnly project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVersion}") diff --git a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java index a8b226ea1e37..a468a1cc8717 100644 --- 
a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java +++ b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/DeleteFileIndexBenchmark.java @@ -31,6 +31,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions; import org.apache.iceberg.util.ThreadPools; import org.apache.spark.sql.SparkSession; @@ -205,7 +206,7 @@ private void initDataAndDVs() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java index eeea81634596..bc34bf33e35e 100644 --- a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java +++ b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/MergeCardinalityCheckBenchmark.java @@ -155,7 +155,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git 
a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java index c50a3fd406d7..0df55de933cf 100644 --- a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java +++ b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/PlanningBenchmark.java @@ -215,7 +215,7 @@ public void localPlanningViaDistributedScanWithoutFilterWithStats(Blackhole blac private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.driver.maxResultSize", "8G") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) diff --git a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java index 8a8097834ef8..fd3eab4d9df6 100644 --- a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java +++ b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/TaskGroupPlanningBenchmark.java @@ -198,7 +198,7 @@ private void initDataAndDeletes() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) diff --git a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java index d917eae5eb0f..caa23625fc44 100644 --- 
a/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java +++ b/spark/v4.1/spark-extensions/src/jmh/java/org/apache/iceberg/spark/UpdateProjectionBenchmark.java @@ -138,7 +138,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java index bfcb5af235d3..ef4f0090292c 100644 --- a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java +++ b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java @@ -49,7 +49,12 @@ public class TestExtendedParser { @BeforeAll public static void before() { - spark = SparkSession.builder().master("local").appName("TestExtendedParser").getOrCreate(); + spark = + SparkSession.builder() + .master("local") + .appName("TestExtendedParser") + .config(TestBase.DISABLE_UI) + .getOrCreate(); } @AfterAll diff --git a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 6de4e0d6461e..f766fbb79aff 100644 --- a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -71,6 +71,7 @@ public static void startMetastoreAndSpark() { 
.config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPartitionedWritesToWapBranch.java b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPartitionedWritesToWapBranch.java index 1db18f3a857d..af065451ab69 100644 --- a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPartitionedWritesToWapBranch.java +++ b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestPartitionedWritesToWapBranch.java @@ -69,6 +69,7 @@ public static void startMetastoreAndSpark() { .config("spark.sql.shuffle.partitions", "4") .config("spark.sql.hive.metastorePartitionPruningFallbackOnException", "true") .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java index 231bb7c619f4..3fd84553f033 100644 --- a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java +++ b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/DeleteOrphanFilesBenchmark.java @@ -37,6 +37,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -179,6 +180,7 @@ private void setupSpark() { .config("spark.sql.catalog.spark_catalog", 
SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", catalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local"); spark = builder.getOrCreate(); } diff --git a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java index 4978961be641..683f6bb46d05 100644 --- a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java +++ b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/action/IcebergSortCompactionBenchmark.java @@ -41,6 +41,7 @@ import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.actions.SparkActions; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; @@ -394,6 +395,7 @@ protected void setupSpark() { "spark.sql.catalog.spark_catalog", "org.apache.iceberg.spark.SparkSessionCatalog") .config("spark.sql.catalog.spark_catalog.type", "hadoop") .config("spark.sql.catalog.spark_catalog.warehouse", getCatalogWarehouse()) + .config(TestBase.DISABLE_UI) .master("local[*]"); spark = builder.getOrCreate(); Configuration sparkHadoopConf = spark.sessionState().newHadoopConf(); diff --git a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java index c6794e43c636..3f242ce228ca 100644 --- a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java +++ b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVReaderBenchmark.java @@ -49,6 +49,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import 
org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.util.ContentFileUtil; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.catalyst.InternalRow; @@ -234,7 +235,7 @@ private String generateDataFilePath() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java index ac74fb5a109c..db5789724056 100644 --- a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java +++ b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/DVWriterBenchmark.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.Spark3Util; import org.apache.iceberg.spark.SparkSessionCatalog; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.catalyst.InternalRow; import org.apache.spark.sql.catalyst.analysis.NoSuchTableException; @@ -218,7 +219,7 @@ private String generateDataFilePath() { private void setupSpark() { this.spark = SparkSession.builder() - .config("spark.ui.enabled", false) + .config(TestBase.DISABLE_UI) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName()) .config("spark.sql.catalog.spark_catalog.type", "hadoop") diff --git a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java 
b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java index 68c537e34a4a..debe37866ff7 100644 --- a/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java +++ b/spark/v4.1/spark/src/jmh/java/org/apache/iceberg/spark/source/IcebergSourceBenchmark.java @@ -30,6 +30,7 @@ import org.apache.iceberg.UpdateProperties; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; import org.apache.spark.sql.SaveMode; @@ -94,7 +95,7 @@ protected void cleanupFiles() throws IOException { } protected void setupSpark(boolean enableDictionaryEncoding) { - SparkSession.Builder builder = SparkSession.builder().config("spark.ui.enabled", false); + SparkSession.Builder builder = SparkSession.builder().config(TestBase.DISABLE_UI); if (!enableDictionaryEncoding) { builder .config("parquet.dictionary.page.size", "1") diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index aa4f3dc72416..d1c724425c9f 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.List; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.BeforeEach; @@ -89,6 +90,7 @@ protected static SparkSession initSpark(String serializer) { .master("local[2]") .config("spark.serializer", serializer) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } } diff 
--git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 6ffaede5b069..a21c6a08ec3b 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.List; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -72,6 +73,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index 1e680ace292f..5edf4828229a 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -22,6 +22,7 @@ import static org.apache.iceberg.PlanningMode.LOCAL; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -61,6 +62,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git 
a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index 9b736004de57..e6f3c75475d8 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -24,6 +24,7 @@ import java.util.Arrays; import java.util.List; import org.apache.iceberg.spark.SparkReadConf; +import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.internal.SQLConf; import org.junit.jupiter.api.AfterAll; @@ -62,6 +63,7 @@ public static void startSpark() { .master("local[2]") .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index daf4e29ac075..507d7b313b42 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,13 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + // disable Spark UI and use dummy servlet to avoid dependency conflicts with Spark's Jetty version + public static final Map<String, String> DISABLE_UI = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -79,6 +86,7 @@ public static void startMetastoreAndSpark() { .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop."
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java index 284fa0b0552f..b61ecfa2f442 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.spark.data.vectorized.VectorizedSparkParquetReaders; import org.apache.iceberg.types.Types; @@ -65,6 +66,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 33c842d94be1..da9cd639218f 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -38,6 +38,7 @@ import org.apache.iceberg.data.Record; import org.apache.iceberg.hadoop.HadoopTables;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.TypeUtil; @@ -62,6 +63,7 @@ public static void startSpark() { SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .master("local[2]") + .config(TestBase.DISABLE_UI) .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 16988d1e28e9..24fecf4eb2ca 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -54,6 +54,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; @@ -116,6 +117,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index 0eb546bc5661..d0103ff46ead 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ 
b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java @@ -46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -98,6 +99,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..a637b975fe2b 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index b31880e8e958..d24b84d31437 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ 
b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -60,6 +60,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -119,6 +120,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index 0b6ab2052b66..9b5b22a73f36 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 
416f57e5142a..3004e8fa5c5e 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -44,6 +44,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.AnalysisException; import org.apache.spark.sql.Dataset; @@ -97,6 +98,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 35a837b6b88b..c6972cd9ec23 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -60,6 +60,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -126,6 +127,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java 
b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 465e6a5abf3d..e39a34849ea8 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..de6a5e59029c 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); ImmutableMap<String, String> config = ImmutableMap.of( diff --git
a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index 6b9e20fe2367..5719b887f88d 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -76,6 +76,7 @@ import org.apache.iceberg.spark.ParquetReaderType; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -138,6 +139,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." + METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..cb2f866fab10 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void 
startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." + METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index 635229f6a005..5b6df47a58a3 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -68,6 +69,7 @@ public static void startSpark() { .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .config("spark.sql.shuffle.partitions", 4) + .config(TestBase.DISABLE_UI) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index e2b5d8920e9f..ab2479d61058 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; 
import org.apache.iceberg.util.ByteBuffers; @@ -85,6 +86,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index ce0a0f26a096..e1d2b19f890c 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -63,6 +63,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.sql.iceberg.aggregate_pushdown", "true") + .config(TestBase.DISABLE_UI) .enableHiveSupport() .getOrCreate();