diff --git a/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java b/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java index b51d97cc611a..34d425b16e1c 100644 --- a/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java +++ b/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java @@ -37,10 +37,10 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.rest.auth.OAuth2Properties; import org.apache.iceberg.util.ThreadPools; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; diff --git a/build.gradle b/build.gradle index 50481df2e2a9..8abd577f339a 100644 --- a/build.gradle +++ b/build.gradle @@ -379,11 +379,14 @@ project(':iceberg-core') { compileOnly(libs.hadoop3.client) { exclude group: 'org.apache.avro', module: 'avro' exclude group: 'org.slf4j', module: 'slf4j-log4j12' + exclude group: "org.eclipse.jetty" } testImplementation libs.jetty.servlet testImplementation libs.jakarta.servlet testImplementation libs.jetty.server + testImplementation libs.jetty.compression.server + testImplementation libs.jetty.compression.gzip testImplementation libs.mockserver.netty testImplementation libs.mockserver.client.java testImplementation libs.sqlite.jdbc @@ -1089,6 +1092,8 @@ project(':iceberg-open-api') { testFixturesImplementation libs.jetty.servlet testFixturesImplementation libs.jetty.server + testFixturesImplementation libs.jetty.compression.server + testFixturesImplementation libs.jetty.compression.gzip testFixturesImplementation 
libs.sqlite.jdbc testFixturesCompileOnly libs.apiguardian diff --git a/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java b/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java index 5c9e8fe6d42b..e5f3319f0d14 100644 --- a/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java +++ b/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java @@ -325,7 +325,8 @@ public T handleRequest( String eTag = ETagProvider.of(response.metadataLocation()); - if (ifNoneMatchHeader.isPresent() && eTag.equals(ifNoneMatchHeader.get().value())) { + if (ifNoneMatchHeader.isPresent() + && eTag.equals(stripETagSuffix(ifNoneMatchHeader.get().value()))) { return null; } @@ -738,4 +739,18 @@ private static SnapshotMode snapshotModeFromQueryParams(Map quer .getOrDefault("snapshots", RESTCatalogProperties.SNAPSHOT_LOADING_MODE_DEFAULT) .toUpperCase(Locale.US)); } + + /** + * Strips the compression suffix from an ETag value. + * + *

Jetty 12 appends "--gzip" to ETags when compression is enabled. This method removes such + * suffixes to allow proper ETag comparison. + */ + static String stripETagSuffix(String eTag) { + if (eTag == null) { + return null; + } + int suffixIndex = eTag.indexOf("--"); + return suffixIndex > 0 ? eTag.substring(0, suffixIndex) : eTag; + } } diff --git a/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java b/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java index f1a172a4237c..028a24fcf129 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestBaseWithRESTServer.java @@ -34,10 +34,10 @@ import org.apache.iceberg.inmemory.InMemoryCatalog; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.rest.responses.ErrorResponse; +import org.eclipse.jetty.compression.server.CompressionHandler; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; -import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.io.TempDir; @@ -75,7 +75,7 @@ public void before() throws Exception { new ServletContextHandler(ServletContextHandler.NO_SESSIONS); servletContext.addServlet( new ServletHolder(new RESTCatalogServlet(adapterForRESTServer)), "/*"); - servletContext.setHandler(new GzipHandler()); + servletContext.setHandler(new CompressionHandler()); this.httpServer = new Server(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); httpServer.setHandler(servletContext); diff --git a/core/src/test/java/org/apache/iceberg/rest/TestFreshnessAwareLoading.java 
b/core/src/test/java/org/apache/iceberg/rest/TestFreshnessAwareLoading.java index 0652bb378bd4..afe7397da93a 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestFreshnessAwareLoading.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestFreshnessAwareLoading.java @@ -20,6 +20,7 @@ import static org.apache.iceberg.TestBase.FILE_A; import static org.apache.iceberg.TestBase.SCHEMA; +import static org.apache.iceberg.rest.RESTCatalogAdapter.stripETagSuffix; import static org.apache.iceberg.rest.RESTTableCache.SessionIdTableId; import static org.apache.iceberg.rest.RESTTableCache.TableWithETag; import static org.apache.iceberg.rest.RequestMatcher.matches; @@ -184,7 +185,12 @@ public void notModifiedResponse() { assertThat(originalRequest.headers().contains(HttpHeaders.IF_NONE_MATCH)); assertThat( - originalRequest.headers().firstEntry(HttpHeaders.IF_NONE_MATCH).get().value()) + stripETagSuffix( + originalRequest + .headers() + .firstEntry(HttpHeaders.IF_NONE_MATCH) + .get() + .value())) .isEqualTo(eTag); assertThat( diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java index d03c5f9b8896..36399f979c36 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java @@ -113,10 +113,10 @@ import org.apache.iceberg.util.Pair; import org.assertj.core.api.InstanceOfAssertFactories; import org.awaitility.Awaitility; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; @@ -409,6 +409,15 @@ protected boolean 
requiresNamespaceCreate() { return true; } + @Override + protected boolean supportsNamesWithSlashes() { + // names with slashes are rejected and considered as suspicious characters after upgrading Jetty + // and the Servlet API. See also + // https://jakarta.ee/specifications/servlet/6.0/jakarta-servlet-spec-6.0.html#uri-path-canonicalization + // for additional details + return false; + } + /* RESTCatalog specific tests */ @Test diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java index fd2faf55087c..826dd53549b4 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalog.java @@ -57,10 +57,10 @@ import org.apache.iceberg.rest.responses.LoadViewResponse; import org.apache.iceberg.view.ViewCatalogTests; import org.apache.iceberg.view.ViewMetadata; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; diff --git a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java index 1ba340cc56c2..94311afcd9cb 100644 --- a/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java +++ b/core/src/test/java/org/apache/iceberg/rest/TestRESTViewCatalogWithAssumedViewSupport.java @@ -31,10 +31,10 @@ import org.apache.iceberg.inmemory.InMemoryCatalog; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import 
org.apache.iceberg.rest.responses.ConfigResponse; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import org.junit.jupiter.api.BeforeEach; public class TestRESTViewCatalogWithAssumedViewSupport extends TestRESTViewCatalog { diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml index 6ddddc4f292e..37eb052fabeb 100644 --- a/gradle/libs.versions.toml +++ b/gradle/libs.versions.toml @@ -69,7 +69,7 @@ jakarta-el-api = "3.0.3" jakarta-servlet-api = "6.1.0" jaxb-api = "2.3.1" jaxb-runtime = "2.3.9" -jetty = "11.0.26" +jetty = "12.1.5" junit = "5.14.2" junit-platform = "1.14.2" junit-pioneer = "2.3.0" @@ -208,7 +208,9 @@ guava-testlib = { module = "com.google.guava:guava-testlib", version.ref = "guav jakarta-el-api = { module = "jakarta.el:jakarta.el-api", version.ref = "jakarta-el-api" } jakarta-servlet = {module = "jakarta.servlet:jakarta.servlet-api", version.ref = "jakarta-servlet-api"} jetty-server = { module = "org.eclipse.jetty:jetty-server", version.ref = "jetty" } -jetty-servlet = { module = "org.eclipse.jetty:jetty-servlet", version.ref = "jetty" } +jetty-servlet = { module = "org.eclipse.jetty.ee10:jetty-ee10-servlet", version.ref = "jetty" } +jetty-compression-server = { module = "org.eclipse.jetty.compression:jetty-compression-server", version.ref = "jetty" } +jetty-compression-gzip = { module = "org.eclipse.jetty.compression:jetty-compression-gzip", version.ref = "jetty" } junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit" } junit-jupiter-engine = { module = "org.junit.jupiter:junit-jupiter-engine", version.ref = "junit" } junit-pioneer = { module = "org.junit-pioneer:junit-pioneer", version.ref = "junit-pioneer" } diff --git 
a/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java b/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java index 87ec90663db2..9a1f86706db6 100644 --- a/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java +++ b/open-api/src/test/java/org/apache/iceberg/rest/RESTCompatibilityKitCatalogTests.java @@ -97,4 +97,13 @@ protected boolean supportsNamesWithDot() { return PropertyUtil.propertyAsBoolean( restCatalog.properties(), RESTCompatibilityKitSuite.RCK_SUPPORTS_NAMES_WITH_DOT, false); } + + @Override + protected boolean supportsNamesWithSlashes() { + // names with slashes are rejected and considered as suspicious characters after upgrading Jetty + // and the Servlet API. See also + // https://jakarta.ee/specifications/servlet/6.0/jakarta-servlet-spec-6.0.html#uri-path-canonicalization + // for additional details + return false; + } } diff --git a/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java b/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java index 5f0f89d92646..a9675e77f5af 100644 --- a/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java +++ b/open-api/src/testFixtures/java/org/apache/iceberg/rest/RESTCatalogServer.java @@ -28,12 +28,12 @@ import org.apache.iceberg.jdbc.JdbcCatalog; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.util.PropertyUtil; +import org.eclipse.jetty.compression.server.CompressionHandler; +import org.eclipse.jetty.ee10.servlet.ServletContextHandler; +import org.eclipse.jetty.ee10.servlet.ServletHolder; import org.eclipse.jetty.server.Connector; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.ServerConnector; -import org.eclipse.jetty.server.handler.gzip.GzipHandler; -import org.eclipse.jetty.servlet.ServletContextHandler; -import org.eclipse.jetty.servlet.ServletHolder; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -115,7 +115,7 @@ public void start(boolean join) throws Exception { ServletContextHandler context = new ServletContextHandler(ServletContextHandler.NO_SESSIONS); ServletHolder servletHolder = new ServletHolder(servlet); context.addServlet(servletHolder, "/*"); - context.insertHandler(new GzipHandler()); + context.insertHandler(new CompressionHandler()); this.httpServer = new Server( diff --git a/spark/v3.4/build.gradle b/spark/v3.4/build.gradle index 714ee703b842..37a90a6aec7e 100644 --- a/spark/v3.4/build.gradle +++ b/spark/v3.4/build.gradle @@ -107,9 +107,7 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.sqlite.jdbc testImplementation libs.awaitility // runtime dependencies for running REST Catalog based integration test @@ -174,11 +172,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet + testImplementation 
project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') testRuntimeOnly libs.sqlite.jdbc testImplementation libs.avro.avro @@ -272,10 +266,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet + integrationRuntimeOnly project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') integrationRuntimeOnly libs.sqlite.jdbc // Not allowed on our classpath, only the runtime jar is allowed diff --git a/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 4c1a5095916c..1c9c347401c9 100644 --- a/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -18,9 +18,6 @@ */ package org.apache.iceberg.spark.extensions; -import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTOREURIS; - -import java.net.InetAddress; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import org.apache.iceberg.CatalogUtil; @@ -49,14 +46,11 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(baseConfigs(hiveConf)) .config("spark.testing", "true") - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.sql.extensions", 
IcebergSparkSessionExtensions.class.getName()) - .config("spark.hadoop." + METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.shuffle.partitions", "4") .config("spark.sql.hive.metastorePartitionPruningFallbackOnException", "true") - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) .enableHiveSupport() diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index 404ba7284606..b86a5f35bf6b 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -89,6 +89,7 @@ protected static SparkSession initSpark(String serializer) { return SparkSession.builder() .master("local[2]") .config("spark.serializer", serializer) + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 9361c63176e0..b59f1ec08eb8 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -67,6 +67,7 @@ public static void startSpark() { TestSparkDistributedDataScanDeletes.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git 
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index a218f965ea65..58bd643b10e7 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -60,6 +60,7 @@ public static void startSpark() { TestSparkDistributedDataScanFilterFiles.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index acd4688440d1..962f8738e12c 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -57,6 +57,7 @@ public static void startSpark() { TestSparkDistributedDataScanReporting.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more 
contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index 3c32b4693684..a6781fe518d6 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,12 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + public static final Map DISABLE_UI_CONFIGS = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -75,10 +81,7 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") - .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(baseConfigs(hiveConf)) .enableHiveSupport() .getOrCreate(); @@ -110,6 +113,16 @@ public static void stopMetastoreAndSpark() throws Exception { } } + protected static Map baseConfigs(HiveConf conf) { + return ImmutableMap.builder() + .put("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .put("spark.hadoop." + METASTOREURIS.varname, conf.get(METASTOREURIS.varname)) + .put(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") + .put("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .putAll(DISABLE_UI_CONFIGS) + .build(); + } + protected long waitUntilAfter(long timestampMillis) { long current = System.currentTimeMillis(); while (current <= timestampMillis) { diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 6647a1b483e0..e2b59499e697 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -37,6 +37,7 @@ import org.apache.iceberg.TableProperties; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; @@ -61,6 +62,7 @@ public static void startSpark() { ScanTestBase.spark = SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .master("local[2]") .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git 
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 308b1bd2c646..599b7bd5fa7d 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -58,6 +58,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; @@ -125,6 +126,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); // define UDFs used by partition tests diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index c03f7b94eca9..41485ce81512 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java @@ -46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -99,6 +100,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", 
InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..515d33299793 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index e1402396fa7f..42dc5e37ef7b 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -59,6 +59,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -118,6 +119,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + 
.config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index b0ad930487b1..7eb2d4bf8d75 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 11865db7fce5..334f8bc0b0ff 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -91,6 +92,7 @@ public static void startSpark() { 
SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 833a0b6b5eb0..395a57e17e5b 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -59,6 +59,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -125,6 +126,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 4f4c0c7f7aea..8cd87a770540 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import 
org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..acd128172b30 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); ImmutableMap config = ImmutableMap.of( diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index 64dbcbf5d0f3..128cd6982b43 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -71,6 +71,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import 
org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -132,6 +133,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." + METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..45e70f8b2ddb 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index dc4fc7e187fb..2d746d58d0c4 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -68,6 +69,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .config("spark.sql.shuffle.partitions", 4) .getOrCreate(); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index c3fac70dd3fc..ffcc7da2053b 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -41,6 +41,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.ByteBuffers; @@ -84,6 +85,7 
@@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index 5ce56b4feca7..f03af44317cc 100644 --- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -26,58 +26,22 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; -import org.apache.iceberg.CatalogUtil; import org.apache.iceberg.ParameterizedTestExtension; import org.apache.iceberg.TableProperties; -import org.apache.iceberg.catalog.Namespace; -import org.apache.iceberg.exceptions.AlreadyExistsException; -import org.apache.iceberg.hive.HiveCatalog; -import org.apache.iceberg.hive.TestHiveMetastore; -import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.CatalogTestBase; import org.apache.iceberg.spark.SparkReadOptions; -import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; -import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.execution.ExplainMode; import org.apache.spark.sql.functions; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; @ExtendWith(ParameterizedTestExtension.class) public class TestAggregatePushDown extends CatalogTestBase { - @BeforeAll - public static void startMetastoreAndSpark() { - 
TestBase.metastore = new TestHiveMetastore(); - metastore.start(); - TestBase.hiveConf = metastore.hiveConf(); - - TestBase.spark.close(); - - TestBase.spark = - SparkSession.builder() - .master("local[2]") - .config("spark.sql.iceberg.aggregate_pushdown", "true") - .enableHiveSupport() - .getOrCreate(); - - TestBase.catalog = - (HiveCatalog) - CatalogUtil.loadCatalog( - HiveCatalog.class.getName(), "hive", ImmutableMap.of(), hiveConf); - - try { - catalog.createNamespace(Namespace.of("default")); - } catch (AlreadyExistsException ignored) { - // the default namespace already exists. ignore the create error - } - } - @AfterEach public void removeTables() { sql("DROP TABLE IF EXISTS %s", tableName); diff --git a/spark/v3.5/build.gradle b/spark/v3.5/build.gradle index 20a9cfb007bd..cda6cf57cd4a 100644 --- a/spark/v3.5/build.gradle +++ b/spark/v3.5/build.gradle @@ -107,14 +107,10 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.sqlite.jdbc testImplementation libs.awaitility testImplementation(testFixtures(project(':iceberg-parquet'))) - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet } test { @@ -175,11 +171,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation 
project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet + testImplementation project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') testRuntimeOnly libs.sqlite.jdbc testImplementation libs.avro.avro @@ -274,10 +266,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet + integrationRuntimeOnly project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') integrationRuntimeOnly libs.sqlite.jdbc // Not allowed on our classpath, only the runtime jar is allowed diff --git a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java index bfcb5af235d3..fd8f6ed12515 100644 --- a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java +++ b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java @@ -49,7 +49,12 @@ public class TestExtendedParser { @BeforeAll public static void before() { - spark = SparkSession.builder().master("local").appName("TestExtendedParser").getOrCreate(); + spark = + SparkSession.builder() + .master("local") + .appName("TestExtendedParser") + .config(TestBase.DISABLE_UI_CONFIGS) + .getOrCreate(); } 
@AfterAll diff --git a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 4c1a5095916c..1c9c347401c9 100644 --- a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -18,9 +18,6 @@ */ package org.apache.iceberg.spark.extensions; -import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTOREURIS; - -import java.net.InetAddress; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import org.apache.iceberg.CatalogUtil; @@ -49,14 +46,11 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(baseConfigs(hiveConf)) .config("spark.testing", "true") - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) - .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.shuffle.partitions", "4") .config("spark.sql.hive.metastorePartitionPruningFallbackOnException", "true") - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) .enableHiveSupport() diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index 404ba7284606..b86a5f35bf6b 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -89,6 +89,7 @@ protected static SparkSession initSpark(String serializer) { return SparkSession.builder() .master("local[2]") .config("spark.serializer", serializer) + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 659507e4c5e3..ef80c8d6a3ba 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -71,6 +71,7 @@ public static void startSpark() { TestSparkDistributedDataScanDeletes.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java 
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index a218f965ea65..58bd643b10e7 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -60,6 +60,7 @@ public static void startSpark() { TestSparkDistributedDataScanFilterFiles.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index 2665d7ba8d3b..cabe55e285f2 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -61,6 +61,7 @@ public static void startSpark() { TestSparkDistributedDataScanReporting.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index daf4e29ac075..ce924f47abb3 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,12 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + public static final Map DISABLE_UI_CONFIGS = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -75,10 +81,7 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") - .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(baseConfigs(hiveConf)) .enableHiveSupport() .getOrCreate(); @@ -110,6 +113,16 @@ public static void stopMetastoreAndSpark() throws Exception { } } + protected static Map baseConfigs(HiveConf conf) { + return ImmutableMap.builder() + .put("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .put("spark.hadoop." + METASTOREURIS.varname, conf.get(METASTOREURIS.varname)) + .put(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") + .put("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .putAll(DISABLE_UI_CONFIGS) + .build(); + } + protected long waitUntilAfter(long timestampMillis) { long current = System.currentTimeMillis(); while (current <= timestampMillis) { diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java index 284fa0b0552f..45cc64a921c9 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.spark.data.vectorized.VectorizedSparkParquetReaders; import org.apache.iceberg.types.Types; @@ -64,6 +65,7 @@ public static void startSpark() { spark = SparkSession.builder() 
.master("local[2]") + .config(TestBase.DISABLE_UI_CONFIGS) .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 1c5905744a75..1cddc26bde6d 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -37,6 +37,7 @@ import org.apache.iceberg.TableProperties; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; @@ -61,6 +62,7 @@ public static void startSpark() { ScanTestBase.spark = SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .master("local[2]") .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 61d6501a6847..f3a4e7f6dabe 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -54,6 +54,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.Types; import 
org.apache.spark.sql.Dataset; @@ -116,6 +117,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index 153564f7d129..46aa9cc15854 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java @@ -46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -98,6 +99,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..515d33299793 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public 
static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index e1402396fa7f..42dc5e37ef7b 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -59,6 +59,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -118,6 +119,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index 0b6ab2052b66..135f98e733ee 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import 
org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 11865db7fce5..334f8bc0b0ff 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -91,6 +92,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 3051e27d7202..b087eb8ca9fd 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -59,6 +59,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import 
org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -125,6 +126,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 94547c2cf8fb..fc2cebd0730b 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..acd128172b30 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; 
import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); ImmutableMap config = ImmutableMap.of( diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index c7716d343024..c7a90b124ef9 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -76,6 +76,7 @@ import org.apache.iceberg.spark.ParquetReaderType; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -138,6 +139,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..45e70f8b2ddb 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index 54048bbf218a..82e0ba27e3bd 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -68,6 +69,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .config("spark.sql.shuffle.partitions", 4) .getOrCreate(); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index c3fac70dd3fc..ffcc7da2053b 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -41,6 +41,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.ByteBuffers; @@ -84,6 +85,7 
@@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index 5ce56b4feca7..f03af44317cc 100644 --- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -26,58 +26,22 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; -import org.apache.iceberg.CatalogUtil; import org.apache.iceberg.ParameterizedTestExtension; import org.apache.iceberg.TableProperties; -import org.apache.iceberg.catalog.Namespace; -import org.apache.iceberg.exceptions.AlreadyExistsException; -import org.apache.iceberg.hive.HiveCatalog; -import org.apache.iceberg.hive.TestHiveMetastore; -import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.CatalogTestBase; import org.apache.iceberg.spark.SparkReadOptions; -import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; -import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.execution.ExplainMode; import org.apache.spark.sql.functions; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; @ExtendWith(ParameterizedTestExtension.class) public class TestAggregatePushDown extends CatalogTestBase { - @BeforeAll - public static void startMetastoreAndSpark() { - 
TestBase.metastore = new TestHiveMetastore(); - metastore.start(); - TestBase.hiveConf = metastore.hiveConf(); - - TestBase.spark.close(); - - TestBase.spark = - SparkSession.builder() - .master("local[2]") - .config("spark.sql.iceberg.aggregate_pushdown", "true") - .enableHiveSupport() - .getOrCreate(); - - TestBase.catalog = - (HiveCatalog) - CatalogUtil.loadCatalog( - HiveCatalog.class.getName(), "hive", ImmutableMap.of(), hiveConf); - - try { - catalog.createNamespace(Namespace.of("default")); - } catch (AlreadyExistsException ignored) { - // the default namespace already exists. ignore the create error - } - } - @AfterEach public void removeTables() { sql("DROP TABLE IF EXISTS %s", tableName); diff --git a/spark/v4.0/build.gradle b/spark/v4.0/build.gradle index fb9df4e0e0f2..3800cff0e3ee 100644 --- a/spark/v4.0/build.gradle +++ b/spark/v4.0/build.gradle @@ -116,14 +116,10 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.sqlite.jdbc testImplementation libs.awaitility testImplementation(testFixtures(project(':iceberg-parquet'))) - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet } test { @@ -184,11 +180,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation 
project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet + testImplementation project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') testRuntimeOnly libs.sqlite.jdbc testImplementation libs.avro.avro @@ -283,10 +275,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet + integrationRuntimeOnly project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') integrationRuntimeOnly libs.sqlite.jdbc // Not allowed on our classpath, only the runtime jar is allowed diff --git a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java index bfcb5af235d3..fd8f6ed12515 100644 --- a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java +++ b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java @@ -49,7 +49,12 @@ public class TestExtendedParser { @BeforeAll public static void before() { - spark = SparkSession.builder().master("local").appName("TestExtendedParser").getOrCreate(); + spark = + SparkSession.builder() + .master("local") + .appName("TestExtendedParser") + .config(TestBase.DISABLE_UI_CONFIGS) + .getOrCreate(); } 
@AfterAll diff --git a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 796c47b545cc..9bf59fbfaaf8 100644 --- a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -18,9 +18,6 @@ */ package org.apache.iceberg.spark.extensions; -import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTOREURIS; - -import java.net.InetAddress; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import org.apache.iceberg.CatalogUtil; @@ -49,14 +46,11 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(baseConfigs(hiveConf)) .config("spark.testing", "true") - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) - .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.shuffle.partitions", "4") .config("spark.sql.hive.metastorePartitionPruningFallbackOnException", "true") - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) .enableHiveSupport() diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index 404ba7284606..b86a5f35bf6b 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -89,6 +89,7 @@ protected static SparkSession initSpark(String serializer) { return SparkSession.builder() .master("local[2]") .config("spark.serializer", serializer) + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 659507e4c5e3..ef80c8d6a3ba 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -71,6 +71,7 @@ public static void startSpark() { TestSparkDistributedDataScanDeletes.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java 
b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index a218f965ea65..58bd643b10e7 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -60,6 +60,7 @@ public static void startSpark() { TestSparkDistributedDataScanFilterFiles.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index 2665d7ba8d3b..cabe55e285f2 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -61,6 +61,7 @@ public static void startSpark() { TestSparkDistributedDataScanReporting.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index daf4e29ac075..ce924f47abb3 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,12 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + public static final Map DISABLE_UI_CONFIGS = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -75,10 +81,7 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") - .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(baseConfigs(hiveConf)) .enableHiveSupport() .getOrCreate(); @@ -110,6 +113,16 @@ public static void stopMetastoreAndSpark() throws Exception { } } + protected static Map baseConfigs(HiveConf conf) { + return ImmutableMap.builder() + .put("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .put("spark.hadoop." + METASTOREURIS.varname, conf.get(METASTOREURIS.varname)) + .put(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") + .put("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .putAll(DISABLE_UI_CONFIGS) + .build(); + } + protected long waitUntilAfter(long timestampMillis) { long current = System.currentTimeMillis(); while (current <= timestampMillis) { diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java index 284fa0b0552f..45cc64a921c9 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.spark.data.vectorized.VectorizedSparkParquetReaders; import org.apache.iceberg.types.Types; @@ -64,6 +65,7 @@ public static void startSpark() { spark = SparkSession.builder() 
.master("local[2]") + .config(TestBase.DISABLE_UI_CONFIGS) .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 33c842d94be1..316fb7407c40 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -38,6 +38,7 @@ import org.apache.iceberg.data.Record; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.TypeUtil; @@ -61,6 +62,7 @@ public static void startSpark() { ScanTestBase.spark = SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .master("local[2]") .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 61d6501a6847..f3a4e7f6dabe 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -54,6 +54,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.Types; import 
org.apache.spark.sql.Dataset; @@ -116,6 +117,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index c4e0d26c1c31..19e7d11aa726 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java @@ -46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -98,6 +99,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..515d33299793 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public 
static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index b31880e8e958..f181b172941d 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -60,6 +60,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -119,6 +120,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index 0b6ab2052b66..135f98e733ee 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import 
org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 11865db7fce5..334f8bc0b0ff 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -91,6 +92,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 35a837b6b88b..8f407eedb97f 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -60,6 +60,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import 
org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -126,6 +127,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 94547c2cf8fb..fc2cebd0730b 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..acd128172b30 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; 
import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); ImmutableMap config = ImmutableMap.of( diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index c7716d343024..c7a90b124ef9 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -76,6 +76,7 @@ import org.apache.iceberg.spark.ParquetReaderType; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -138,6 +139,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..45e70f8b2ddb 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index 54048bbf218a..82e0ba27e3bd 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -68,6 +69,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .config("spark.sql.shuffle.partitions", 4) .getOrCreate(); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index e2b5d8920e9f..a6f5b50c8e4e 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.ByteBuffers; @@ -85,6 +86,7 
@@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index ce0a0f26a096..f03af44317cc 100644 --- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -26,58 +26,22 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; -import org.apache.iceberg.CatalogUtil; import org.apache.iceberg.ParameterizedTestExtension; import org.apache.iceberg.TableProperties; -import org.apache.iceberg.catalog.Namespace; -import org.apache.iceberg.exceptions.AlreadyExistsException; -import org.apache.iceberg.hive.HiveCatalog; -import org.apache.iceberg.hive.TestHiveMetastore; -import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.CatalogTestBase; import org.apache.iceberg.spark.SparkReadOptions; -import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; -import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.execution.ExplainMode; import org.apache.spark.sql.functions; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; @ExtendWith(ParameterizedTestExtension.class) public class TestAggregatePushDown extends CatalogTestBase { - @BeforeAll - public static void startMetastoreAndSpark() { - 
TestBase.metastore = new TestHiveMetastore(); - metastore.start(); - TestBase.hiveConf = metastore.hiveConf(); - - TestBase.spark.stop(); - - TestBase.spark = - SparkSession.builder() - .master("local[2]") - .config("spark.sql.iceberg.aggregate_pushdown", "true") - .enableHiveSupport() - .getOrCreate(); - - TestBase.catalog = - (HiveCatalog) - CatalogUtil.loadCatalog( - HiveCatalog.class.getName(), "hive", ImmutableMap.of(), hiveConf); - - try { - catalog.createNamespace(Namespace.of("default")); - } catch (AlreadyExistsException ignored) { - // the default namespace already exists. ignore the create error - } - } - @AfterEach public void removeTables() { sql("DROP TABLE IF EXISTS %s", tableName); diff --git a/spark/v4.1/build.gradle b/spark/v4.1/build.gradle index c189b037c8ba..bc844982b657 100644 --- a/spark/v4.1/build.gradle +++ b/spark/v4.1/build.gradle @@ -125,14 +125,10 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") { testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } + testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) testImplementation libs.sqlite.jdbc testImplementation libs.awaitility testImplementation(testFixtures(project(':iceberg-parquet'))) - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet } test { @@ -193,11 +189,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts') testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts') testImplementation 
project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts') - testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - // runtime dependencies for running REST Catalog based integration test - testRuntimeOnly libs.jetty.servlet + testImplementation project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') testRuntimeOnly libs.sqlite.jdbc testImplementation libs.avro.avro @@ -293,10 +285,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio integrationRuntimeOnly project(':iceberg-hive-metastore') // runtime dependencies for running REST Catalog based integration test integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts') - integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) { - transitive = false - } - integrationRuntimeOnly libs.jetty.servlet + integrationRuntimeOnly project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements') integrationRuntimeOnly libs.sqlite.jdbc // Not allowed on our classpath, only the runtime jar is allowed diff --git a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java index bfcb5af235d3..fd8f6ed12515 100644 --- a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java +++ b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/TestExtendedParser.java @@ -49,7 +49,12 @@ public class TestExtendedParser { @BeforeAll public static void before() { - spark = SparkSession.builder().master("local").appName("TestExtendedParser").getOrCreate(); + spark = + SparkSession.builder() + .master("local") + .appName("TestExtendedParser") + .config(TestBase.DISABLE_UI_CONFIGS) + .getOrCreate(); } 
@AfterAll diff --git a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java index 796c47b545cc..9bf59fbfaaf8 100644 --- a/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java +++ b/spark/v4.1/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/ExtensionsTestBase.java @@ -18,9 +18,6 @@ */ package org.apache.iceberg.spark.extensions; -import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.METASTOREURIS; - -import java.net.InetAddress; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; import org.apache.iceberg.CatalogUtil; @@ -49,14 +46,11 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(baseConfigs(hiveConf)) .config("spark.testing", "true") - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName()) - .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) .config("spark.sql.shuffle.partitions", "4") .config("spark.sql.hive.metastorePartitionPruningFallbackOnException", "true") - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") .config( SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean())) .enableHiveSupport() diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java index 404ba7284606..b86a5f35bf6b 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/SparkDistributedDataScanTestBase.java @@ -89,6 +89,7 @@ protected static SparkSession initSpark(String serializer) { return SparkSession.builder() .master("local[2]") .config("spark.serializer", serializer) + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java index 659507e4c5e3..ef80c8d6a3ba 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanDeletes.java @@ -71,6 +71,7 @@ public static void startSpark() { TestSparkDistributedDataScanDeletes.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java 
b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java index a218f965ea65..58bd643b10e7 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanFilterFiles.java @@ -60,6 +60,7 @@ public static void startSpark() { TestSparkDistributedDataScanFilterFiles.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java index 2665d7ba8d3b..cabe55e285f2 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/TestSparkDistributedDataScanReporting.java @@ -61,6 +61,7 @@ public static void startSpark() { TestSparkDistributedDataScanReporting.spark = SparkSession.builder() .master("local[2]") + .config(org.apache.iceberg.spark.TestBase.DISABLE_UI_CONFIGS) .config("spark.serializer", "org.apache.spark.serializer.KryoSerializer") .config(SQLConf.SHUFFLE_PARTITIONS().key(), "4") .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java new file mode 100644 index 000000000000..ee1f29e56fb3 --- /dev/null +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/DummyMetricsServlet.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.iceberg.spark; + +import com.codahale.metrics.MetricRegistry; +import java.util.Properties; +import org.apache.spark.SparkConf; +import org.apache.spark.metrics.sink.MetricsServlet; +import org.sparkproject.jetty.servlet.ServletContextHandler; + +/** + * A dummy implementation of {@link MetricsServlet} that does not start a server or report metrics. + * This is used in tests to avoid conflicts with Spark's jetty dependencies. + */ +public class DummyMetricsServlet extends MetricsServlet { + + /** + * Constructor required by Spark's reflection-based instantiation. 
+ * + * @param properties Metrics properties + * @param registry Metric registry + */ + public DummyMetricsServlet(Properties properties, MetricRegistry registry) { + super(properties, registry); + } + + @Override + public ServletContextHandler[] getHandlers(SparkConf conf) { + return new ServletContextHandler[] {}; + } + + @Override + public void start() { + // No-op for tests + } + + @Override + public void stop() { + // No-op for tests + } + + @Override + public void report() { + // No-op for tests + } +} diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java index daf4e29ac075..a5100e14ed25 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/TestBase.java @@ -65,6 +65,13 @@ public abstract class TestBase extends SparkTestHelperBase { protected static SparkSession spark = null; protected static JavaSparkContext sparkContext = null; protected static HiveCatalog catalog = null; + // Disable Spark UI and MetricsServlet to avoid dependency conflicts with Spark's Jetty 11 + public static final Map<String, String> DISABLE_UI_CONFIGS = + ImmutableMap.of( + "spark.ui.enabled", + "false", + "spark.metrics.conf.*.sink.servlet.class", + "org.apache.iceberg.spark.DummyMetricsServlet"); @BeforeAll public static void startMetastoreAndSpark() { @@ -75,10 +82,7 @@ public static void startMetastoreAndSpark() { TestBase.spark = SparkSession.builder() .master("local[2]") - .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) - .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") - .config("spark.hadoop."
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) - .config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .config(baseConfigs(hiveConf)) .enableHiveSupport() .getOrCreate(); @@ -110,6 +114,16 @@ public static void stopMetastoreAndSpark() throws Exception { } } + protected static Map<String, String> baseConfigs(HiveConf conf) { + return ImmutableMap.<String, String>builder() + .put("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .put("spark.hadoop." + METASTOREURIS.varname, conf.get(METASTOREURIS.varname)) + .put(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") + .put("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true") + .putAll(DISABLE_UI_CONFIGS) + .build(); + } + protected long waitUntilAfter(long timestampMillis) { long current = System.currentTimeMillis(); while (current <= timestampMillis) { diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java index 284fa0b0552f..45cc64a921c9 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/data/vectorized/parquet/TestParquetDictionaryEncodedVectorizedReads.java @@ -43,6 +43,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Iterables; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.spark.data.vectorized.VectorizedSparkParquetReaders; import org.apache.iceberg.types.Types; @@ -64,6 +65,7 @@ public static void startSpark() { spark = SparkSession.builder()
.master("local[2]") + .config(TestBase.DISABLE_UI_CONFIGS) .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java index 33c842d94be1..316fb7407c40 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/ScanTestBase.java @@ -38,6 +38,7 @@ import org.apache.iceberg.data.Record; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.AvroDataTestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.TypeUtil; @@ -61,6 +62,7 @@ public static void startSpark() { ScanTestBase.spark = SparkSession.builder() .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .master("local[2]") .getOrCreate(); ScanTestBase.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java index 61d6501a6847..f3a4e7f6dabe 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestFilteredScan.java @@ -54,6 +54,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.GenericsHelpers; import org.apache.iceberg.types.Types; import 
org.apache.spark.sql.Dataset; @@ -116,6 +117,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java index 290e73c3bd1e..7f523983d0a0 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java @@ -46,6 +46,7 @@ import org.apache.iceberg.io.OutputFile; import org.apache.iceberg.parquet.Parquet; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -97,6 +98,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java index f4f57157e479..515d33299793 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestIcebergSpark.java @@ -28,6 +28,7 @@ import java.sql.Timestamp; import java.util.List; import org.apache.iceberg.spark.IcebergSpark; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Row; @@ -51,6 +52,7 @@ public 
static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java index b31880e8e958..f181b172941d 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionPruning.java @@ -60,6 +60,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Sets; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.transforms.Transforms; import org.apache.iceberg.types.Types; import org.apache.spark.api.java.JavaRDD; @@ -119,6 +120,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestPartitionPruning.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java index 0b6ab2052b66..135f98e733ee 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestPartitionValues.java @@ -46,6 +46,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import 
org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.TestHelpers; import org.apache.iceberg.types.Types; @@ -112,6 +113,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java index 11865db7fce5..334f8bc0b0ff 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSnapshotSelection.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoders; @@ -91,6 +92,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java index 35a837b6b88b..8f407eedb97f 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataFile.java @@ -60,6 +60,7 @@ import org.apache.iceberg.spark.SparkDataFile; import org.apache.iceberg.spark.SparkDeleteFile; import org.apache.iceberg.spark.SparkSchemaUtil; +import org.apache.iceberg.spark.TestBase; import 
org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Conversions; import org.apache.iceberg.types.Types; @@ -126,6 +127,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestSparkDataFile.sparkContext = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java index 94547c2cf8fb..fc2cebd0730b 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkDataWrite.java @@ -52,6 +52,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.SnapshotUtil; import org.apache.spark.sql.Dataset; @@ -100,6 +101,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java index 8ccea303d0c1..acd128172b30 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReadProjection.java @@ -50,6 +50,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Maps; 
import org.apache.iceberg.spark.SparkReadOptions; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Type; import org.apache.iceberg.types.TypeUtil; import org.apache.iceberg.types.Types; @@ -88,6 +89,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); ImmutableMap config = ImmutableMap.of( diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java index c7716d343024..c7a90b124ef9 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java @@ -76,6 +76,7 @@ import org.apache.iceberg.spark.ParquetReaderType; import org.apache.iceberg.spark.SparkSchemaUtil; import org.apache.iceberg.spark.SparkStructLike; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.spark.data.SparkParquetWriters; import org.apache.iceberg.spark.source.metrics.NumDeletes; @@ -138,6 +139,7 @@ public static void startMetastoreAndSpark() { .config("spark.ui.liveUpdate.period", 0) .config(SQLConf.PARTITION_OVERWRITE_MODE().key(), "dynamic") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java index d22ecb02d483..45e70f8b2ddb 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java @@ -64,6 +64,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkValueConverter; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.PropertyUtil; import org.apache.spark.sql.Dataset; @@ -182,6 +183,7 @@ public static void startMetastoreAndSpark() { SparkSession.builder() .master("local[2]") .config("spark.hadoop." 
+ METASTOREURIS.varname, hiveConf.get(METASTOREURIS.varname)) + .config(TestBase.DISABLE_UI_CONFIGS) .enableHiveSupport() .getOrCreate(); diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java index 635229f6a005..79c3951c5d16 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreaming.java @@ -34,6 +34,7 @@ import org.apache.iceberg.Table; import org.apache.iceberg.hadoop.HadoopTables; import org.apache.iceberg.relocated.com.google.common.collect.Lists; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.types.Types; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Encoder; @@ -67,6 +68,7 @@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .config("spark.sql.shuffle.partitions", 4) .getOrCreate(); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java index e2b5d8920e9f..a6f5b50c8e4e 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/source/TestWriteMetricsConfig.java @@ -42,6 +42,7 @@ import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.relocated.com.google.common.collect.Maps; import org.apache.iceberg.spark.SparkWriteOptions; +import org.apache.iceberg.spark.TestBase; import org.apache.iceberg.spark.data.RandomData; import org.apache.iceberg.types.Types; import org.apache.iceberg.util.ByteBuffers; @@ -85,6 +86,7 
@@ public static void startSpark() { SparkSession.builder() .master("local[2]") .config("spark.driver.host", InetAddress.getLoopbackAddress().getHostAddress()) + .config(TestBase.DISABLE_UI_CONFIGS) .getOrCreate(); TestWriteMetricsConfig.sc = JavaSparkContext.fromSparkContext(spark.sparkContext()); } diff --git a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java index ce0a0f26a096..f03af44317cc 100644 --- a/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java +++ b/spark/v4.1/spark/src/test/java/org/apache/iceberg/spark/sql/TestAggregatePushDown.java @@ -26,58 +26,22 @@ import java.util.Arrays; import java.util.List; import java.util.Locale; -import org.apache.iceberg.CatalogUtil; import org.apache.iceberg.ParameterizedTestExtension; import org.apache.iceberg.TableProperties; -import org.apache.iceberg.catalog.Namespace; -import org.apache.iceberg.exceptions.AlreadyExistsException; -import org.apache.iceberg.hive.HiveCatalog; -import org.apache.iceberg.hive.TestHiveMetastore; -import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap; import org.apache.iceberg.relocated.com.google.common.collect.Lists; import org.apache.iceberg.spark.CatalogTestBase; import org.apache.iceberg.spark.SparkReadOptions; -import org.apache.iceberg.spark.TestBase; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.Row; -import org.apache.spark.sql.SparkSession; import org.apache.spark.sql.execution.ExplainMode; import org.apache.spark.sql.functions; import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.TestTemplate; import org.junit.jupiter.api.extension.ExtendWith; @ExtendWith(ParameterizedTestExtension.class) public class TestAggregatePushDown extends CatalogTestBase { - @BeforeAll - public static void startMetastoreAndSpark() { - 
TestBase.metastore = new TestHiveMetastore(); - metastore.start(); - TestBase.hiveConf = metastore.hiveConf(); - - TestBase.spark.stop(); - - TestBase.spark = - SparkSession.builder() - .master("local[2]") - .config("spark.sql.iceberg.aggregate_pushdown", "true") - .enableHiveSupport() - .getOrCreate(); - - TestBase.catalog = - (HiveCatalog) - CatalogUtil.loadCatalog( - HiveCatalog.class.getName(), "hive", ImmutableMap.of(), hiveConf); - - try { - catalog.createNamespace(Namespace.of("default")); - } catch (AlreadyExistsException ignored) { - // the default namespace already exists. ignore the create error - } - } - @AfterEach public void removeTables() { sql("DROP TABLE IF EXISTS %s", tableName);