Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -37,10 +37,10 @@
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.rest.auth.OAuth2Properties;
import org.apache.iceberg.util.ThreadPools;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
Expand Down Expand Up @@ -181,6 +181,7 @@ public void before() throws Exception {
CreateMultipartUploadRequest.builder().bucket(BUCKET).key("random/multipart-key").build());
}

@SuppressWarnings("removal")
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is needed because GzipHandler is now deprecated. We'll be switching to Jetty's new Compression API in #15043.

private static Server initHttpServer() throws Exception {
S3SignerServlet.SignRequestValidator deleteObjectsWithBody =
new S3SignerServlet.SignRequestValidator(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,10 @@
import org.apache.iceberg.inmemory.InMemoryCatalog;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.rest.responses.ErrorResponse;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.io.TempDir;
Expand All @@ -61,6 +61,7 @@ public abstract class TestBaseWithRESTServer {

@TempDir private Path temp;

@SuppressWarnings("removal")
@BeforeEach
public void before() throws Exception {
File warehouse = temp.toFile();
Expand Down
14 changes: 12 additions & 2 deletions core/src/test/java/org/apache/iceberg/rest/TestRESTCatalog.java
Original file line number Diff line number Diff line change
Expand Up @@ -113,10 +113,10 @@
import org.apache.iceberg.util.Pair;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.awaitility.Awaitility;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
Expand Down Expand Up @@ -264,6 +264,7 @@ protected <T extends RESTResponse> T execute(
private Server httpServer;
private HeaderValidatingAdapter adapterForRESTServer;

@SuppressWarnings("removal")
@BeforeEach
public void createCatalog() throws Exception {
File warehouse = temp.toFile();
Expand Down Expand Up @@ -409,6 +410,15 @@ protected boolean requiresNamespaceCreate() {
return true;
}

@Override
protected boolean supportsNamesWithSlashes() {
// names with slashes are rejected and considered suspicious characters after upgrading Jetty
// and the Servlet API. See also
// https://jakarta.ee/specifications/servlet/6.0/jakarta-servlet-spec-6.0.html#uri-path-canonicalization
// for additional details
return false;
}

/* RESTCatalog specific tests */

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,10 +57,10 @@
import org.apache.iceberg.rest.responses.LoadViewResponse;
import org.apache.iceberg.view.ViewCatalogTests;
import org.apache.iceberg.view.ViewMetadata;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
Expand All @@ -78,6 +78,7 @@ public class TestRESTViewCatalog extends ViewCatalogTests<RESTCatalog> {
protected InMemoryCatalog backendCatalog;
protected Server httpServer;

@SuppressWarnings("removal")
@BeforeEach
public void createCatalog() throws Exception {
File warehouse = temp.toFile();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,14 +31,15 @@
import org.apache.iceberg.inmemory.InMemoryCatalog;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.rest.responses.ConfigResponse;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.junit.jupiter.api.BeforeEach;

public class TestRESTViewCatalogWithAssumedViewSupport extends TestRESTViewCatalog {

@SuppressWarnings("removal")
@BeforeEach
public void createCatalog() throws Exception {
File warehouse = temp.toFile();
Expand Down
4 changes: 2 additions & 2 deletions gradle/libs.versions.toml
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ jakarta-el-api = "3.0.3"
jakarta-servlet-api = "6.1.0"
jaxb-api = "2.3.1"
jaxb-runtime = "2.3.9"
jetty = "11.0.26"
jetty = "12.1.5"
junit = "5.14.3"
junit-platform = "1.14.3"
junit-pioneer = "2.3.0"
Expand Down Expand Up @@ -204,7 +204,7 @@ guava-testlib = { module = "com.google.guava:guava-testlib", version.ref = "guav
jakarta-el-api = { module = "jakarta.el:jakarta.el-api", version.ref = "jakarta-el-api" }
jakarta-servlet = {module = "jakarta.servlet:jakarta.servlet-api", version.ref = "jakarta-servlet-api"}
jetty-server = { module = "org.eclipse.jetty:jetty-server", version.ref = "jetty" }
jetty-servlet = { module = "org.eclipse.jetty:jetty-servlet", version.ref = "jetty" }
jetty-servlet = { module = "org.eclipse.jetty.ee10:jetty-ee10-servlet", version.ref = "jetty" }
junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit" }
junit-jupiter-engine = { module = "org.junit.jupiter:junit-jupiter-engine", version.ref = "junit" }
junit-pioneer = { module = "org.junit-pioneer:junit-pioneer", version.ref = "junit-pioneer" }
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -97,4 +97,13 @@ protected boolean supportsNamesWithDot() {
return PropertyUtil.propertyAsBoolean(
restCatalog.properties(), RESTCompatibilityKitSuite.RCK_SUPPORTS_NAMES_WITH_DOT, false);
}

@Override
protected boolean supportsNamesWithSlashes() {
// names with slashes are rejected and considered suspicious characters after upgrading Jetty
// and the Servlet API. See also
// https://jakarta.ee/specifications/servlet/6.0/jakarta-servlet-spec-6.0.html#uri-path-canonicalization
// for additional details
return false;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,12 @@
import org.apache.iceberg.jdbc.JdbcCatalog;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.util.PropertyUtil;
import org.eclipse.jetty.ee10.servlet.ServletContextHandler;
import org.eclipse.jetty.ee10.servlet.ServletHolder;
import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.handler.gzip.GzipHandler;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand Down Expand Up @@ -106,6 +106,7 @@ private CatalogContext initializeBackendCatalog() throws IOException {
catalogProperties);
}

@SuppressWarnings("removal")
public void start(boolean join) throws Exception {
CatalogContext catalogContext = initializeBackendCatalog();

Expand Down
21 changes: 3 additions & 18 deletions spark/v3.4/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -107,13 +107,8 @@ project(":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}") {
testImplementation project(path: ':iceberg-api', configuration: 'testArtifacts')
testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts')
testImplementation project(path: ':iceberg-data', configuration: 'testArtifacts')
testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) {
transitive = false
}
testImplementation libs.sqlite.jdbc
testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements'))
testImplementation libs.awaitility
// runtime dependencies for running REST Catalog based integration test
testRuntimeOnly libs.jetty.servlet
}

test {
Expand Down Expand Up @@ -174,13 +169,7 @@ project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVer
testImplementation project(path: ':iceberg-core', configuration: 'testArtifacts')
testImplementation project(path: ':iceberg-hive-metastore', configuration: 'testArtifacts')
testImplementation project(path: ":iceberg-spark:iceberg-spark-${sparkMajorVersion}_${scalaVersion}", configuration: 'testArtifacts')
testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) {
transitive = false
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This was initially done because there were conflicts. However, given that we don't have any conflicts anymore, we can remove this.

}
// runtime dependencies for running REST Catalog based integration test
testRuntimeOnly libs.jetty.servlet
testRuntimeOnly libs.sqlite.jdbc

testImplementation (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements'))
testImplementation libs.avro.avro
testImplementation libs.parquet.hadoop
testImplementation libs.awaitility
Expand Down Expand Up @@ -272,11 +261,7 @@ project(":iceberg-spark:iceberg-spark-runtime-${sparkMajorVersion}_${scalaVersio
integrationRuntimeOnly project(':iceberg-hive-metastore')
// runtime dependencies for running REST Catalog based integration test
integrationRuntimeOnly project(path: ':iceberg-core', configuration: 'testArtifacts')
integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements')) {
transitive = false
}
integrationRuntimeOnly libs.jetty.servlet
integrationRuntimeOnly libs.sqlite.jdbc
integrationRuntimeOnly (project(path: ':iceberg-open-api', configuration: 'testFixturesRuntimeElements'))

// Not allowed on our classpath, only the runtime jar is allowed
integrationCompileOnly project(":iceberg-spark:iceberg-spark-extensions-${sparkMajorVersion}_${scalaVersion}")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.spark.Spark3Util;
import org.apache.iceberg.spark.SparkSessionCatalog;
import org.apache.iceberg.spark.TestBase;
import org.apache.iceberg.spark.extensions.IcebergSparkSessionExtensions;
import org.apache.iceberg.util.ThreadPools;
import org.apache.spark.sql.SparkSession;
Expand Down Expand Up @@ -157,7 +158,7 @@ private void initDataAndDeletes() {
private void setupSpark() {
this.spark =
SparkSession.builder()
.config("spark.ui.enabled", false)
.config(TestBase.DISABLE_UI)
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName())
.config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -155,7 +155,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) {
private void setupSpark() {
this.spark =
SparkSession.builder()
.config("spark.ui.enabled", false)
.config(TestBase.DISABLE_UI)
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The main issue with Spark is that Spark brings its own (older) Jetty version for the UI, which conflicts with the Jetty version we're using. The downside is that we need to disable the UI pretty much everywhere.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Spark only upgraded to jetty 12 after 4.2, so I believe it's worthwhile. As a follow-up, I plan to create a util method to start SparkSession such that we don't need to copy the same config everywhere.

Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Does this mean it's going to be an issue for downstream users? If not, why don't we switch these benchmarks to use the fat jar?

.config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName())
.config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName())
.config("spark.sql.catalog.spark_catalog.type", "hadoop")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ public void localPlanningWithoutFilterWithStats(Blackhole blackhole) {
private void setupSpark() {
this.spark =
SparkSession.builder()
.config("spark.ui.enabled", false)
.config(TestBase.DISABLE_UI)
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName())
.config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -240,7 +240,7 @@ private Dataset<Row> randomDataDF(Schema schema, int numRows) {
private void setupSpark() {
this.spark =
SparkSession.builder()
.config("spark.ui.enabled", false)
.config(TestBase.DISABLE_UI)
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName())
.config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,7 +138,7 @@ private void runBenchmark(RowLevelOperationMode mode, double updatePercentage) {
private void setupSpark() {
this.spark =
SparkSession.builder()
.config("spark.ui.enabled", false)
.config(TestBase.DISABLE_UI)
.config("spark.sql.extensions", IcebergSparkSessionExtensions.class.getName())
.config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName())
.config("spark.sql.catalog.spark_catalog.type", "hadoop")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ public static void startMetastoreAndSpark() {
.config("spark.sql.legacy.respectNullabilityInTextDatasetConversion", "true")
.config(
SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), String.valueOf(RANDOM.nextBoolean()))
.config(TestBase.DISABLE_UI)
.enableHiveSupport()
.getOrCreate();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.spark.Spark3Util;
import org.apache.iceberg.spark.SparkSessionCatalog;
import org.apache.iceberg.spark.TestBase;
import org.apache.iceberg.spark.actions.SparkActions;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
Expand Down Expand Up @@ -179,6 +180,7 @@ private void setupSpark() {
.config("spark.sql.catalog.spark_catalog", SparkSessionCatalog.class.getName())
.config("spark.sql.catalog.spark_catalog.type", "hadoop")
.config("spark.sql.catalog.spark_catalog.warehouse", catalogWarehouse())
.config(TestBase.DISABLE_UI)
.master("local");
spark = builder.getOrCreate();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import org.apache.iceberg.spark.Spark3Util;
import org.apache.iceberg.spark.SparkSchemaUtil;
import org.apache.iceberg.spark.SparkSessionCatalog;
import org.apache.iceberg.spark.TestBase;
import org.apache.iceberg.spark.actions.SparkActions;
import org.apache.iceberg.types.Types;
import org.apache.spark.sql.Dataset;
Expand Down Expand Up @@ -393,6 +394,7 @@ protected void setupSpark() {
"spark.sql.catalog.spark_catalog", "org.apache.iceberg.spark.SparkSessionCatalog")
.config("spark.sql.catalog.spark_catalog.type", "hadoop")
.config("spark.sql.catalog.spark_catalog.warehouse", getCatalogWarehouse())
.config(TestBase.DISABLE_UI)
.master("local[*]");
spark = builder.getOrCreate();
Configuration sparkHadoopConf = spark.sessionState().newHadoopConf();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
import org.apache.iceberg.UpdateProperties;
import org.apache.iceberg.relocated.com.google.common.collect.Maps;
import org.apache.iceberg.spark.SparkSchemaUtil;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
Expand Down Expand Up @@ -94,7 +95,7 @@ protected void cleanupFiles() throws IOException {
}

protected void setupSpark(boolean enableDictionaryEncoding) {
SparkSession.Builder builder = SparkSession.builder().config("spark.ui.enabled", false);
SparkSession.Builder builder = SparkSession.builder().config(TestBase.DISABLE_UI);
if (!enableDictionaryEncoding) {
builder
.config("parquet.dictionary.page.size", "1")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import java.util.List;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.spark.SparkReadConf;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.internal.SQLConf;
import org.junit.jupiter.api.BeforeEach;
Expand Down Expand Up @@ -90,6 +91,7 @@ protected static SparkSession initSpark(String serializer) {
.master("local[2]")
.config("spark.serializer", serializer)
.config(SQLConf.SHUFFLE_PARTITIONS().key(), "4")
.config(TestBase.DISABLE_UI)
.getOrCreate();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import java.util.List;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.spark.SparkReadConf;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.internal.SQLConf;
import org.junit.jupiter.api.AfterAll;
Expand Down Expand Up @@ -69,6 +70,7 @@ public static void startSpark() {
.master("local[2]")
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config(SQLConf.SHUFFLE_PARTITIONS().key(), "4")
.config(TestBase.DISABLE_UI)
.getOrCreate();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@

import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.spark.SparkReadConf;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.internal.SQLConf;
import org.junit.jupiter.api.AfterAll;
Expand Down Expand Up @@ -62,6 +63,7 @@ public static void startSpark() {
.master("local[2]")
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config(SQLConf.SHUFFLE_PARTITIONS().key(), "4")
.config(TestBase.DISABLE_UI)
.getOrCreate();
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import java.util.List;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.spark.SparkReadConf;
import org.apache.iceberg.spark.TestBase;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.internal.SQLConf;
import org.junit.jupiter.api.AfterAll;
Expand Down Expand Up @@ -59,6 +60,7 @@ public static void startSpark() {
.master("local[2]")
.config("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.config(SQLConf.SHUFFLE_PARTITIONS().key(), "4")
.config(TestBase.DISABLE_UI)
.getOrCreate();
}

Expand Down
Loading