From ffdf8ee5315068f59f781572a690081fa1df6adb Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 16 May 2025 17:21:24 -0500 Subject: [PATCH 01/42] jetty 10 upgrade --- indexing-service/pom.xml | 13 ++----------- .../overlord/hrtr/HttpRemoteTaskRunnerTest.java | 3 +-- licenses.yaml | 4 ++-- pom.xml | 16 ++++++++++++++-- .../druid/guice/http/JettyHttpClientModule.java | 6 +++++- .../apache/druid/server/QueryResultPusher.java | 4 ++-- .../jetty/JettyMonitoringConnectionFactory.java | 2 +- .../jetty/JettyServerInitUtils.java | 6 +++--- .../initialization/jetty/JettyServerModule.java | 2 +- .../StandardResponseHeaderFilterHolder.java | 2 +- .../apache/druid/server/QueryResourceTest.java | 8 ++++---- .../server/AsyncQueryForwardingServlet.java | 7 ++++--- .../server/AsyncQueryForwardingServletTest.java | 2 +- sql/pom.xml | 5 +++++ 14 files changed, 46 insertions(+), 34 deletions(-) diff --git a/indexing-service/pom.xml b/indexing-service/pom.xml index 5c722530acf8..3efb243805a6 100644 --- a/indexing-service/pom.xml +++ b/indexing-service/pom.xml @@ -105,10 +105,6 @@ org.apache.zookeeper zookeeper-jute - - javax.servlet - javax.servlet-api - com.fasterxml.jackson.core jackson-core @@ -129,11 +125,6 @@ jakarta.validation jakarta.validation-api - - javax.servlet - servlet-api - provided - io.netty netty-handler @@ -147,8 +138,8 @@ commons-codec - org.eclipse.jetty - jetty-util + javax.servlet + javax.servlet-api com.fasterxml.jackson.jaxrs diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java index 3e5c862d7a63..3a3dcc3a7a12 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/hrtr/HttpRemoteTaskRunnerTest.java @@ -65,7 +65,6 @@ import 
org.apache.druid.server.initialization.ZkPathsConfig; import org.apache.druid.server.metrics.NoopServiceEmitter; import org.easymock.EasyMock; -import org.eclipse.jetty.util.ConcurrentHashSet; import org.joda.time.Period; import org.junit.Assert; import org.junit.Before; @@ -902,7 +901,7 @@ protected WorkerHolder createWorkerHolder( ); AtomicInteger ticks = new AtomicInteger(); - Set actualShutdowns = new ConcurrentHashSet<>(); + Set actualShutdowns = Collections.newSetFromMap(new ConcurrentHashMap<>()); workerHolders.put( "host:1234", diff --git a/licenses.yaml b/licenses.yaml index 1a2f66f94c95..c345f7a697cd 100644 --- a/licenses.yaml +++ b/licenses.yaml @@ -2113,7 +2113,7 @@ name: Jetty license_category: binary module: java-core license_name: Apache License version 2.0 -version: 9.4.57.v20241219 +version: 10.0.25 libraries: - org.eclipse.jetty: jetty-client - org.eclipse.jetty: jetty-continuation @@ -3659,7 +3659,7 @@ name: Java Servlet API license_category: binary module: java-core license_name: CDDL 1.1 -version: 3.1.0 +version: 4.0.1 copyright: Oracle and/or its affiliates. 
license_file_path: licenses/bin/javax.CDDL11 libraries: diff --git a/pom.xml b/pom.xml index 68ee72ef2e30..a78cc6265eea 100644 --- a/pom.xml +++ b/pom.xml @@ -98,7 +98,7 @@ 32.0.1-jre 5.1.0 1.3 - 9.4.57.v20241219 + 10.0.25 1.19.4 2.12.7.20221012 1.9.13 @@ -749,6 +749,12 @@ org.eclipse.jetty jetty-server ${jetty.version} + + + org.eclipse.jetty.toolchain + jetty-servlet-api + + org.eclipse.jetty @@ -769,6 +775,12 @@ org.eclipse.jetty jetty-rewrite ${jetty.version} + + + org.eclipse.jetty.toolchain + jetty-servlet-api + + org.eclipse.jetty @@ -1031,7 +1043,7 @@ javax.servlet javax.servlet-api - 3.1.0 + 4.0.1 javax.activation diff --git a/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java b/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java index c4eefa027042..e2da287257b5 100644 --- a/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java +++ b/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java @@ -28,6 +28,8 @@ import org.apache.druid.guice.annotations.Global; import org.apache.druid.java.util.common.lifecycle.Lifecycle; import org.eclipse.jetty.client.HttpClient; +import org.eclipse.jetty.client.dynamic.HttpClientTransportDynamic; +import org.eclipse.jetty.io.ClientConnector; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; @@ -80,7 +82,9 @@ public HttpClient get() if (sslContextBinding != null) { final SslContextFactory.Client sslContextFactory = new SslContextFactory.Client(); sslContextFactory.setSslContext(sslContextBinding.getProvider().get()); - httpClient = new HttpClient(sslContextFactory); + ClientConnector clientConnector = new ClientConnector(); + clientConnector.setSslContextFactory(sslContextFactory); + httpClient = new HttpClient(new HttpClientTransportDynamic(clientConnector)); } else { httpClient = new HttpClient(); } diff --git a/server/src/main/java/org/apache/druid/server/QueryResultPusher.java 
b/server/src/main/java/org/apache/druid/server/QueryResultPusher.java index 1b6ed98122c7..b9b0246fc962 100644 --- a/server/src/main/java/org/apache/druid/server/QueryResultPusher.java +++ b/server/src/main/java/org/apache/druid/server/QueryResultPusher.java @@ -66,7 +66,7 @@ public abstract class QueryResultPusher private final QueryResource.QueryMetricCounter counter; private final MediaType contentType; private final Map extraHeaders; - private final HttpFields trailerFields; + private final HttpFields.Mutable trailerFields; private StreamingHttpResponseAccumulator accumulator; private AsyncContext asyncContext; @@ -91,7 +91,7 @@ public QueryResultPusher( this.counter = counter; this.contentType = contentType; this.extraHeaders = extraHeaders; - this.trailerFields = new HttpFields(); + this.trailerFields = HttpFields.build(); } /** diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyMonitoringConnectionFactory.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyMonitoringConnectionFactory.java index 07955bb0339a..85ded3250628 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyMonitoringConnectionFactory.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyMonitoringConnectionFactory.java @@ -56,7 +56,7 @@ public String getProtocol() public Connection newConnection(Connector connector, EndPoint endPoint) { final Connection connection = connectionFactory.newConnection(connector, endPoint); - connection.addListener( + connection.addEventListener( new Connection.Listener() { @Override diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java index 72ddbe9c4387..86a6c066be55 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java +++ 
b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java @@ -34,6 +34,8 @@ import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.FilterMapping; import org.eclipse.jetty.servlet.ServletContextHandler; +import org.eclipse.jetty.util.compression.CompressionPool; +import org.eclipse.jetty.util.compression.DeflaterPool; import javax.ws.rs.HttpMethod; @@ -51,10 +53,8 @@ public static GzipHandler wrapWithDefaultGzipHandler(final Handler handler, int gzipHandler.setMinGzipSize(0); gzipHandler.setIncludedMethods(GZIP_METHODS); gzipHandler.setInflateBufferSize(inflateBufferSize); - gzipHandler.setCompressionLevel(compressionLevel); + gzipHandler.setDeflaterPool(new DeflaterPool(CompressionPool.DEFAULT_CAPACITY, compressionLevel, true)); - // We don't actually have any precomputed .gz resources, and checking for them inside jars is expensive. - gzipHandler.setCheckGzExists(false); gzipHandler.setHandler(handler); return gzipHandler; } diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java index 8ce48871c155..7a0afce4e133 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java @@ -363,7 +363,7 @@ static Server makeAndInitializeServer( if (gracefulStop > 0) { server.setStopTimeout(gracefulStop); } - server.addLifeCycleListener(new LifeCycle.Listener() + server.addEventListener(new LifeCycle.Listener() { @Override public void lifeCycleStarting(LifeCycle event) diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java index 2f12ff2402c8..050bd01927f9 100644 --- 
a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java @@ -74,7 +74,7 @@ public static void deduplicateHeadersInProxyServlet( ) { for (final String headerName : StandardResponseHeaderFilterHolder.STANDARD_HEADERS) { - if (serverResponse.getHeaders().containsKey(headerName) && proxyResponse.containsHeader(headerName)) { + if (serverResponse.getHeaders().contains(headerName) && proxyResponse.containsHeader(headerName)) { ((org.eclipse.jetty.server.Response) proxyResponse).getHttpFields().remove(headerName); } } diff --git a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java index 684092904848..66bbb781f8bc 100644 --- a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java @@ -477,13 +477,13 @@ public QueryRunner getQueryRunnerForSegments( Assert.assertEquals(response.getHeader(HttpHeader.TRAILER.toString()), QueryResultPusher.RESULT_TRAILER_HEADERS); final HttpFields fields = response.getTrailers().get(); - Assert.assertTrue(fields.containsKey(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); + Assert.assertTrue(fields.contains(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); Assert.assertEquals( fields.get(QueryResource.ERROR_MESSAGE_TRAILER_HEADER), "Query did not complete within configured timeout period. You can increase query timeout or tune the performance of query." 
); - Assert.assertTrue(fields.containsKey(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); + Assert.assertTrue(fields.contains(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); Assert.assertEquals(fields.get(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER), "false"); } @@ -522,9 +522,9 @@ public void testSuccessResponseWithTrailerHeader() throws IOException Assert.assertTrue(response.containsHeader(HttpHeader.TRAILER.toString())); final HttpFields fields = response.getTrailers().get(); - Assert.assertFalse(fields.containsKey(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); + Assert.assertFalse(fields.contains(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); - Assert.assertTrue(fields.containsKey(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); + Assert.assertTrue(fields.contains(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); Assert.assertEquals(fields.get(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER), "true"); } diff --git a/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java b/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java index f626b45d97f3..94e30836ed48 100644 --- a/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java +++ b/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java @@ -64,7 +64,7 @@ import org.eclipse.jetty.client.api.Response; import org.eclipse.jetty.client.api.Result; import org.eclipse.jetty.client.util.BytesContentProvider; -import org.eclipse.jetty.http.HttpHeader; +import org.eclipse.jetty.client.util.BytesRequestContent; import org.eclipse.jetty.http.HttpMethod; import org.eclipse.jetty.proxy.AsyncProxyServlet; @@ -468,8 +468,9 @@ private void setProxyRequestContent(Request proxyRequest, HttpServletRequest cli final ObjectMapper objectMapper = (ObjectMapper) clientRequest.getAttribute(OBJECTMAPPER_ATTRIBUTE); try { byte[] bytes = objectMapper.writeValueAsBytes(content); - proxyRequest.content(new BytesContentProvider(bytes)); - 
proxyRequest.getHeaders().put(HttpHeader.CONTENT_LENGTH, String.valueOf(bytes.length)); + // In jetty 10+ BytesContentProvider will automatically set the content length when added to Request + Request.Content requestContent = new BytesRequestContent(bytes); + proxyRequest.body(requestContent); } catch (JsonProcessingException e) { throw new RuntimeException(e); diff --git a/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java b/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java index 6be671580890..95f269cfbbbd 100644 --- a/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java +++ b/services/src/test/java/org/apache/druid/server/AsyncQueryForwardingServletTest.java @@ -633,7 +633,7 @@ public int read() @Override public HttpFields getHeaders() { - HttpFields httpFields = new HttpFields(); + HttpFields.Mutable httpFields = HttpFields.build(); if (isJDBCSql) { httpFields.add(new HttpField("X-Druid-SQL-Query-Id", "jdbcDummy")); } else if (isNativeSql) { diff --git a/sql/pom.xml b/sql/pom.xml index 5afdc8b26888..0822aa93de20 100644 --- a/sql/pom.xml +++ b/sql/pom.xml @@ -86,6 +86,11 @@ org.eclipse.jetty jetty-http + + + javax.servlet + javax.servlet-api + From f4eef97d43905a5ac59c1f042e7996ca587ebf3a Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 27 May 2025 18:07:18 -0500 Subject: [PATCH 02/42] checkpoint --- .../server/initialization/jetty/JettyServerInitUtils.java | 2 ++ .../apache/druid/server/AsyncQueryForwardingServlet.java | 7 ++++++- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java index 86a6c066be55..e58ab3c54cf5 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java +++ 
b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java @@ -36,6 +36,7 @@ import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.util.compression.CompressionPool; import org.eclipse.jetty.util.compression.DeflaterPool; +import org.eclipse.jetty.util.compression.InflaterPool; import javax.ws.rs.HttpMethod; @@ -54,6 +55,7 @@ public static GzipHandler wrapWithDefaultGzipHandler(final Handler handler, int gzipHandler.setIncludedMethods(GZIP_METHODS); gzipHandler.setInflateBufferSize(inflateBufferSize); gzipHandler.setDeflaterPool(new DeflaterPool(CompressionPool.DEFAULT_CAPACITY, compressionLevel, true)); + gzipHandler.setInflaterPool(new InflaterPool(CompressionPool.DEFAULT_CAPACITY, true)); gzipHandler.setHandler(handler); return gzipHandler; diff --git a/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java b/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java index 94e30836ed48..2d884e0455ad 100644 --- a/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java +++ b/services/src/main/java/org/apache/druid/server/AsyncQueryForwardingServlet.java @@ -60,11 +60,14 @@ import org.apache.druid.sql.http.SqlQuery; import org.apache.druid.sql.http.SqlResource; import org.eclipse.jetty.client.HttpClient; +import org.eclipse.jetty.client.HttpResponse; import org.eclipse.jetty.client.api.Request; import org.eclipse.jetty.client.api.Response; import org.eclipse.jetty.client.api.Result; import org.eclipse.jetty.client.util.BytesContentProvider; import org.eclipse.jetty.client.util.BytesRequestContent; +import org.eclipse.jetty.http.HttpFields; +import org.eclipse.jetty.http.HttpHeader; import org.eclipse.jetty.http.HttpMethod; import org.eclipse.jetty.proxy.AsyncProxyServlet; @@ -468,9 +471,11 @@ private void setProxyRequestContent(Request proxyRequest, HttpServletRequest cli final ObjectMapper objectMapper = (ObjectMapper) 
clientRequest.getAttribute(OBJECTMAPPER_ATTRIBUTE); try { byte[] bytes = objectMapper.writeValueAsBytes(content); - // In jetty 10+ BytesContentProvider will automatically set the content length when added to Request Request.Content requestContent = new BytesRequestContent(bytes); proxyRequest.body(requestContent); + proxyRequest.headers(headers -> { + headers.put(HttpHeader.CONTENT_LENGTH, String.valueOf(requestContent.getLength())); + }); } catch (JsonProcessingException e) { throw new RuntimeException(e); From 63f23c9cdcc5f1001abcec7abf36d78d6dc89c2a Mon Sep 17 00:00:00 2001 From: capistrant Date: Mon, 2 Jun 2025 16:26:52 -0500 Subject: [PATCH 03/42] compiling but broken cuz of at least one dep calcite depending on jetty9 --- extensions-core/druid-kerberos/pom.xml | 4 +- .../kerberos/KerberosAuthenticator.java | 10 +-- .../java/org/apache/druid/indexer/Utils.java | 2 +- .../cases/cluster/Common/dependencies.yaml | 1 + integration-tests-ex/tools/pom.xml | 4 +- .../testing/tools/CliCustomNodeRole.java | 11 +-- integration-tests/pom.xml | 4 +- .../apache/druid/cli/CliCustomNodeRole.java | 11 +-- pom.xml | 14 ++-- processing/pom.xml | 4 +- quidem-ut/pom.xml | 4 +- server/pom.xml | 12 +-- .../guice/http/JettyHttpClientModule.java | 2 +- .../AsyncManagementForwardingServlet.java | 6 +- .../druid/server/QueryResultPusher.java | 6 +- .../server/http/OverlordProxyServlet.java | 4 +- .../initialization/jetty/JettyBindings.java | 2 +- .../initialization/jetty/JettyRequestLog.java | 4 +- .../jetty/JettyServerInitUtils.java | 22 ++---- .../jetty/JettyServerModule.java | 21 ++---- .../StandardResponseHeaderFilterHolder.java | 6 +- .../server/security/AuthenticationUtils.java | 4 +- .../druid/server/security/Authenticator.java | 2 +- .../discovery/DruidLeaderClientTest.java | 12 +-- .../AsyncManagementForwardingServletTest.java | 22 +++--- .../druid/server/QueryResourceTest.java | 31 +++----- .../server/initialization/BaseJettyTest.java | 14 ++-- 
.../server/initialization/JettyQosTest.java | 2 +- .../server/initialization/JettyTest.java | 7 -- .../JettyWithResponseFilterEnabledTest.java | 8 -- services/pom.xml | 8 +- .../org/apache/druid/cli/CliOverlord.java | 27 +++---- .../CoordinatorJettyServerInitializer.java | 27 +++---- .../MiddleManagerJettyServerInitializer.java | 12 ++- .../cli/QueryJettyServerInitializer.java | 19 ++--- .../cli/RouterJettyServerInitializer.java | 27 +++---- .../cli/WebConsoleJettyServerInitializer.java | 5 +- .../server/AsyncQueryForwardingServlet.java | 15 ++-- .../AsyncQueryForwardingServletTest.java | 75 ++++++++++++++----- 39 files changed, 211 insertions(+), 260 deletions(-) diff --git a/extensions-core/druid-kerberos/pom.xml b/extensions-core/druid-kerberos/pom.xml index e165d217bd09..c9e26ef79386 100644 --- a/extensions-core/druid-kerberos/pom.xml +++ b/extensions-core/druid-kerberos/pom.xml @@ -302,8 +302,8 @@ provided - org.eclipse.jetty - jetty-servlet + org.eclipse.jetty.ee8 + jetty-ee8-servlet provided javax.xml.bind:jaxb-api + org.eclipse.jetty.toolchain:jetty-servlet-api From aba3dd05334cede9c969d168856ff67d3fdfbf49 Mon Sep 17 00:00:00 2001 From: capistrant Date: Wed, 20 Aug 2025 23:06:55 -0500 Subject: [PATCH 09/42] Fixup some licenses reporting issues --- .../cases/cluster/Common/dependencies.yaml | 1 - licenses.yaml | 11 +++++++---- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/integration-tests-ex/cases/cluster/Common/dependencies.yaml b/integration-tests-ex/cases/cluster/Common/dependencies.yaml index f85cf2332dfc..b63b6dd82e2b 100644 --- a/integration-tests-ex/cases/cluster/Common/dependencies.yaml +++ b/integration-tests-ex/cases/cluster/Common/dependencies.yaml @@ -131,7 +131,6 @@ services: ipv4_address: 172.172.172.6 entrypoint: > /bin/sh -c " - sleep 30; /usr/bin/mc alias set s3 http://minio:9000 ${AWS_ACCESS_KEY_ID} ${AWS_SECRET_ACCESS_KEY}; /usr/bin/mc mb s3/${DRUID_CLOUD_BUCKET}; /usr/bin/mc anonymous set public s3/${DRUID_CLOUD_BUCKET}; 
diff --git a/licenses.yaml b/licenses.yaml index e75d3c257139..f9adf7e3cecd 100644 --- a/licenses.yaml +++ b/licenses.yaml @@ -1707,7 +1707,7 @@ name: Apache Calcite Avatica license_category: binary module: java-core license_name: Apache License version 2.0 -version: 1.25.0 +version: 1.26.0 libraries: - org.apache.calcite.avatica: avatica-core - org.apache.calcite.avatica: avatica-metrics @@ -2137,20 +2137,23 @@ notices: name: Jetty license_category: binary module: java-core -license_name: Apache License version 2.0 +license_name: Eclipse Public License 2.0 version: 12.0.25 libraries: + - org.eclipse.jetty: jetty-alpn-client - org.eclipse.jetty: jetty-client - org.eclipse.jetty: jetty-http - org.eclipse.jetty: jetty-io - org.eclipse.jetty: jetty-rewrite - org.eclipse.jetty: jetty-security - org.eclipse.jetty: jetty-server + - org.eclipse.jetty: jetty-session - org.eclipse.jetty: jetty-util + - org.eclipse.jetty.ee8: jetty-ee8-nested + - org.eclipse.jetty.ee8: jetty-ee8-proxy + - org.eclipse.jetty.ee8: jetty-ee8-security - org.eclipse.jetty.ee8: jetty-ee8-servlet - org.eclipse.jetty.ee8: jetty-ee8-servlets - - org.eclipse.jetty.ee8: jetty-ee8-proxy - - org.eclipse.jetty.ee8: jetty-ee8-nested notice: | ============================================================== Jetty Web Container From 8042ece01c4a8ac9ffe843b19806cefc1f55b60b Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 21 Aug 2025 10:11:50 -0500 Subject: [PATCH 10/42] remove jetty dependency from indexing hadoop in attempt to keep it alive --- indexing-hadoop/pom.xml | 4 -- .../java/org/apache/druid/indexer/Utils.java | 42 ++++++++++--------- 2 files changed, 23 insertions(+), 23 deletions(-) diff --git a/indexing-hadoop/pom.xml b/indexing-hadoop/pom.xml index 9772bc8b6699..2195606734d1 100644 --- a/indexing-hadoop/pom.xml +++ b/indexing-hadoop/pom.xml @@ -90,10 +90,6 @@ jakarta.validation jakarta.validation-api - - org.eclipse.jetty - jetty-client - org.apache.commons commons-lang3 diff --git 
a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java index 0c0a2ae1c0c7..f4f9e71cf8fb 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/Utils.java @@ -36,18 +36,18 @@ import org.apache.hadoop.mapreduce.TaskCompletionEvent; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.ReflectionUtils; -import org.eclipse.jetty.client.ContentResponse; -import org.eclipse.jetty.client.HttpClient; import javax.annotation.Nullable; - import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.time.Duration; import java.util.HashMap; import java.util.Map; -import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicBoolean; /** @@ -56,6 +56,10 @@ public class Utils { private static final Logger log = new Logger(Utils.class); private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper(); + + // HTTP timeouts to mimic Jetty HttpClient default behavior (15s connect, 60s request) + private static final Duration HTTP_CONNECT_TIMEOUT = Duration.ofSeconds(15); + private static final Duration HTTP_REQUEST_TIMEOUT = Duration.ofSeconds(60); public static OutputStream makePathAndOutputStream(JobContext job, Path outputPath, boolean deleteExisting) throws IOException @@ -186,10 +190,11 @@ public static boolean checkAppSuccessForJobIOException( public static boolean checkAppSuccessFromYarnRM(Job job) { - final HttpClient httpClient = new HttpClient(); + final HttpClient httpClient = HttpClient.newBuilder() + .connectTimeout(HTTP_CONNECT_TIMEOUT) + .build(); final AtomicBoolean succeeded = new AtomicBoolean(false); try { - 
httpClient.start(); RetryUtils.retry( () -> { checkAppSuccessFromYarnRMOnce(httpClient, job, succeeded); @@ -207,31 +212,30 @@ public static boolean checkAppSuccessFromYarnRM(Job job) // we're already in a best-effort fallback failure handling case, just stop if we have issues with the http client return false; } - finally { - try { - httpClient.stop(); - } - catch (Exception e) { - log.error(e, "Got exception with httpClient.stop() while trying to contact YARN RM."); - } - } } private static void checkAppSuccessFromYarnRMOnce( HttpClient httpClient, Job job, AtomicBoolean succeeded - ) throws IOException, InterruptedException, ExecutionException, TimeoutException + ) throws IOException, InterruptedException { String appId = StringUtils.replace(job.getJobID().toString(), "job", "application"); String yarnRM = job.getConfiguration().get("yarn.resourcemanager.webapp.address"); String yarnEndpoint = StringUtils.format("http://%s/ws/v1/cluster/apps/%s", yarnRM, appId); log.info("Attempting to retrieve app status from YARN ResourceManager at [%s].", yarnEndpoint); - ContentResponse res = httpClient.GET(yarnEndpoint); - log.info("App status response from YARN RM: " + res.getContentAsString()); + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create(yarnEndpoint)) + .timeout(HTTP_REQUEST_TIMEOUT) + .GET() + .build(); + HttpResponse res = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + + String responseBody = res.body(); + log.info("App status response from YARN RM: " + responseBody); Map respMap = HadoopDruidIndexerConfig.JSON_MAPPER.readValue( - res.getContentAsString(), + responseBody, new TypeReference<>() {} ); From 11d296482bcfa42d1324acdbac385fcb639e2ea0 Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 21 Aug 2025 21:47:44 -0500 Subject: [PATCH 11/42] stop doing static checks and build with jdk11 --- .github/workflows/static-checks.yml | 2 +- .github/workflows/unit-and-integration-tests-unified.yml | 2 +- 2 files changed, 2 
insertions(+), 2 deletions(-) diff --git a/.github/workflows/static-checks.yml b/.github/workflows/static-checks.yml index 35f01b60a152..d92baff39767 100644 --- a/.github/workflows/static-checks.yml +++ b/.github/workflows/static-checks.yml @@ -42,7 +42,7 @@ jobs: fail-fast: false matrix: # Use JDK 21.0.4 to work around https://github.com/apache/druid/issues/17429 - java: [ '11', '17', '21.0.4' ] + java: [ '17', '21.0.4' ] runs-on: ubuntu-latest steps: - name: checkout branch diff --git a/.github/workflows/unit-and-integration-tests-unified.yml b/.github/workflows/unit-and-integration-tests-unified.yml index 95648f0087fa..17fbb910863d 100644 --- a/.github/workflows/unit-and-integration-tests-unified.yml +++ b/.github/workflows/unit-and-integration-tests-unified.yml @@ -80,7 +80,7 @@ jobs: fail-fast: false matrix: # Use JDK 21.0.4 to work around https://github.com/apache/druid/issues/17429 - jdk: [ '11', '17', '21.0.4' ] + jdk: [ '17', '21.0.4' ] runs-on: ubuntu-latest steps: - name: Checkout branch From 5f9ee3226bac5bbb3f9f17a46f8b5c5f06da0aca Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 10:33:31 -0500 Subject: [PATCH 12/42] Modifications to index_hadoop to try and keep it alive --- .../indexer/HadoopDruidIndexerConfig.java | 36 ++++++++++--------- .../org/apache/druid/indexer/JobHelper.java | 3 +- .../override-examples/hdfs | 3 +- .../script/copy_resources_template.sh | 2 +- 4 files changed, 25 insertions(+), 19 deletions(-) diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java index 0cabc734617f..d00e2f5967ba 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java @@ -35,13 +35,15 @@ import com.google.inject.Module; import org.apache.druid.data.input.InputRow; import 
org.apache.druid.data.input.impl.InputRowParser; -import org.apache.druid.guice.GuiceInjectors; import org.apache.druid.guice.JsonConfigProvider; +import org.apache.druid.guice.LocalDataStorageDruidModule; +import org.apache.druid.guice.StartupInjectorBuilder; +import org.apache.druid.guice.StorageNodeModule; import org.apache.druid.guice.annotations.Self; import org.apache.druid.indexer.granularity.GranularitySpec; import org.apache.druid.indexer.partitions.DimensionBasedPartitionsSpec; import org.apache.druid.indexer.path.PathSpec; -import org.apache.druid.initialization.Initialization; +import org.apache.druid.initialization.CoreInjectorBuilder; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.java.util.common.StringUtils; @@ -53,6 +55,7 @@ import org.apache.druid.segment.IndexMergerV9; import org.apache.druid.segment.IndexSpec; import org.apache.druid.segment.loading.DataSegmentPusher; +import org.apache.druid.segment.writeout.SegmentWriteOutMediumModule; import org.apache.druid.server.DruidNode; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.ShardSpec; @@ -106,20 +109,21 @@ public class HadoopDruidIndexerConfig public static final Properties PROPERTIES; static { - INJECTOR = Initialization.makeInjectorWithModules( - GuiceInjectors.makeStartupInjector(), - ImmutableList.of( - (Module) binder -> { - JsonConfigProvider.bindInstance( - binder, - Key.get(DruidNode.class, Self.class), - new DruidNode("hadoop-indexer", null, false, null, null, true, false) - ); - JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", HadoopKerberosConfig.class); - }, - new IndexingHadoopModule() - ) - ); + Injector baseInjector = new StartupInjectorBuilder().withEmptyProperties().build(); + INJECTOR = new CoreInjectorBuilder(baseInjector).addModules( + new StorageNodeModule(), + new SegmentWriteOutMediumModule(), + new LocalDataStorageDruidModule(), + 
(Module) binder -> { + JsonConfigProvider.bindInstance( + binder, + Key.get(DruidNode.class, Self.class), + new DruidNode("hadoop-indexer", null, false, null, null, true, false) + ); + JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", HadoopKerberosConfig.class); + }, + new IndexingHadoopModule() + ).build(); JSON_MAPPER = INJECTOR.getInstance(ObjectMapper.class); INDEX_IO = INJECTOR.getInstance(IndexIO.class); INDEX_MERGER_V9 = INJECTOR.getInstance(IndexMergerV9.class); diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java index 7639ba66b5f6..83c0671a2d05 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/JobHelper.java @@ -155,7 +155,8 @@ public static void setupClasspath( for (String jarFilePath : jarFiles) { final File jarFile = new File(jarFilePath); - if (jarFile.getName().endsWith(".jar")) { + // Keep Druid jetty jars out of the classpath. They are not runnable in the < 17 hadoop java runtime + if (jarFile.getName().endsWith(".jar") && !jarFile.getName().contains("jetty")) { try { RetryUtils.retry( () -> { diff --git a/integration-tests/docker/environment-configs/override-examples/hdfs b/integration-tests/docker/environment-configs/override-examples/hdfs index 2e25b929b728..5e983a1c4c81 100644 --- a/integration-tests/docker/environment-configs/override-examples/hdfs +++ b/integration-tests/docker/environment-configs/override-examples/hdfs @@ -21,4 +21,5 @@ druid_storage_storageDirectory=/druid/segments # Depending on the test, additional extension(s) may be required. # Please refer to the other integration-tests/docker/environment-configs/override-examples/ files and Druid docs for # additional env vars to provide for each extension. 
-druid_extensions_loadList=["druid-hdfs-storage", "mysql-metadata-storage","druid-basic-security","simple-client-sslcontext","druid-testing-tools","druid-lookups-cached-global","druid-histogram","druid-datasketches","druid-parquet-extensions","druid-avro-extensions","druid-protobuf-extensions","druid-orc-extensions","druid-kafka-indexing-service","druid-s3-extensions"] \ No newline at end of file +druid_extensions_loadList=["druid-hdfs-storage", "mysql-metadata-storage","druid-basic-security","simple-client-sslcontext","druid-testing-tools","druid-lookups-cached-global","druid-histogram","druid-datasketches","druid-parquet-extensions","druid-avro-extensions","druid-protobuf-extensions","druid-orc-extensions","druid-kafka-indexing-service","druid-s3-extensions"] +druid_indexer_task_allowHadoopTaskExecution=true \ No newline at end of file diff --git a/integration-tests/script/copy_resources_template.sh b/integration-tests/script/copy_resources_template.sh index f5fa127b0c4c..dac5de8caf53 100755 --- a/integration-tests/script/copy_resources_template.sh +++ b/integration-tests/script/copy_resources_template.sh @@ -33,7 +33,7 @@ then pushd ../ rm -rf distribution/target/apache-druid-$DRUID_VERSION-integration-test-bin # using parallel build here may not yield significant speedups - mvn -B -Pskip-static-checks,skip-tests -Dweb.console.skip=true install -Pintegration-test + mvn -B -Pskip-static-checks -DskipTests -Dweb.console.skip=${DRUID_INTEGRATION_TEST_SKIP_WEB_CONSOLE:-true} install -Pintegration-test mv distribution/target/apache-druid-$DRUID_VERSION-integration-test-bin/bin $SHARED_DIR/docker/bin mv distribution/target/apache-druid-$DRUID_VERSION-integration-test-bin/lib $SHARED_DIR/docker/lib mv distribution/target/apache-druid-$DRUID_VERSION-integration-test-bin/extensions $SHARED_DIR/docker/extensions From 1f13540abd1ca97abd5c3b9aaeaf035d077187db Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 13:38:25 -0500 Subject: [PATCH 13/42] Modify 
duplicate header removal strategy --- .../jetty/StandardResponseHeaderFilterHolder.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java index 74ccc690a5fb..20e2781368e1 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java @@ -79,7 +79,7 @@ public static void deduplicateHeadersInProxyServlet( if (proxyResponse instanceof org.eclipse.jetty.server.Response) { ((org.eclipse.jetty.server.Response) proxyResponse).getHeaders().remove(headerName); } else if (proxyResponse instanceof org.eclipse.jetty.ee8.nested.Response) { - ((org.eclipse.jetty.ee8.nested.Response) proxyResponse).setHeader(headerName, ""); + ((org.eclipse.jetty.ee8.nested.Response) proxyResponse).getHttpFields().remove(headerName); } } } From 74f5c097f6cba95bdff2cfecc9a2f8e3b08b558e Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 16:58:05 -0500 Subject: [PATCH 14/42] Remove dependency on non public jetty nested packages --- pom.xml | 12 ------------ server/pom.xml | 4 ---- .../initialization/jetty/JettyServerInitUtils.java | 3 +-- .../jetty/StandardResponseHeaderFilterHolder.java | 8 ++------ 4 files changed, 3 insertions(+), 24 deletions(-) diff --git a/pom.xml b/pom.xml index 4d788d799a94..f2ea39d6fbb1 100644 --- a/pom.xml +++ b/pom.xml @@ -776,18 +776,6 @@ jetty-ee8-proxy ${jetty.version} - - org.eclipse.jetty.ee8 - jetty-ee8-nested - ${jetty.version} - - - org.eclipse.jetty.toolchain - jetty-servlet-api - - - - org.eclipse.jetty jetty-rewrite diff --git a/server/pom.xml b/server/pom.xml index b8d770e0cd3e..98dc2ce9a8eb 100644 --- a/server/pom.xml +++ b/server/pom.xml @@ -132,10 
+132,6 @@ org.eclipse.jetty jetty-rewrite - - org.eclipse.jetty.ee8 - jetty-ee8-nested - com.google.code.findbugs jsr305 diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java index b9e8525853b0..1b94ea2f7ce4 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerInitUtils.java @@ -26,7 +26,6 @@ import org.apache.druid.java.util.common.ISE; import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.server.security.AllowHttpMethodsResourceFilter; -import org.eclipse.jetty.ee8.nested.ContextHandler; import org.eclipse.jetty.ee8.servlet.FilterHolder; import org.eclipse.jetty.ee8.servlet.FilterMapping; import org.eclipse.jetty.ee8.servlet.ServletContextHandler; @@ -47,7 +46,7 @@ public class JettyServerInitUtils { private static final String[] GZIP_METHODS = new String[]{HttpMethod.GET, HttpMethod.POST}; - public static GzipHandler wrapWithDefaultGzipHandler(final ContextHandler handler, int inflateBufferSize, int compressionLevel) + public static GzipHandler wrapWithDefaultGzipHandler(final ServletContextHandler handler, int inflateBufferSize, int compressionLevel) { GzipHandler gzipHandler = new GzipHandler(); gzipHandler.setMinGzipSize(0); diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java index 20e2781368e1..1723d750c654 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java @@ -75,12 +75,8 @@ public static void 
deduplicateHeadersInProxyServlet( { for (final String headerName : StandardResponseHeaderFilterHolder.STANDARD_HEADERS) { if (serverResponse.getHeaders().contains(headerName) && proxyResponse.containsHeader(headerName)) { - // Use a type-safe approach instead of direct casting - if (proxyResponse instanceof org.eclipse.jetty.server.Response) { - ((org.eclipse.jetty.server.Response) proxyResponse).getHeaders().remove(headerName); - } else if (proxyResponse instanceof org.eclipse.jetty.ee8.nested.Response) { - ((org.eclipse.jetty.ee8.nested.Response) proxyResponse).getHttpFields().remove(headerName); - } + // In Jetty 12 EE8, use the standard servlet API method to remove headers + proxyResponse.setHeader(headerName, null); } } } From 97e764966c0d134e03fd44abf03732d25a6c68d8 Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 20:54:05 -0500 Subject: [PATCH 15/42] Revert back to verbose and dirty Injector setup to get hadoop index fully working --- .../indexer/HadoopDruidIndexerConfig.java | 36 +++++++------- .../initialization/CoreInjectorBuilder.java | 48 +++++++++++++++++++ .../druid/initialization/Initialization.java | 12 +++++ .../initialization/ServerInjectorBuilder.java | 48 +++++++++++++++++++ 4 files changed, 124 insertions(+), 20 deletions(-) diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java index d00e2f5967ba..ca44a0fc2749 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java @@ -35,15 +35,13 @@ import com.google.inject.Module; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.impl.InputRowParser; +import org.apache.druid.guice.GuiceInjectors; import org.apache.druid.guice.JsonConfigProvider; -import org.apache.druid.guice.LocalDataStorageDruidModule; 
-import org.apache.druid.guice.StartupInjectorBuilder; -import org.apache.druid.guice.StorageNodeModule; import org.apache.druid.guice.annotations.Self; import org.apache.druid.indexer.granularity.GranularitySpec; import org.apache.druid.indexer.partitions.DimensionBasedPartitionsSpec; import org.apache.druid.indexer.path.PathSpec; -import org.apache.druid.initialization.CoreInjectorBuilder; +import org.apache.druid.initialization.Initialization; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.java.util.common.StringUtils; @@ -55,7 +53,6 @@ import org.apache.druid.segment.IndexMergerV9; import org.apache.druid.segment.IndexSpec; import org.apache.druid.segment.loading.DataSegmentPusher; -import org.apache.druid.segment.writeout.SegmentWriteOutMediumModule; import org.apache.druid.server.DruidNode; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.ShardSpec; @@ -109,21 +106,20 @@ public class HadoopDruidIndexerConfig public static final Properties PROPERTIES; static { - Injector baseInjector = new StartupInjectorBuilder().withEmptyProperties().build(); - INJECTOR = new CoreInjectorBuilder(baseInjector).addModules( - new StorageNodeModule(), - new SegmentWriteOutMediumModule(), - new LocalDataStorageDruidModule(), - (Module) binder -> { - JsonConfigProvider.bindInstance( - binder, - Key.get(DruidNode.class, Self.class), - new DruidNode("hadoop-indexer", null, false, null, null, true, false) - ); - JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", HadoopKerberosConfig.class); - }, - new IndexingHadoopModule() - ).build(); + INJECTOR = Initialization.makeInjectorWithoutJettyModules( + GuiceInjectors.makeStartupInjector(), + ImmutableList.of( + (Module) binder -> { + JsonConfigProvider.bindInstance( + binder, + Key.get(DruidNode.class, Self.class), + new DruidNode("hadoop-indexer", null, false, null, null, true, false) + ); + 
JsonConfigProvider.bind(binder, "druid.hadoop.security.kerberos", HadoopKerberosConfig.class); + }, + new IndexingHadoopModule() + ) + ); JSON_MAPPER = INJECTOR.getInstance(ObjectMapper.class); INDEX_IO = INJECTOR.getInstance(IndexIO.class); INDEX_MERGER_V9 = INJECTOR.getInstance(IndexMergerV9.class); diff --git a/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java b/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java index 602fe4ec3d2b..5e09059ed55f 100644 --- a/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java +++ b/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java @@ -137,4 +137,52 @@ public CoreInjectorBuilder forServer() ); return this; } + + /** + * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 + */ + @Deprecated + public CoreInjectorBuilder forServerWithoutJetty() + { + withLogging(); + withLifecycle(); + add( + ExtensionsModule.SecondaryModule.class, + new DruidAuthModule(), + new PolicyModule(), + TLSCertificateCheckerModule.class, + EmitterModule.class, + HttpClientModule.global(), + HttpClientModule.escalatedGlobal(), + new HttpClientModule("druid.broker.http", Client.class, true), + new HttpClientModule("druid.broker.http", EscalatedClient.class, true), + new CuratorModule(), + new AnnouncerModule(), + new MetricsModule(), + new SegmentWriteOutMediumModule(), + new ServerModule(), + new StorageNodeModule(), + new ExpressionModule(), + new BuiltInTypesModule(), + new DiscoveryModule(), + new ServerViewModule(), + new MetadataConfigModule(), + new DerbyMetadataStorageDruidModule(), + new JacksonConfigManagerModule(), + new LocalDataStorageDruidModule(), + new TombstoneDataStorageModule(), + new JavaScriptModule(), + new AuthenticatorModule(), + new AuthenticatorMapperModule(), + new EscalatorModule(), + new AuthorizerModule(), + new AuthorizerMapperModule(), + new StartupLoggingModule(), + new 
ExternalStorageAccessSecurityModule(), + new ServiceClientModule(), + new StorageConnectorModule(), + new CatalogCoreModule() + ); + return this; + } } diff --git a/server/src/main/java/org/apache/druid/initialization/Initialization.java b/server/src/main/java/org/apache/druid/initialization/Initialization.java index adf3136d3b1d..acdef3118e2f 100644 --- a/server/src/main/java/org/apache/druid/initialization/Initialization.java +++ b/server/src/main/java/org/apache/druid/initialization/Initialization.java @@ -62,4 +62,16 @@ public static Injector makeInjectorWithModules( { return ServerInjectorBuilder.makeServerInjector(baseInjector, ImmutableSet.of(), modules); } + + /** + * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 + */ + @Deprecated + public static Injector makeInjectorWithoutJettyModules( + final Injector baseInjector, + final Iterable modules + ) + { + return ServerInjectorBuilder.makeServerInjectorWithoutJettyModules(baseInjector, ImmutableSet.of(), modules); + } } diff --git a/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java b/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java index fd441b958bf7..d6402fc50617 100644 --- a/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java +++ b/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java @@ -73,6 +73,23 @@ public static Injector makeServerInjector( .build(); } + /** + * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 + */ + @Deprecated + @VisibleForTesting + public static Injector makeServerInjectorWithoutJettyModules( + final Injector baseInjector, + final Set nodeRoles, + final Iterable modules + ) + { + return new ServerInjectorBuilder(baseInjector) + .nodeRoles(nodeRoles) + .serviceModules(modules) + .buildWithoutJettyModules(); + } + public ServerInjectorBuilder(Injector baseInjector) { 
this.baseInjector = baseInjector; @@ -118,6 +135,37 @@ public Injector build() return new ExtensionInjectorBuilder(serviceBuilder).build(); } + /** + * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 + */ + public Injector buildWithoutJettyModules() + { + Preconditions.checkNotNull(baseInjector); + Preconditions.checkNotNull(nodeRoles); + + Module registerNodeRoleModule = registerNodeRoleModule(nodeRoles); + + // Child injector, with the registered node roles + Injector childInjector = baseInjector.createChildInjector(registerNodeRoleModule); + + // Create the core set of modules shared by all services. + // Here and below, the modules are filtered by the load modules list and + // the set of roles which this server provides. + CoreInjectorBuilder coreBuilder = new CoreInjectorBuilder(childInjector, nodeRoles).forServerWithoutJetty(); + + // Override with the per-service modules. + ServiceInjectorBuilder serviceBuilder = (ServiceInjectorBuilder) new ServiceInjectorBuilder(coreBuilder).addAll( + Iterables.concat( + // bind nodeRoles for the new injector as well + ImmutableList.of(registerNodeRoleModule), + modules + ) + ); + + // Override again with extensions. 
+ return new ExtensionInjectorBuilder(serviceBuilder).build(); + } + public static Module registerNodeRoleModule(Set nodeRoles) { return binder -> { From 0e08e10c34e37aa97dec92b742c862632efcc98f Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 21:16:46 -0500 Subject: [PATCH 16/42] Fix http trailers for QueryResultPusher --- .../druid/server/QueryResultPusher.java | 24 +++++++------------ 1 file changed, 8 insertions(+), 16 deletions(-) diff --git a/server/src/main/java/org/apache/druid/server/QueryResultPusher.java b/server/src/main/java/org/apache/druid/server/QueryResultPusher.java index e473393b891a..2ee9c0d5d96f 100644 --- a/server/src/main/java/org/apache/druid/server/QueryResultPusher.java +++ b/server/src/main/java/org/apache/druid/server/QueryResultPusher.java @@ -39,7 +39,6 @@ import org.apache.druid.query.context.ResponseContext; import org.apache.druid.server.security.AuthConfig; import org.apache.druid.server.security.ForbiddenException; -import org.eclipse.jetty.http.HttpFields; import org.eclipse.jetty.http.HttpHeader; import javax.annotation.Nullable; @@ -52,6 +51,7 @@ import java.io.Closeable; import java.io.IOException; import java.io.OutputStream; +import java.util.HashMap; import java.util.Map; public abstract class QueryResultPusher @@ -67,7 +67,7 @@ public abstract class QueryResultPusher private final QueryResource.QueryMetricCounter counter; private final MediaType contentType; private final Map extraHeaders; - private final HttpFields.Mutable trailerFields; + private final Map trailerFields; private StreamingHttpResponseAccumulator accumulator; private AsyncContext asyncContext; @@ -92,7 +92,7 @@ public QueryResultPusher( this.counter = counter; this.contentType = contentType; this.extraHeaders = extraHeaders; - this.trailerFields = HttpFields.build(); + this.trailerFields = new HashMap<>(); } /** @@ -151,16 +151,11 @@ public Response push() response.setHeader(entry.getKey(), entry.getValue()); } - if (response instanceof 
org.eclipse.jetty.server.Response) { - org.eclipse.jetty.server.Response jettyResponse = (org.eclipse.jetty.server.Response) response; + response.setHeader(HttpHeader.TRAILER.toString(), RESULT_TRAILER_HEADERS); + response.setTrailerFields(() -> trailerFields); - jettyResponse.getHeaders().put(HttpHeader.TRAILER.toString(), RESULT_TRAILER_HEADERS); - jettyResponse.setTrailersSupplier(() -> trailerFields); - - // Start with complete status - - trailerFields.put(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER, "true"); - } + // Start with complete status + trailerFields.put(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER, "true"); accumulator = new StreamingHttpResponseAccumulator(queryResponse.getResponseContext(), resultsWriter); @@ -427,10 +422,7 @@ public void initialize() response.setContentType(contentType.toString()); - if (response instanceof org.eclipse.jetty.server.Response) { - org.eclipse.jetty.server.Response jettyResponse = (org.eclipse.jetty.server.Response) response; - jettyResponse.setTrailersSupplier(() -> trailerFields); - } + response.setTrailerFields(() -> trailerFields); try { out = new CountingOutputStream(response.getOutputStream()); From d2a5903c6c1f6a6c446c4f9d747781015ed32021 Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 22:50:07 -0500 Subject: [PATCH 17/42] Fix trailer related tests in QueryResourceTest --- .../druid/server/QueryResourceTest.java | 111 ++++-------------- .../server/mocks/MockHttpServletResponse.java | 21 ++++ 2 files changed, 45 insertions(+), 87 deletions(-) diff --git a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java index 7c63a27c41e3..684c2c157342 100644 --- a/server/src/test/java/org/apache/druid/server/QueryResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/QueryResourceTest.java @@ -74,7 +74,6 @@ import org.apache.druid.server.log.TestRequestLogger; import 
org.apache.druid.server.metrics.NoopServiceEmitter; import org.apache.druid.server.mocks.ExceptionalInputStream; -import org.apache.druid.server.mocks.MockAsyncContext; import org.apache.druid.server.mocks.MockHttpServletRequest; import org.apache.druid.server.mocks.MockHttpServletResponse; import org.apache.druid.server.scheduling.HiLoQueryLaningStrategy; @@ -91,8 +90,6 @@ import org.apache.druid.server.security.ForbiddenException; import org.apache.druid.server.security.Resource; import org.apache.http.HttpStatus; -import org.easymock.EasyMock; -import org.eclipse.jetty.http.HttpFields; import org.eclipse.jetty.http.HttpHeader; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; @@ -119,6 +116,7 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; @@ -273,7 +271,15 @@ public void testGoodQuery() throws IOException { expectPermissiveHappyPathAuth(); - Assert.assertEquals(200, expectAsyncRequestFlow(SIMPLE_TIMESERIES_QUERY).getStatus()); + HttpServletResponse servletResponse = expectAsyncRequestFlow(SIMPLE_TIMESERIES_QUERY); + Assert.assertEquals(200, servletResponse.getStatus()); + Assert.assertTrue(servletResponse.containsHeader(HttpHeader.TRAILER.toString())); + + final Map fields = servletResponse.getTrailerFields().get(); + Assert.assertFalse(fields.containsKey(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); + + Assert.assertTrue(fields.containsKey(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); + Assert.assertEquals(fields.get(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER), "true"); } @Test @@ -485,69 +491,22 @@ public QueryRunner getQueryRunnerForSegments( expectPermissiveHappyPathAuth(); - org.eclipse.jetty.server.Response response = this.jettyResponseforRequest(testServletRequest); - Assert.assertNull(queryResource.doPost(new ByteArrayInputStream( - 
SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8)), - null /*pretty*/, - testServletRequest - )); - Assert.assertTrue(response.getHeaders().contains(HttpHeader.TRAILER.toString())); - Assert.assertEquals(response.getHeaders().get(HttpHeader.TRAILER.toString()), QueryResultPusher.RESULT_TRAILER_HEADERS); + HttpServletResponse response = expectAsyncRequestFlow(testServletRequest, SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8), queryResource); - final HttpFields fields = response.getTrailersSupplier().get(); - Assert.assertTrue(fields.contains(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); + Assert.assertTrue(response.containsHeader(HttpHeader.TRAILER.toString())); + Assert.assertEquals(response.getHeader(HttpHeader.TRAILER.toString()), QueryResultPusher.RESULT_TRAILER_HEADERS); + + final Map fields = response.getTrailerFields().get(); + Assert.assertTrue(fields.containsKey(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); Assert.assertEquals( fields.get(QueryResource.ERROR_MESSAGE_TRAILER_HEADER), "Query did not complete within configured timeout period. You can increase query timeout or tune the performance of query." 
); - Assert.assertTrue(fields.contains(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); + Assert.assertTrue(fields.containsKey(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); Assert.assertEquals(fields.get(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER), "false"); } - @Test - public void testSuccessResponseWithTrailerHeader() throws IOException - { - queryResource = new QueryResource( - new QueryLifecycleFactory( - CONGLOMERATE, - TEST_SEGMENT_WALKER, - new DefaultGenericQueryMetricsFactory(), - new NoopServiceEmitter(), - testRequestLogger, - new AuthConfig(), - NoopPolicyEnforcer.instance(), - AuthTestUtils.TEST_AUTHORIZER_MAPPER, - Suppliers.ofInstance(new DefaultQueryConfig(ImmutableMap.of())) - ), - jsonMapper, - queryScheduler, - null, - new QueryResourceQueryResultPusherFactory( - jsonMapper, - ResponseContextConfig.newConfig(true), - DRUID_NODE - ), - new ResourceIOReaderWriterFactory(jsonMapper, smileMapper) - ); - - expectPermissiveHappyPathAuth(); - - org.eclipse.jetty.server.Response response = this.jettyResponseforRequest(testServletRequest); - Assert.assertNull(queryResource.doPost(new ByteArrayInputStream( - SIMPLE_TIMESERIES_QUERY.getBytes(StandardCharsets.UTF_8)), - null /*pretty*/, - testServletRequest - )); - Assert.assertTrue(response.getHeaders().contains(HttpHeader.TRAILER.toString())); - - final HttpFields fields = response.getTrailersSupplier().get(); - Assert.assertFalse(fields.contains(QueryResource.ERROR_MESSAGE_TRAILER_HEADER)); - - Assert.assertTrue(fields.contains(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); - Assert.assertEquals(fields.get(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER), "true"); - } - @Test public void testResponseContextContainsMissingSegments_whenLastSegmentIsMissing() throws IOException { @@ -633,27 +592,19 @@ public QueryRunner getQueryRunnerForSegments(Query query, Iterable fields = response.getTrailerFields().get(); + Assert.assertTrue(response.containsHeader(QueryResource.HEADER_RESPONSE_CONTEXT)); 
Assert.assertEquals( jsonMapper.writeValueAsString(ImmutableMap.of("missingSegments", ImmutableList.of(missingSegDesc))), - fields.get(QueryResource.HEADER_RESPONSE_CONTEXT) + response.getHeader(QueryResource.HEADER_RESPONSE_CONTEXT) ); - Assert.assertTrue(fields.contains(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); + Assert.assertTrue(fields.containsKey(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); Assert.assertEquals("true", fields.get(QueryResource.RESPONSE_COMPLETE_TRAILER_HEADER)); } @@ -1779,18 +1730,4 @@ private Response expectSynchronousRequestFlow( { return queryResource.doPost(new ByteArrayInputStream(bytes), null, req); } - - private org.eclipse.jetty.server.Response jettyResponseforRequest(MockHttpServletRequest req) - { - org.eclipse.jetty.server.Response responseMock = EasyMock.mock(org.eclipse.jetty.server.Response.class); - - EasyMock.expect(responseMock.isCommitted()).andReturn(true); - - req.newAsyncContext(() -> { - final MockAsyncContext retVal = new MockAsyncContext(); - retVal.response = (javax.servlet.ServletResponse) responseMock; - return retVal; - }); - return responseMock; - } } diff --git a/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java b/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java index 62e6ca4a2668..b5c723bc275b 100644 --- a/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java +++ b/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java @@ -34,6 +34,8 @@ import java.util.Collection; import java.util.LinkedHashMap; import java.util.Locale; +import java.util.Map; +import java.util.function.Supplier; /** * A fake HttpServletResponse used in tests. 
A lot of methods are implemented as @@ -59,6 +61,8 @@ public static MockHttpServletResponse forRequest(MockHttpServletRequest req) new LinkedHashMap<>(), ArrayList::new ); + private Supplier> trailerSupplier; + public final ByteArrayOutputStream baos = new ByteArrayOutputStream(); private int statusCode; @@ -145,9 +149,13 @@ public void addDateHeader(String name, long date) throw new UnsupportedOperationException(); } + /** + * HttpServletResponse 4.0.1 spec dictates that setHeader overwrites existing values. + */ @Override public void setHeader(String name, String value) { + headers.removeAll(name); headers.put(name, value); } @@ -337,4 +345,17 @@ public Locale getLocale() { throw new UnsupportedOperationException(); } + + + @Override + public void setTrailerFields(Supplier> supplier) + { + this.trailerSupplier = supplier; + } + + @Override + public Supplier> getTrailerFields() + { + return trailerSupplier; + } } From 8ddb9103c41c70d44389d18f1523e826e70b99f1 Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 23:41:05 -0500 Subject: [PATCH 18/42] add some UTs --- ...tandardResponseHeaderFilterHolderTest.java | 47 +++++++++- .../avatica/DruidAvaticaJsonHandlerTest.java | 86 +++++++++++++++++++ .../DruidAvaticaProtobufHandlerTest.java | 46 ++++++++++ 3 files changed, 177 insertions(+), 2 deletions(-) create mode 100644 sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java diff --git a/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java b/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java index e85dc6a862fa..e5a5afdb57f0 100644 --- a/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java +++ b/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java @@ -23,6 +23,9 @@ import 
org.apache.druid.server.initialization.ServerConfig; import org.easymock.Capture; import org.easymock.EasyMock; +import org.eclipse.jetty.client.Response; +import org.eclipse.jetty.http.HttpField; +import org.eclipse.jetty.http.HttpFields; import org.hamcrest.CoreMatchers; import org.hamcrest.MatcherAssert; import org.junit.After; @@ -46,6 +49,9 @@ public class StandardResponseHeaderFilterHolderTest public HttpServletResponse httpResponse; public FilterChain filterChain; + public HttpServletResponse proxyResponse; + public Response clientResponse; + @Before public void setUp() { @@ -53,12 +59,15 @@ public void setUp() httpRequest = EasyMock.strictMock(HttpServletRequest.class); httpResponse = EasyMock.strictMock(HttpServletResponse.class); filterChain = EasyMock.strictMock(FilterChain.class); + + proxyResponse = EasyMock.strictMock(HttpServletResponse.class); + clientResponse = EasyMock.strictMock(Response.class); } @After public void tearDown() { - EasyMock.verify(serverConfig, httpRequest, httpResponse, filterChain); + EasyMock.verify(serverConfig, httpRequest, httpResponse, filterChain, proxyResponse, clientResponse); } @Test @@ -130,6 +139,40 @@ public void test_get_invalidContentSecurityPolicy() ); } + @Test + public void test_deduplicateHeadersInProxyServlet_duplicatesExist() + { + EasyMock.expect(proxyResponse.containsHeader("Cache-Control")).andReturn(true).once(); + proxyResponse.setHeader("Cache-Control", null); + EasyMock.expectLastCall().once(); + EasyMock.expect(proxyResponse.containsHeader("Strict-Transport-Security")).andReturn(false).once(); + + EasyMock.expect(clientResponse.getHeaders()).andReturn(HttpFields.from(new HttpField("Cache-Control", "true"), new HttpField("Strict-Transport-Security", "true"))).times(3); + + + replayAllMocks(); + + StandardResponseHeaderFilterHolder.deduplicateHeadersInProxyServlet(proxyResponse, clientResponse); + } + + @Test + public void test_duplicateHeadersInProxyServlet_noDuplicates() + { + 
EasyMock.expect(proxyResponse.containsHeader("Cache-Control")).andReturn(false).once(); + EasyMock.expect(proxyResponse.containsHeader("Strict-Transport-Security")).andReturn(false).once(); + + EasyMock.expect(clientResponse.getHeaders()) + .andReturn(HttpFields.from( + new HttpField("Cache-Control", "true"), + new HttpField("Strict-Transport-Security", "true") + )) + .times(3); + + replayAllMocks(); + + StandardResponseHeaderFilterHolder.deduplicateHeadersInProxyServlet(proxyResponse, clientResponse); + } + private StandardResponseHeaderFilterHolder.StandardResponseHeaderFilter makeFilter() { return (StandardResponseHeaderFilterHolder.StandardResponseHeaderFilter) @@ -163,6 +206,6 @@ private void runFilterAndVerifyHeaders(final Map expectedHeaders private void replayAllMocks() { - EasyMock.replay(serverConfig, httpRequest, httpResponse, filterChain); + EasyMock.replay(serverConfig, httpRequest, httpResponse, filterChain, proxyResponse, clientResponse); } } diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java new file mode 100644 index 000000000000..be20b3bfa5c4 --- /dev/null +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java @@ -0,0 +1,86 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.avatica; + +import org.apache.druid.server.DruidNode; +import org.easymock.EasyMock; +import org.eclipse.jetty.http.HttpFields; +import org.eclipse.jetty.http.HttpURI; +import org.eclipse.jetty.server.Handler; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.Response; +import org.eclipse.jetty.util.Callback; +import org.junit.Assert; +import org.junit.Test; + +public class DruidAvaticaJsonHandlerTest extends DruidAvaticaHandlerTest +{ + @Override + protected String getJdbcUrlTail() + { + return DruidAvaticaJsonHandler.AVATICA_PATH; + } + + @Override + protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) + { + return new DruidAvaticaJsonHandler( + druidMeta, + new DruidNode("dummy", "dummy", false, 1, null, true, false), + new AvaticaMonitor() + ); + } + + @Test + public void testNonPostRequestSucceeds() throws Exception + { + DruidMeta druidMeta = EasyMock.mock(DruidMeta.class); + DruidAvaticaJsonHandler handler = new DruidAvaticaJsonHandler( + druidMeta, + new DruidNode("dummy", "dummy", false, 1, null, true, false), + new AvaticaMonitor() + ); + + Request request = EasyMock.mock(Request.class); + Response response = EasyMock.mock(Response.class); + Callback callback = EasyMock.mock(Callback.class); + HttpURI httpURI = EasyMock.mock(HttpURI.class); + HttpFields.Mutable headers = EasyMock.mock(HttpFields.Mutable.class); + + EasyMock.expect(request.getHttpURI()).andReturn(httpURI); + 
EasyMock.expect(httpURI.getPath()).andReturn(DruidAvaticaJsonHandler.AVATICA_PATH_NO_TRAILING_SLASH); + EasyMock.expect(request.getMethod()).andReturn("GET"); + + EasyMock.expect(response.getHeaders()).andReturn(headers); + + headers.put("Content-Type", "application/json;charset=utf-8"); + EasyMock.expectLastCall().andReturn(null); + + callback.succeeded(); + EasyMock.expectLastCall(); + + EasyMock.replay(request, response, callback, httpURI); + + boolean handled = handler.handle(request, response, callback); + + Assert.assertTrue("Handler should have handled the request", handled); + EasyMock.verify(request, response, callback, httpURI); + } +} diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java index 026b0cdb1435..e649233fe15f 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java @@ -21,7 +21,16 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.server.DruidNode; +import org.easymock.EasyMock; +import org.eclipse.jetty.http.HttpURI; import org.eclipse.jetty.server.Handler; +import org.eclipse.jetty.server.Request; +import org.eclipse.jetty.server.Response; +import org.eclipse.jetty.util.Callback; +import org.junit.Assert; +import org.junit.Test; + +import java.nio.ByteBuffer; public class DruidAvaticaProtobufHandlerTest extends DruidAvaticaHandlerTest { @@ -43,4 +52,41 @@ protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) new AvaticaMonitor() ); } + + @Test + public void testNonPostRequestReturns400() throws Exception + { + DruidMeta druidMeta = EasyMock.mock(DruidMeta.class); + DruidAvaticaProtobufHandler handler = new DruidAvaticaProtobufHandler( + druidMeta, + new DruidNode("dummy", "dummy", false, 1, null, true, false), + new AvaticaMonitor() + ); + 
+ Request request = EasyMock.mock(Request.class); + Response response = EasyMock.mock(Response.class); + Callback callback = EasyMock.mock(Callback.class); + HttpURI httpURI = EasyMock.mock(HttpURI.class); + + EasyMock.expect(request.getHttpURI()).andReturn(httpURI); + EasyMock.expect(httpURI.getPath()).andReturn(DruidAvaticaProtobufHandler.AVATICA_PATH_NO_TRAILING_SLASH); + EasyMock.expect(request.getMethod()).andReturn("GET"); + + response.setStatus(400); + EasyMock.expectLastCall(); + + response.write( + EasyMock.eq(true), + EasyMock.anyObject(ByteBuffer.class), + EasyMock.eq(callback) + ); + EasyMock.expectLastCall(); + + EasyMock.replay(request, response, callback, httpURI); + + boolean handled = handler.handle(request, response, callback); + + Assert.assertTrue("Handler should have handled the request", handled); + EasyMock.verify(request, response, callback, httpURI); + } } From a2a224dfcec28838e89e4ffbdb82ae399452cee8 Mon Sep 17 00:00:00 2001 From: capistrant Date: Fri, 22 Aug 2025 23:46:42 -0500 Subject: [PATCH 19/42] prevent banned dep from getting pulled in --- pom.xml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pom.xml b/pom.xml index f2ea39d6fbb1..1576235eb6f0 100644 --- a/pom.xml +++ b/pom.xml @@ -765,6 +765,12 @@ org.eclipse.jetty.ee8 jetty-ee8-servlet ${jetty.version} + + + org.eclipse.jetty.toolchain + jetty-servlet-api + + org.eclipse.jetty.ee8 From 8d9eb66d9684fcb241430be7524611851aa1f7bb Mon Sep 17 00:00:00 2001 From: capistrant Date: Sat, 23 Aug 2025 00:04:18 -0500 Subject: [PATCH 20/42] Run rewrite rules mvn goal to fixup test files --- .../apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java | 2 +- .../druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java index be20b3bfa5c4..86331f9c3606 100644 --- 
a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java @@ -28,7 +28,7 @@ import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; public class DruidAvaticaJsonHandlerTest extends DruidAvaticaHandlerTest { diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java index e649233fe15f..88bdb2f296a3 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java @@ -28,7 +28,7 @@ import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; import org.junit.Assert; -import org.junit.Test; +import org.junit.jupiter.api.Test; import java.nio.ByteBuffer; From 36605209a587320892744c3a764fe45defec074c Mon Sep 17 00:00:00 2001 From: capistrant Date: Sat, 23 Aug 2025 19:37:14 -0500 Subject: [PATCH 21/42] stop accounting for buggy mock impl in sql test --- .../java/org/apache/druid/sql/http/SqlResourceTest.java | 8 -------- 1 file changed, 8 deletions(-) diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java index 87e9285a30a9..a1040e9018a4 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java @@ -436,15 +436,7 @@ public void testCountStarWithMissingIntervalsContext() throws Exception final MockHttpServletResponse response = postForAsyncResponse(sqlQuery, makeRegularUserReq()); - // In tests, MockHttpServletResponse stores headers as a MultiMap. 
- // This allows the same header key to be set multiple times (e.g., once at the start and once at the end of query processing). - // As a result, we observe duplicate context entries for this test in the expected set. - // This differs from typical behavior for other headers, where a new value would overwrite any previously set value. final Object expectedMissingHeaders = ImmutableList.of( - ImmutableMap.of( - "uncoveredIntervals", "2030-01-01/78149827981274-01-01", - "uncoveredIntervalsOverflowed", "true" - ), ImmutableMap.of( "uncoveredIntervals", "2030-01-01/78149827981274-01-01", "uncoveredIntervalsOverflowed", "true" From 13e4f5c7a2ea256d8e48686c1969cdb13c76b56e Mon Sep 17 00:00:00 2001 From: capistrant Date: Mon, 25 Aug 2025 09:18:55 -0500 Subject: [PATCH 22/42] Add new http server config for Jetty UriCompliance mode. Default to allow backwards compatibility --- docs/configuration/index.md | 1 + .../server/initialization/ServerConfig.java | 20 ++++++++++++++++--- .../jetty/CliIndexerServerModule.java | 3 ++- .../jetty/JettyServerModule.java | 3 +++ .../initialization/ServerConfigTest.java | 5 ++++- 5 files changed, 27 insertions(+), 5 deletions(-) diff --git a/docs/configuration/index.md b/docs/configuration/index.md index 4c11e1253744..34f21237cf35 100644 --- a/docs/configuration/index.md +++ b/docs/configuration/index.md @@ -1496,6 +1496,7 @@ Druid uses Jetty to serve HTTP requests. |`druid.server.http.enableForwardedRequestCustomizer`|If enabled, adds Jetty ForwardedRequestCustomizer which reads X-Forwarded-* request headers to manipulate servlet request object when Druid is used behind a proxy.|false| |`druid.server.http.allowedHttpMethods`|List of HTTP methods that should be allowed in addition to the ones required by Druid APIs. Druid APIs require GET, PUT, POST, and DELETE, which are always allowed. This option is not useful unless you have installed an extension that needs these additional HTTP methods or that adds functionality related to CORS. 
None of Druid's bundled extensions require these methods.|`[]`| |`druid.server.http.contentSecurityPolicy`|Content-Security-Policy header value to set on each non-POST response. Setting this property to an empty string, or omitting it, both result in the default `frame-ancestors: none` being set.|`frame-ancestors 'none'`| +|`druid.server.http.uriCompliance`|Jetty UriCompliance mode for Druid's embedded Jetty servers. To modify, override this config with the string representation of any URiComplicance mode that Jetty supports.|LEGACY| #### Indexer processing resources diff --git a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java index 8fdef26a4105..13c4245c86bb 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java +++ b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java @@ -29,6 +29,7 @@ import org.apache.druid.query.QueryContexts; import org.apache.druid.server.SubqueryGuardrailHelper; import org.apache.druid.utils.JvmUtils; +import org.eclipse.jetty.http.UriCompliance; import org.joda.time.Period; import javax.annotation.Nullable; @@ -73,7 +74,8 @@ public ServerConfig( boolean showDetailedJettyErrors, @NotNull ErrorResponseTransformStrategy errorResponseTransformStrategy, @Nullable String contentSecurityPolicy, - boolean enableHSTS + boolean enableHSTS, + @Nullable String uriCompliance ) { this.numThreads = numThreads; @@ -97,6 +99,7 @@ public ServerConfig( this.errorResponseTransformStrategy = errorResponseTransformStrategy; this.contentSecurityPolicy = contentSecurityPolicy; this.enableHSTS = enableHSTS; + this.uriCompliance = uriCompliance; } public ServerConfig() @@ -185,6 +188,9 @@ public ServerConfig(boolean enableQueryRequestsQueuing) @JsonProperty private boolean enableHSTS = false; + @JsonProperty + private String uriCompliance = UriCompliance.LEGACY.getName(); + /** * This feature 
flag enables query requests queuing when admins want to reserve some threads for * non-query requests. This feature flag is not documented and will be removed in the future. @@ -306,6 +312,11 @@ public boolean isEnableQueryRequestsQueuing() return enableQueryRequestsQueuing; } + public String getUriCompliance() + { + return uriCompliance; + } + @Override public boolean equals(Object o) { @@ -337,7 +348,8 @@ public boolean equals(Object o) errorResponseTransformStrategy.equals(that.errorResponseTransformStrategy) && Objects.equals(contentSecurityPolicy, that.getContentSecurityPolicy()) && enableHSTS == that.enableHSTS && - enableQueryRequestsQueuing == that.enableQueryRequestsQueuing; + enableQueryRequestsQueuing == that.enableQueryRequestsQueuing && + Objects.equals(uriCompliance, that.uriCompliance); } @Override @@ -365,7 +377,8 @@ public int hashCode() showDetailedJettyErrors, contentSecurityPolicy, enableHSTS, - enableQueryRequestsQueuing + enableQueryRequestsQueuing, + uriCompliance ); } @@ -395,6 +408,7 @@ public String toString() ", contentSecurityPolicy=" + contentSecurityPolicy + ", enableHSTS=" + enableHSTS + ", enableQueryRequestsQueuing=" + enableQueryRequestsQueuing + + ", uriCompliance=" + uriCompliance + '}'; } diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/CliIndexerServerModule.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/CliIndexerServerModule.java index feb61965daa3..4782211da492 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/CliIndexerServerModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/CliIndexerServerModule.java @@ -166,7 +166,8 @@ public ServerConfig makeAdjustedServerConfig(ServerConfig oldConfig) oldConfig.isShowDetailedJettyErrors(), oldConfig.getErrorResponseTransformStrategy(), oldConfig.getContentSecurityPolicy(), - oldConfig.isEnableHSTS() + oldConfig.isEnableHSTS(), + oldConfig.getUriCompliance() ); } } diff 
--git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java index 8e45856e134a..0a3c6f153225 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java @@ -61,6 +61,7 @@ import org.eclipse.jetty.server.ConnectionFactory; import org.eclipse.jetty.server.ForwardedRequestCustomizer; import org.eclipse.jetty.server.Handler; +import org.eclipse.jetty.http.UriCompliance; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.Request; @@ -223,6 +224,7 @@ static Server makeAndInitializeServer( if (node.isEnablePlaintextPort()) { log.info("Creating http connector with port [%d]", node.getPlaintextPort()); HttpConfiguration httpConfiguration = new HttpConfiguration(); + httpConfiguration.setUriCompliance(UriCompliance.from(config.getUriCompliance())); if (config.isEnableForwardedRequestCustomizer()) { httpConfiguration.addCustomizer(new ForwardedRequestCustomizer()); } @@ -307,6 +309,7 @@ static Server makeAndInitializeServer( } final HttpConfiguration httpsConfiguration = new HttpConfiguration(); + httpsConfiguration.setUriCompliance(UriCompliance.from(config.getUriCompliance())); if (config.isEnableForwardedRequestCustomizer()) { httpsConfiguration.addCustomizer(new ForwardedRequestCustomizer()); } diff --git a/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java b/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java index 96745e163a00..8aad81979e56 100644 --- a/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java +++ b/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java @@ -25,6 +25,7 @@ import 
org.apache.druid.common.exception.AllowedRegexErrorResponseTransformStrategy; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.server.initialization.ServerConfig; +import org.eclipse.jetty.http.UriCompliance; import org.junit.Assert; import org.junit.Test; @@ -65,7 +66,8 @@ public void testSerde() throws Exception true, new AllowedRegexErrorResponseTransformStrategy(ImmutableList.of(".*")), "my-cool-policy", - true + true, + UriCompliance.RFC3986.getName() ); String modifiedConfigJson = OBJECT_MAPPER.writeValueAsString(modifiedConfig); ServerConfig modifiedConfig2 = OBJECT_MAPPER.readValue(modifiedConfigJson, ServerConfig.class); @@ -79,6 +81,7 @@ public void testSerde() throws Exception Assert.assertEquals("my-cool-policy", modifiedConfig.getContentSecurityPolicy()); Assert.assertEquals("my-cool-policy", modifiedConfig2.getContentSecurityPolicy()); Assert.assertTrue(modifiedConfig2.isEnableHSTS()); + Assert.assertEquals(UriCompliance.RFC3986.getName(), modifiedConfig2.getUriCompliance()); } @Test From 6e8e5f5d774e752b689b66c524b6efd9f122d9e9 Mon Sep 17 00:00:00 2001 From: capistrant Date: Mon, 25 Aug 2025 09:30:15 -0500 Subject: [PATCH 23/42] fix import order --- .../druid/server/initialization/jetty/JettyServerModule.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java index 0a3c6f153225..9b7d9e336d30 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java @@ -58,10 +58,10 @@ import org.apache.druid.server.metrics.MonitorsConfig; import org.apache.druid.server.security.CustomCheckX509TrustManager; import org.apache.druid.server.security.TLSCertificateChecker; +import 
org.eclipse.jetty.http.UriCompliance; import org.eclipse.jetty.server.ConnectionFactory; import org.eclipse.jetty.server.ForwardedRequestCustomizer; import org.eclipse.jetty.server.Handler; -import org.eclipse.jetty.http.UriCompliance; import org.eclipse.jetty.server.HttpConfiguration; import org.eclipse.jetty.server.HttpConnectionFactory; import org.eclipse.jetty.server.Request; From a66e4757085fe7476397d1315f433d43a4c161ef Mon Sep 17 00:00:00 2001 From: capistrant Date: Mon, 25 Aug 2025 09:52:09 -0500 Subject: [PATCH 24/42] fix spell checker flag --- docs/configuration/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/configuration/index.md b/docs/configuration/index.md index 34f21237cf35..89fdaa1121ac 100644 --- a/docs/configuration/index.md +++ b/docs/configuration/index.md @@ -1496,7 +1496,7 @@ Druid uses Jetty to serve HTTP requests. |`druid.server.http.enableForwardedRequestCustomizer`|If enabled, adds Jetty ForwardedRequestCustomizer which reads X-Forwarded-* request headers to manipulate servlet request object when Druid is used behind a proxy.|false| |`druid.server.http.allowedHttpMethods`|List of HTTP methods that should be allowed in addition to the ones required by Druid APIs. Druid APIs require GET, PUT, POST, and DELETE, which are always allowed. This option is not useful unless you have installed an extension that needs these additional HTTP methods or that adds functionality related to CORS. None of Druid's bundled extensions require these methods.|`[]`| |`druid.server.http.contentSecurityPolicy`|Content-Security-Policy header value to set on each non-POST response. Setting this property to an empty string, or omitting it, both result in the default `frame-ancestors: none` being set.|`frame-ancestors 'none'`| -|`druid.server.http.uriCompliance`|Jetty UriCompliance mode for Druid's embedded Jetty servers. 
To modify, override this config with the string representation of any URiComplicance mode that Jetty supports.|LEGACY| +|`druid.server.http.uriCompliance`|Jetty `UriCompliance` mode for Druid's embedded Jetty servers. To modify, override this config with the string representation of any `UriCompliance` mode that Jetty supports.|LEGACY| #### Indexer processing resources From 9fb989d74d83a6dd9fb92493cdcc6cd5071fb140 Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 26 Aug 2025 10:53:58 -0500 Subject: [PATCH 25/42] Fixup java docs links to use java17 --- docs/configuration/index.md | 6 +++--- .../extensions-core/simple-client-sslcontext.md | 6 +++--- docs/ingestion/input-sources.md | 8 ++++---- docs/operations/dump-segment.md | 2 +- docs/operations/java.md | 14 +++----------- docs/operations/tls-support.md | 8 ++++---- docs/querying/dimensionspecs.md | 2 +- docs/querying/filters.md | 2 +- docs/querying/math-expr.md | 2 +- docs/querying/sql-scalar.md | 2 +- 10 files changed, 22 insertions(+), 30 deletions(-) diff --git a/docs/configuration/index.md b/docs/configuration/index.md index 89fdaa1121ac..7bd8e2ab6be2 100644 --- a/docs/configuration/index.md +++ b/docs/configuration/index.md @@ -196,9 +196,9 @@ and `druid.tlsPort` properties on each service. Please see `Configuration` secti Druid uses Jetty as an embedded web server. To learn more about TLS/SSL, certificates, and related concepts in Jetty, including explanations of the configuration settings below, see "Configuring SSL/TLS KeyStores" in the [Jetty Operations Guide](https://www.eclipse.org/jetty/documentation.php). -For information about TLS/SSL support in Java in general, see the [Java Secure Socket Extension (JSSE) Reference Guide](https://docs.oracle.com/en/java/javase/11/security/java-secure-socket-extension-jsse-reference-guide.html). 
+For information about TLS/SSL support in Java in general, see the [Java Secure Socket Extension (JSSE) Reference Guide](https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html). The [Java Cryptography Architecture -Standard Algorithm Name Documentation for JDK 11](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all possible +Standard Algorithm Name Documentation for JDK 17](https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html) lists all possible values for the following properties, among others provided by the Java implementation. |Property|Description|Default|Required| @@ -231,7 +231,7 @@ These properties apply to the SSLContext that will be provided to the internal H |`druid.client.https.trustStoreAlgorithm`|Algorithm to be used by TrustManager to validate certificate chains|`javax.net.ssl.TrustManagerFactory.getDefaultAlgorithm()`|no| |`druid.client.https.trustStorePassword`|The [Password Provider](../operations/password-provider.md) or String password for the Trust Store.|none|yes| -This [document](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all the possible +This [document](https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html) lists all the possible values for the above mentioned configs among others provided by Java implementation. 
### Authentication and authorization diff --git a/docs/development/extensions-core/simple-client-sslcontext.md b/docs/development/extensions-core/simple-client-sslcontext.md index db1a37afe414..981e00107a45 100644 --- a/docs/development/extensions-core/simple-client-sslcontext.md +++ b/docs/development/extensions-core/simple-client-sslcontext.md @@ -23,9 +23,9 @@ title: "Simple SSLContext Provider Module" --> -This Apache Druid module contains a simple implementation of [SSLContext](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/javax/net/ssl/SSLContext.html) +This Apache Druid module contains a simple implementation of [SSLContext](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/javax/net/ssl/SSLContext.html) that will be injected to be used with HttpClient that Druid processes use internally to communicate with each other. To learn more about -Java's SSL support, please refer to [this](https://docs.oracle.com/en/java/javase/11/security/java-secure-socket-extension-jsse-reference-guide.html) guide. +Java's SSL support, please refer to [this](https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html) guide. |Property|Description|Default|Required| @@ -48,5 +48,5 @@ The following table contains optional parameters for supporting client certifica |`druid.client.https.keyManagerPassword`|The [Password Provider](../../operations/password-provider.md) or String password for the Key Manager.|none|no| |`druid.client.https.validateHostnames`|Validate the hostname of the server. 
This should not be disabled unless you are using [custom TLS certificate checks](../../operations/tls-support.md) and know that standard hostname validation is not needed.|true|no| -This [document](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all the possible +This [document](https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html) lists all the possible values for the above mentioned configs among others provided by Java implementation. diff --git a/docs/ingestion/input-sources.md b/docs/ingestion/input-sources.md index d4698b20f038..be29cb31a55c 100644 --- a/docs/ingestion/input-sources.md +++ b/docs/ingestion/input-sources.md @@ -184,7 +184,7 @@ Sample specs: |uris|JSON array of URIs where S3 objects to be ingested are located.|None|`uris` or `prefixes` or `objects` must be set| |prefixes|JSON array of URI prefixes for the locations of S3 objects to be ingested. Empty objects starting with one of the given prefixes will be skipped.|None|`uris` or `prefixes` or `objects` must be set| |objects|JSON array of S3 Objects to be ingested.|None|`uris` or `prefixes` or `objects` must be set| -|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| +|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| |systemFields|JSON array of system fields to return as part of input rows. Possible values: `__file_uri` (S3 URI starting with `s3://`), `__file_bucket` (S3 bucket), and `__file_path` (S3 object key).|None|no| | endpointConfig |Config for overriding the default S3 endpoint and signing region. This would allow ingesting data from a different S3 store. Please see [s3 config](../development/extensions-core/s3.md#connecting-to-s3-configuration) for more information.|None|No (defaults will be used if not given) | clientConfig |S3 client properties for the overridden s3 endpoint. This is used in conjunction with `endPointConfig`. Please see [s3 config](../development/extensions-core/s3.md#connecting-to-s3-configuration) for more information.|None|No (defaults will be used if not given) @@ -289,7 +289,7 @@ Sample specs: |uris|JSON array of URIs where Google Cloud Storage objects to be ingested are located.|None|`uris` or `prefixes` or `objects` must be set| |prefixes|JSON array of URI prefixes for the locations of Google Cloud Storage objects to be ingested. Empty objects starting with one of the given prefixes will be skipped.|None|`uris` or `prefixes` or `objects` must be set| |objects|JSON array of Google Cloud Storage objects to be ingested.|None|`uris` or `prefixes` or `objects` must be set| -|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| +|objectGlob|A glob for the object part of the S3 URI. In the URI `s3://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `s3://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| Note that the Google Cloud Storage input source will skip all empty objects only when `prefixes` is specified. @@ -377,7 +377,7 @@ Sample specs: |uris|JSON array of URIs where the Azure objects to be ingested are located. Use this format: `azureStorage://STORAGE_ACCOUNT/CONTAINER/PATH_TO_FILE`|None|One of the following must be set:`uris`, `prefixes`, or `objects`.| |prefixes|JSON array of URI prefixes for the locations of Azure objects to ingest. Use this format`azureStorage://STORAGE_ACCOUNT/CONTAINER/PREFIX`. Empty objects starting with any of the given prefixes are skipped.|None|One of the following must be set:`uris`, `prefixes`, or `objects`.| |objects|JSON array of Azure objects to ingest.|None|One of the following must be set:`uris`, `prefixes`, or `objects`.| -|objectGlob|A glob for the object part of the Azure URI. In the URI `azureStorage://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azureStorage://foo/bar/file.json` because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| +|objectGlob|A glob for the object part of the Azure URI. In the URI `azureStorage://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azureStorage://foo/bar/file.json` because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| |systemFields|JSON array of system fields to return as part of input rows. Possible values: `__file_uri` (Azure blob URI starting with `azureStorage://`), `__file_bucket` (Azure bucket), and `__file_path` (Azure object path).|None|no| |properties|Properties object for overriding the default Azure configuration. See below for more information.|None|No (defaults will be used if not given)| @@ -471,7 +471,7 @@ Sample specs: |uris|JSON array of URIs where the Azure objects to be ingested are located, in the form `azure:///`|None|`uris` or `prefixes` or `objects` must be set| |prefixes|JSON array of URI prefixes for the locations of Azure objects to ingest, in the form `azure:///`. Empty objects starting with one of the given prefixes are skipped.|None|`uris` or `prefixes` or `objects` must be set| |objects|JSON array of Azure objects to ingest.|None|`uris` or `prefixes` or `objects` must be set| -|objectGlob|A glob for the object part of the Azure URI. In the URI `azure://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azure://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| +|objectGlob|A glob for the object part of the Azure URI. In the URI `azure://foo/bar/file.json`, the glob is applied to `bar/file.json`.

The glob must match the entire object part, not just the filename. For example, the glob `*.json` does not match `azure://foo/bar/file.json`, because the object part is `bar/file.json`, and the`*` does not match the slash. To match all objects ending in `.json`, use `**.json` instead.

For more information, refer to the documentation for [`FileSystem#getPathMatcher`](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/nio/file/FileSystem.html#getPathMatcher(java.lang.String)).|None|no| |systemFields|JSON array of system fields to return as part of input rows. Possible values: `__file_uri` (Azure blob URI starting with `azure://`), `__file_bucket` (Azure bucket), and `__file_path` (Azure object path).|None|no| Note that the Azure input source skips all empty objects only when `prefixes` is specified. diff --git a/docs/operations/dump-segment.md b/docs/operations/dump-segment.md index f8c99366ad2c..f80b6275192d 100644 --- a/docs/operations/dump-segment.md +++ b/docs/operations/dump-segment.md @@ -36,7 +36,7 @@ java -classpath "/my/druid/lib/*" -Ddruid.extensions.loadList="[]" org.apache.dr --out /home/druid/output.txt ``` -If you use JDK 11 and above, you need to add the following additional parameters +If you use JDK 17 and above, you need to add the following additional parameters ``` --add-opens java.base/java.lang=ALL-UNNAMED --add-opens java.base/sun.nio.ch=ALL-UNNAMED diff --git a/docs/operations/java.md b/docs/operations/java.md index 0b8a474ac950..fc7faa60f24c 100644 --- a/docs/operations/java.md +++ b/docs/operations/java.md @@ -27,7 +27,7 @@ a Java runtime for Druid. ## Selecting a Java runtime - The project team recommends Java 17. Although you can use Java 11, support for it is deprecated. +Druid officially supports Java 17. The project team recommends using an OpenJDK-based Java distribution. There are many free and actively-supported distributions available, including @@ -43,7 +43,7 @@ Druid relies on the environment variables `JAVA_HOME` or `DRUID_JAVA_HOME` to fi ## Garbage collection In general, the project team recommends using the G1 collector with default settings. This is the default collector in -Java 11 and 17. +Java 17. Garbage collector selection and tuning is a form of sport in the Java community.
There may be situations where adjusting garbage collection configuration improves or worsens performance. The project team's guidance is that most people do @@ -51,18 +51,10 @@ not need to stray away from G1 with default settings. ## Strong encapsulation -Java 9 and beyond (including Java 11 and 17) include the capability for +Java 9 and beyond (including Java 17) include the capability for [strong encapsulation](https://dev.java/learn/strong-encapsulation-\(of-jdk-internals\)/) of internal JDK APIs. Druid uses certain internal JDK APIs, which must be added to `--add-exports` and `--add-opens` on the Java command line. -On Java 11, if these parameters are not included, you will see warnings like the following: - -``` -WARNING: An illegal reflective access operation has occurred -WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations -WARNING: All illegal access operations will be denied in a future release -``` - On Java 17, if these parameters are not included, you will see errors on startup like the following: ``` diff --git a/docs/operations/tls-support.md b/docs/operations/tls-support.md index 543f177bfedd..994f5f27441b 100644 --- a/docs/operations/tls-support.md +++ b/docs/operations/tls-support.md @@ -37,10 +37,10 @@ Apache Druid uses Jetty as its embedded web server. To get familiar with TLS/SSL, along with related concepts like keys and certificates, read [Configuring Secure Protocols](https://www.eclipse.org/jetty/documentation/jetty-12/operations-guide/index.html#og-protocols-ssl) in the Jetty documentation. -To get more in-depth knowledge of TLS/SSL support in Java in general, refer to the [Java Secure Socket Extension (JSSE) Reference Guide](https://docs.oracle.com/en/java/javase/11/security/java-secure-socket-extension-jsse-reference-guide.html). 
-The [Class SslContextFactory](https://www.eclipse.org/jetty/javadoc/jetty-11/org/eclipse/jetty/util/ssl/SslContextFactory.html) +To get more in-depth knowledge of TLS/SSL support in Java in general, refer to the [Java Secure Socket Extension (JSSE) Reference Guide](https://docs.oracle.com/en/java/javase/17/security/java-secure-socket-extension-jsse-reference-guide.html). +The [Class SslContextFactory](https://javadoc.jetty.org/jetty-12/org/eclipse/jetty/util/ssl/SslContextFactory.html) reference doc can help in understanding TLS/SSL configurations listed below. Finally, [Java Cryptography Architecture -Standard Algorithm Name Documentation for JDK 11](https://docs.oracle.com/en/java/javase/11/docs/specs/security/standard-names.html) lists all possible +Standard Algorithm Name Documentation for JDK 17](https://docs.oracle.com/en/java/javase/17/docs/specs/security/standard-names.html) lists all possible values for the configs below, among others provided by Java implementation. |Property|Description|Default|Required| @@ -79,7 +79,7 @@ The following table contains non-mandatory advanced configuration options, use c ## Internal communication over TLS Whenever possible Druid processes will use HTTPS to talk to each other. To enable this communication Druid's HttpClient needs to -be configured with a proper [SSLContext](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/javax/net/ssl/SSLContext.html) that is able +be configured with a proper [SSLContext](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/javax/net/ssl/SSLContext.html) that is able to validate the Server Certificates, otherwise communication will fail. 
Since, there are various ways to configure SSLContext, by default, Druid looks for an instance of SSLContext Guice binding diff --git a/docs/querying/dimensionspecs.md b/docs/querying/dimensionspecs.md index 68773abdae10..dba52abcc689 100644 --- a/docs/querying/dimensionspecs.md +++ b/docs/querying/dimensionspecs.md @@ -255,7 +255,7 @@ For a regular dimension, it assumes the string is formatted in [ISO-8601 date and time format](https://en.wikipedia.org/wiki/ISO_8601). * `format` : date time format for the resulting dimension value, in [Joda Time DateTimeFormat](http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html), or null to use the default ISO8601 format. -* `locale` : locale (language and country) to use, given as a [IETF BCP 47 language tag](https://www.oracle.com/java/technologies/javase/jdk11-suported-locales.html#util-text), e.g. `en-US`, `en-GB`, `fr-FR`, `fr-CA`, etc. +* `locale` : locale (language and country) to use, given as a [IETF BCP 47 language tag](https://www.oracle.com/java/technologies/javase/jdk17-suported-locales.html#util-text), e.g. `en-US`, `en-GB`, `fr-FR`, `fr-CA`, etc. * `timeZone` : time zone to use in [IANA tz database format](http://en.wikipedia.org/wiki/List_of_tz_database_time_zones), e.g. `Europe/Berlin` (this can possibly be different than the aggregation time-zone) * `granularity` : [granularity](granularities.md) to apply before formatting, or omit to not apply any granularity. * `asMillis` : boolean value, set to true to treat input strings as millis rather than ISO8601 strings. Additionally, if `format` is null or not specified, output will be in millis rather than ISO8601. 
diff --git a/docs/querying/filters.md b/docs/querying/filters.md index a9a862a49985..d088d92641ea 100644 --- a/docs/querying/filters.md +++ b/docs/querying/filters.md @@ -431,7 +431,7 @@ The regular expression filter is similar to the selector filter, but using regul | -------- | ----------- | -------- | | `type` | Must be `regex`.| Yes | | `dimension` | Input column or virtual column name to filter on. | Yes | -| `pattern` | String pattern to match - any standard [Java regular expression](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/regex/Pattern.html). | Yes | +| `pattern` | String pattern to match - any standard [Java regular expression](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/util/regex/Pattern.html). | Yes | | `extractionFn` | [Extraction function](./dimensionspecs.md#extraction-functions) to apply to `dimension` prior to value matching. See [filtering with extraction functions](#filtering-with-extraction-functions) for details. | No | Note that it is often more optimal to use a like filter instead of a regex for simple matching of prefixes. diff --git a/docs/querying/math-expr.md b/docs/querying/math-expr.md index 926446200f15..06ac395c7ad4 100644 --- a/docs/querying/math-expr.md +++ b/docs/querying/math-expr.md @@ -81,7 +81,7 @@ The following built-in functions are available. |name|description| |----|-----------| |concat|concat(expr, expr...) 
concatenate a list of strings| -|format|format(pattern[, args...]) returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/String.html#format(java.lang.String,java.lang.Object...)).| +|format|format(pattern[, args...]) returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/lang/String.html#format(java.lang.String,java.lang.Object...)).| |like|like(expr, pattern[, escape]) is equivalent to SQL `expr LIKE pattern`| |lookup|lookup(expr, lookup-name[,replaceMissingValueWith]) looks up expr in a registered,`replaceMissingValueWith` is an optional constant string [query-time lookup](../querying/lookups.md)| |parse_long|parse_long(string[, radix]) parses a string as a long with the given radix, or 10 (decimal) if a radix is not provided.| diff --git a/docs/querying/sql-scalar.md b/docs/querying/sql-scalar.md index 05f29436da0f..59c3e2893a86 100644 --- a/docs/querying/sql-scalar.md +++ b/docs/querying/sql-scalar.md @@ -115,7 +115,7 @@ String functions accept strings and return a type appropriate to the function. |`REPLACE(expr, substring, replacement)`|Replaces instances of `substring` in `expr` with `replacement` and returns the result.| |`REPEAT(expr, N)`|Repeats `expr` `N` times.| |`REVERSE(expr)`|Reverses `expr`.| -|`STRING_FORMAT(pattern[, args...])`|Returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/String.html#format(java.lang.String,java.lang.Object...)).| +|`STRING_FORMAT(pattern[, args...])`|Returns a string formatted in the manner of Java's [String.format](https://docs.oracle.com/en/java/javase/17/docs/api/java.base/java/lang/String.html#format(java.lang.String,java.lang.Object...)).| |`STRPOS(expr, substring)`|Returns the index of `substring` within `expr`, with indexes starting from 1. 
If `substring` is not found, returns 0.| |`SUBSTRING(expr, index[, length])`|Returns a substring of `expr` starting at a given one-based index. If `length` is omitted, extracts characters to the end of the string, otherwise returns a substring of `length` UTF-16 characters.| |`SUBSTR(expr, index[, length])`|Alias for `SUBSTRING`.| From bdf67904f53b6f094f5563364477b4e8d398bd38 Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 2 Sep 2025 16:44:51 -0500 Subject: [PATCH 26/42] Unify approach to jetty handler for avatica json and protobuf --- .../sql/avatica/DruidAvaticaJsonHandler.java | 69 ++++++++++--------- .../avatica/DruidAvaticaProtobufHandler.java | 1 - .../avatica/DruidAvaticaJsonHandlerTest.java | 28 ++++---- .../DruidAvaticaProtobufHandlerTest.java | 4 +- 4 files changed, 54 insertions(+), 48 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java index 1af3b88863a0..3c866a24cfd6 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java @@ -84,46 +84,51 @@ public DruidAvaticaJsonHandler( public boolean handle(Request request, Response response, Callback callback) throws Exception { String requestURI = request.getHttpURI().getPath(); - if (AVATICA_PATH_NO_TRAILING_SLASH.equals(StringUtils.maybeRemoveTrailingSlash(requestURI))) { - try (Timer.Context ctx = requestTimer.start()) { + try(Timer.Context ctx = this.requestTimer.start()) { + if (AVATICA_PATH_NO_TRAILING_SLASH.equals(StringUtils.maybeRemoveTrailingSlash(requestURI))) { response.getHeaders().put("Content-Type", "application/json;charset=utf-8"); - if ("POST".equals(request.getMethod())) { - String rawRequest = request.getHeaders().get("request"); - if (rawRequest == null) { - // Avoid a new buffer creation for every HTTP request - final UnsynchronizedBuffer buffer = 
threadLocalBuffer.get(); - try (InputStream inputStream = Content.Source.asInputStream(request)) { - byte[] bytes = AvaticaUtils.readFullyToBytes(inputStream, buffer); - String encoding = request.getHeaders().get("Content-Encoding"); - if (encoding == null) { - encoding = "UTF-8"; - } - rawRequest = AvaticaUtils.newString(bytes, encoding); - } - finally { - // Reset the offset into the buffer after we're done - buffer.reset(); - } - } - final String jsonRequest = rawRequest; - LOG.trace("request: %s", jsonRequest); + if (!"POST".equals(request.getMethod())) { + response.setStatus(400); // HttpServletResponse.SC_BAD_REQUEST + response.write( + true, + ByteBuffer.wrap("This server expects only POST calls.".getBytes(StandardCharsets.UTF_8)), callback + ); + return true; + } - org.apache.calcite.avatica.remote.Handler.HandlerResponse jsonResponse; - try { - jsonResponse = jsonHandler.apply(jsonRequest); + String rawRequest = request.getHeaders().get("request"); + if (rawRequest == null) { + // Avoid a new buffer creation for every HTTP request + final UnsynchronizedBuffer buffer = threadLocalBuffer.get(); + try (InputStream inputStream = Content.Source.asInputStream(request)) { + byte[] bytes = AvaticaUtils.readFullyToBytes(inputStream, buffer); + String encoding = request.getHeaders().get("Content-Encoding"); + if (encoding == null) { + encoding = "UTF-8"; + } + rawRequest = AvaticaUtils.newString(bytes, encoding); } - catch (Exception e) { - LOG.debug(e, "Error invoking request"); - jsonResponse = jsonHandler.convertToErrorResponse(e); + finally { + // Reset the offset into the buffer after we're done + buffer.reset(); } + } + final String jsonRequest = rawRequest; + LOG.trace("request: %s", jsonRequest); - LOG.trace("response: %s", jsonResponse); - response.setStatus(jsonResponse.getStatusCode()); - response.write(true, ByteBuffer.wrap(jsonResponse.getResponse().getBytes(StandardCharsets.UTF_8)), callback); + org.apache.calcite.avatica.remote.Handler.HandlerResponse 
jsonResponse; + try { + jsonResponse = jsonHandler.apply(jsonRequest); + } + catch (Exception e) { + LOG.debug(e, "Error invoking request"); + jsonResponse = jsonHandler.convertToErrorResponse(e); } - callback.succeeded(); + LOG.trace("response: %s", jsonResponse); + response.setStatus(jsonResponse.getStatusCode()); + response.write(true, ByteBuffer.wrap(jsonResponse.getResponse().getBytes(StandardCharsets.UTF_8)), callback); return true; } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java index e6626d0a66b1..d34e5b49aa72 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java @@ -108,7 +108,6 @@ public boolean handle(Request request, Response response, Callback callback) thr org.apache.calcite.avatica.remote.Handler.HandlerResponse handlerResponse; try { - // For now, skip impersonation to get basic compilation working handlerResponse = pbHandler.apply(requestBytes); } catch (Exception e) { diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java index 86331f9c3606..99e7a2d6477d 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java @@ -21,15 +21,16 @@ import org.apache.druid.server.DruidNode; import org.easymock.EasyMock; -import org.eclipse.jetty.http.HttpFields; import org.eclipse.jetty.http.HttpURI; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; +import 
java.nio.ByteBuffer; + public class DruidAvaticaJsonHandlerTest extends DruidAvaticaHandlerTest { @Override @@ -49,10 +50,10 @@ protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) } @Test - public void testNonPostRequestSucceeds() throws Exception + public void testNonPostRequestReturns400() throws Exception { DruidMeta druidMeta = EasyMock.mock(DruidMeta.class); - DruidAvaticaJsonHandler handler = new DruidAvaticaJsonHandler( + DruidAvaticaProtobufHandler handler = new DruidAvaticaProtobufHandler( druidMeta, new DruidNode("dummy", "dummy", false, 1, null, true, false), new AvaticaMonitor() @@ -62,25 +63,26 @@ public void testNonPostRequestSucceeds() throws Exception Response response = EasyMock.mock(Response.class); Callback callback = EasyMock.mock(Callback.class); HttpURI httpURI = EasyMock.mock(HttpURI.class); - HttpFields.Mutable headers = EasyMock.mock(HttpFields.Mutable.class); EasyMock.expect(request.getHttpURI()).andReturn(httpURI); - EasyMock.expect(httpURI.getPath()).andReturn(DruidAvaticaJsonHandler.AVATICA_PATH_NO_TRAILING_SLASH); + EasyMock.expect(httpURI.getPath()).andReturn(DruidAvaticaProtobufHandler.AVATICA_PATH_NO_TRAILING_SLASH); EasyMock.expect(request.getMethod()).andReturn("GET"); - EasyMock.expect(response.getHeaders()).andReturn(headers); - - headers.put("Content-Type", "application/json;charset=utf-8"); - EasyMock.expectLastCall().andReturn(null); - - callback.succeeded(); + response.setStatus(400); + EasyMock.expectLastCall(); + + response.write( + EasyMock.eq(true), + EasyMock.anyObject(ByteBuffer.class), + EasyMock.eq(callback) + ); EasyMock.expectLastCall(); EasyMock.replay(request, response, callback, httpURI); boolean handled = handler.handle(request, response, callback); - Assert.assertTrue("Handler should have handled the request", handled); + Assertions.assertTrue(handled, "Handler should have handled the request"); EasyMock.verify(request, response, callback, httpURI); } } diff --git 
a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java index 88bdb2f296a3..0b75b43a440f 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java @@ -27,7 +27,7 @@ import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; -import org.junit.Assert; +import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import java.nio.ByteBuffer; @@ -86,7 +86,7 @@ public void testNonPostRequestReturns400() throws Exception boolean handled = handler.handle(request, response, callback); - Assert.assertTrue("Handler should have handled the request", handled); + Assertions.assertTrue(handled, "Handler should have handled the request"); EasyMock.verify(request, response, callback, httpURI); } } From 99b0ebc7bb4d599738d65422dbf76b80b225d38f Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 2 Sep 2025 16:47:43 -0500 Subject: [PATCH 27/42] use 405 response code for avatica jdbc non post requests --- .../org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java | 2 +- .../apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java | 2 +- .../apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java | 4 ++-- .../druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java index 3c866a24cfd6..d3dd30264a43 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java @@ -89,7 +89,7 @@ public boolean handle(Request request, Response response, 
Callback callback) thr response.getHeaders().put("Content-Type", "application/json;charset=utf-8"); if (!"POST".equals(request.getMethod())) { - response.setStatus(400); // HttpServletResponse.SC_BAD_REQUEST + response.setStatus(405); response.write( true, ByteBuffer.wrap("This server expects only POST calls.".getBytes(StandardCharsets.UTF_8)), callback diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java index d34e5b49aa72..7082060da351 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java @@ -87,7 +87,7 @@ public boolean handle(Request request, Response response, Callback callback) thr if (AVATICA_PATH_NO_TRAILING_SLASH.equals(StringUtils.maybeRemoveTrailingSlash(requestURI))) { try (Timer.Context ctx = this.requestTimer.start()) { if (!"POST".equals(request.getMethod())) { - response.setStatus(400); // HttpServletResponse.SC_BAD_REQUEST + response.setStatus(405); response.write( true, ByteBuffer.wrap("This server expects only POST calls.".getBytes(StandardCharsets.UTF_8)), callback diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java index 99e7a2d6477d..452c2c3234d2 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java @@ -50,7 +50,7 @@ protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) } @Test - public void testNonPostRequestReturns400() throws Exception + public void testNonPostRequestReturns405() throws Exception { DruidMeta druidMeta = EasyMock.mock(DruidMeta.class); DruidAvaticaProtobufHandler handler = new DruidAvaticaProtobufHandler( @@ -68,7 +68,7 
@@ public void testNonPostRequestReturns400() throws Exception EasyMock.expect(httpURI.getPath()).andReturn(DruidAvaticaProtobufHandler.AVATICA_PATH_NO_TRAILING_SLASH); EasyMock.expect(request.getMethod()).andReturn("GET"); - response.setStatus(400); + response.setStatus(405); EasyMock.expectLastCall(); response.write( diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java index 0b75b43a440f..d0c146795e69 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java @@ -54,7 +54,7 @@ protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) } @Test - public void testNonPostRequestReturns400() throws Exception + public void testNonPostRequestReturns405() throws Exception { DruidMeta druidMeta = EasyMock.mock(DruidMeta.class); DruidAvaticaProtobufHandler handler = new DruidAvaticaProtobufHandler( @@ -72,7 +72,7 @@ public void testNonPostRequestReturns400() throws Exception EasyMock.expect(httpURI.getPath()).andReturn(DruidAvaticaProtobufHandler.AVATICA_PATH_NO_TRAILING_SLASH); EasyMock.expect(request.getMethod()).andReturn("GET"); - response.setStatus(400); + response.setStatus(405); EasyMock.expectLastCall(); response.write( From 7aa43aac57e1fa39d873f899cde16bdbdc9c4c8a Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 2 Sep 2025 17:28:03 -0500 Subject: [PATCH 28/42] fix checkstyle --- .../org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java index d3dd30264a43..74dc93806f9f 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java +++ 
b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java @@ -84,7 +84,7 @@ public DruidAvaticaJsonHandler( public boolean handle(Request request, Response response, Callback callback) throws Exception { String requestURI = request.getHttpURI().getPath(); - try(Timer.Context ctx = this.requestTimer.start()) { + try (Timer.Context ctx = this.requestTimer.start()) { if (AVATICA_PATH_NO_TRAILING_SLASH.equals(StringUtils.maybeRemoveTrailingSlash(requestURI))) { response.getHeaders().put("Content-Type", "application/json;charset=utf-8"); From de76acc9a4e1b02acb734fa73a8df64d1cfde426 Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 9 Sep 2025 11:40:03 -0500 Subject: [PATCH 29/42] Add link for new jetty related config --- docs/configuration/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/configuration/index.md b/docs/configuration/index.md index 7bd8e2ab6be2..f364314bf0af 100644 --- a/docs/configuration/index.md +++ b/docs/configuration/index.md @@ -1496,7 +1496,7 @@ Druid uses Jetty to serve HTTP requests. |`druid.server.http.enableForwardedRequestCustomizer`|If enabled, adds Jetty ForwardedRequestCustomizer which reads X-Forwarded-* request headers to manipulate servlet request object when Druid is used behind a proxy.|false| |`druid.server.http.allowedHttpMethods`|List of HTTP methods that should be allowed in addition to the ones required by Druid APIs. Druid APIs require GET, PUT, POST, and DELETE, which are always allowed. This option is not useful unless you have installed an extension that needs these additional HTTP methods or that adds functionality related to CORS. None of Druid's bundled extensions require these methods.|`[]`| |`druid.server.http.contentSecurityPolicy`|Content-Security-Policy header value to set on each non-POST response. 
Setting this property to an empty string, or omitting it, both result in the default `frame-ancestors: none` being set.|`frame-ancestors 'none'`| -|`druid.server.http.uriCompliance`|Jetty `UriCompliance` mode for Druid's embedded Jetty servers. To modify, override this config with the string representation of any `UriCompliance` mode that Jetty supports.|LEGACY| +|`druid.server.http.uriCompliance`|Jetty `UriCompliance` mode for Druid's embedded Jetty servers. To modify, override this config with the string representation of any `UriCompliance` mode that [Jetty supports](https://javadoc.jetty.org/jetty-12/org/eclipse/jetty/http/UriCompliance.html).|LEGACY| #### Indexer processing resources From dec75f08ad62abfbe2109e968a3aeba96153c653 Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 9 Sep 2025 11:40:40 -0500 Subject: [PATCH 30/42] fix grammar in java support doc --- docs/operations/java.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/operations/java.md b/docs/operations/java.md index fc7faa60f24c..765122e107f7 100644 --- a/docs/operations/java.md +++ b/docs/operations/java.md @@ -27,7 +27,7 @@ a Java runtime for Druid. ## Selecting a Java runtime -Druid officially support Java 17. +Druid officially supports Java 17. The project team recommends using an OpenJDK-based Java distribution. 
There are many free and actively-supported distributions available, including From bafb2fc6886639247ab20cf315ef44dbfd1ad6eb Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 9 Sep 2025 16:36:32 -0500 Subject: [PATCH 31/42] Make the legacy ITs easier to run in a wider variety of docker steps --- integration-tests/script/docker_run_cluster.sh | 15 ++++++++++++--- integration-tests/stop_cluster.sh | 13 +++++++++++-- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/integration-tests/script/docker_run_cluster.sh b/integration-tests/script/docker_run_cluster.sh index 775e56d6de52..289867dea9e2 100755 --- a/integration-tests/script/docker_run_cluster.sh +++ b/integration-tests/script/docker_run_cluster.sh @@ -14,6 +14,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Determine if docker compose is available. If not, assume Docker supports +# the compose subcommand +set +e +if which docker-compose > /dev/null +then + DOCKER_COMPOSE='docker-compose' +else + DOCKER_COMPOSE='docker compose' +fi set -e . 
$(dirname "$0")/docker_compose_args.sh @@ -41,17 +50,17 @@ fi if [ -n "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" ] && [ "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" == true ] then # Start Hadoop docker container - docker compose -f ${DOCKERDIR}/docker-compose.druid-hadoop.yml up -d + $DOCKER_COMPOSE -f ${DOCKERDIR}/docker-compose.druid-hadoop.yml up -d fi if [ -z "$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH" ] then # Start Druid cluster echo "Starting cluster with empty config" - OVERRIDE_ENV=environment-configs/empty-config docker compose $(getComposeArgs) up -d + OVERRIDE_ENV=environment-configs/empty-config $DOCKER_COMPOSE $(getComposeArgs) up -d else # run druid cluster with override config echo "Starting cluster with a config file at $DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH" - OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH docker compose $(getComposeArgs) up -d + OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH $DOCKER_COMPOSE $(getComposeArgs) up -d fi } diff --git a/integration-tests/stop_cluster.sh b/integration-tests/stop_cluster.sh index 5dcabbb83211..20260dd328c1 100755 --- a/integration-tests/stop_cluster.sh +++ b/integration-tests/stop_cluster.sh @@ -14,6 +14,15 @@ # See the License for the specific language governing permissions and # limitations under the License. +# Determine if docker compose is available. If not, assume Docker supports +# the compose subcommand +set +e +if which docker-compose > /dev/null +then + DOCKER_COMPOSE='docker-compose' +else + DOCKER_COMPOSE='docker compose' +fi set -e . 
$(dirname "$0")/script/docker_compose_args.sh @@ -39,9 +48,9 @@ fi # bring down using the same compose args we started with if [ -z "$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH" ] then - OVERRIDE_ENV=environment-configs/empty-config docker compose $(getComposeArgs) down + OVERRIDE_ENV=environment-configs/empty-config $DOCKER_COMPOSE $(getComposeArgs) down else - OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH docker compose $(getComposeArgs) down + OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH $DOCKER_COMPOSE $(getComposeArgs) down fi if [ ! -z "$(docker network ls -q -f name=druid-it-net)" ] From 4ba80aad95067c916e3fd649d2ae493964aa4fdf Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 9 Sep 2025 16:38:52 -0500 Subject: [PATCH 32/42] Cleanup some of the Injector initialization code for hadoop tasks, based on review --- .../indexer/HadoopDruidIndexerConfig.java | 10 +-- .../initialization/CoreInjectorBuilder.java | 45 +----------- .../druid/initialization/Initialization.java | 12 ---- .../initialization/ServerInjectorBuilder.java | 70 ++++++++----------- 4 files changed, 37 insertions(+), 100 deletions(-) diff --git a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java index ca44a0fc2749..664b7e5695f6 100644 --- a/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java +++ b/indexing-hadoop/src/main/java/org/apache/druid/indexer/HadoopDruidIndexerConfig.java @@ -29,19 +29,20 @@ import com.google.common.base.Preconditions; import com.google.common.base.Splitter; import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.Module; import org.apache.druid.data.input.InputRow; import org.apache.druid.data.input.impl.InputRowParser; 
-import org.apache.druid.guice.GuiceInjectors; import org.apache.druid.guice.JsonConfigProvider; +import org.apache.druid.guice.StartupInjectorBuilder; import org.apache.druid.guice.annotations.Self; import org.apache.druid.indexer.granularity.GranularitySpec; import org.apache.druid.indexer.partitions.DimensionBasedPartitionsSpec; import org.apache.druid.indexer.path.PathSpec; -import org.apache.druid.initialization.Initialization; +import org.apache.druid.initialization.ServerInjectorBuilder; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.JodaUtils; import org.apache.druid.java.util.common.StringUtils; @@ -106,8 +107,9 @@ public class HadoopDruidIndexerConfig public static final Properties PROPERTIES; static { - INJECTOR = Initialization.makeInjectorWithoutJettyModules( - GuiceInjectors.makeStartupInjector(), + INJECTOR = ServerInjectorBuilder.makeServerInjectorWithoutJettyModules( + new StartupInjectorBuilder().forServer().build(), + ImmutableSet.of(), ImmutableList.of( (Module) binder -> { JsonConfigProvider.bindInstance( diff --git a/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java b/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java index 5e09059ed55f..92db1824a8ab 100644 --- a/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java +++ b/server/src/main/java/org/apache/druid/initialization/CoreInjectorBuilder.java @@ -95,53 +95,14 @@ public CoreInjectorBuilder withLifecycle() public CoreInjectorBuilder forServer() { - withLogging(); - withLifecycle(); - add( - ExtensionsModule.SecondaryModule.class, - new DruidAuthModule(), - new PolicyModule(), - TLSCertificateCheckerModule.class, - EmitterModule.class, - HttpClientModule.global(), - HttpClientModule.escalatedGlobal(), - new HttpClientModule("druid.broker.http", Client.class, true), - new HttpClientModule("druid.broker.http", EscalatedClient.class, true), - new CuratorModule(), - new 
AnnouncerModule(), - new MetricsModule(), - new SegmentWriteOutMediumModule(), - new ServerModule(), - new StorageNodeModule(), - new JettyServerModule(), - new ExpressionModule(), - new BuiltInTypesModule(), - new DiscoveryModule(), - new ServerViewModule(), - new MetadataConfigModule(), - new DerbyMetadataStorageDruidModule(), - new JacksonConfigManagerModule(), - new LocalDataStorageDruidModule(), - new TombstoneDataStorageModule(), - new JavaScriptModule(), - new AuthenticatorModule(), - new AuthenticatorMapperModule(), - new EscalatorModule(), - new AuthorizerModule(), - new AuthorizerMapperModule(), - new StartupLoggingModule(), - new ExternalStorageAccessSecurityModule(), - new ServiceClientModule(), - new StorageConnectorModule(), - new CatalogCoreModule() - ); + forServerWithoutJetty(); + add(JettyServerModule.class); return this; } /** - * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 + * Needed for Hadoop indexing that needs server-like Injector but can't run jetty 12 */ - @Deprecated public CoreInjectorBuilder forServerWithoutJetty() { withLogging(); diff --git a/server/src/main/java/org/apache/druid/initialization/Initialization.java b/server/src/main/java/org/apache/druid/initialization/Initialization.java index acdef3118e2f..adf3136d3b1d 100644 --- a/server/src/main/java/org/apache/druid/initialization/Initialization.java +++ b/server/src/main/java/org/apache/druid/initialization/Initialization.java @@ -62,16 +62,4 @@ public static Injector makeInjectorWithModules( { return ServerInjectorBuilder.makeServerInjector(baseInjector, ImmutableSet.of(), modules); } - - /** - * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 - */ - @Deprecated - public static Injector makeInjectorWithoutJettyModules( - final Injector baseInjector, - final Iterable modules - ) - { - return ServerInjectorBuilder.makeServerInjectorWithoutJettyModules(baseInjector, 
ImmutableSet.of(), modules); - } } diff --git a/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java b/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java index d6402fc50617..5751a700cd04 100644 --- a/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java +++ b/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java @@ -74,9 +74,8 @@ public static Injector makeServerInjector( } /** - * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 + * Needed for Hadoop indexing that needs server-like Injector but can't run jetty 12 */ - @Deprecated @VisibleForTesting public static Injector makeServerInjectorWithoutJettyModules( final Injector baseInjector, @@ -109,36 +108,32 @@ public ServerInjectorBuilder serviceModules(final Iterable mod public Injector build() { - Preconditions.checkNotNull(baseInjector); - Preconditions.checkNotNull(nodeRoles); - - Module registerNodeRoleModule = registerNodeRoleModule(nodeRoles); - - // Child injector, with the registered node roles - Injector childInjector = baseInjector.createChildInjector(registerNodeRoleModule); - - // Create the core set of modules shared by all services. - // Here and below, the modules are filtered by the load modules list and - // the set of roles which this server provides. - CoreInjectorBuilder coreBuilder = new CoreInjectorBuilder(childInjector, nodeRoles).forServer(); - - // Override with the per-service modules. - ServiceInjectorBuilder serviceBuilder = (ServiceInjectorBuilder) new ServiceInjectorBuilder(coreBuilder).addAll( - Iterables.concat( - // bind nodeRoles for the new injector as well - ImmutableList.of(registerNodeRoleModule), - modules - ) - ); - - // Override again with extensions. 
- return new ExtensionInjectorBuilder(serviceBuilder).build(); + return this.build(true); } /** - * Rough bridge solution for Hadoop indexing that needs server-like Injector but can't run jetty 12 + * Needed for Hadoop indexing that needs server-like Injector but can't run jetty 12 */ public Injector buildWithoutJettyModules() + { + return this.build(false); + } + + public static Module registerNodeRoleModule(Set nodeRoles) + { + return binder -> { + Multibinder selfBinder = Multibinder.newSetBinder(binder, NodeRole.class, Self.class); + nodeRoles.forEach(nodeRole -> selfBinder.addBinding().toInstance(nodeRole)); + + MapBinder.newMapBinder( + binder, + new TypeLiteral(){}, + new TypeLiteral>>(){} + ); + }; + } + + private Injector build(boolean withJettyModules) { Preconditions.checkNotNull(baseInjector); Preconditions.checkNotNull(nodeRoles); @@ -151,7 +146,12 @@ public Injector buildWithoutJettyModules() // Create the core set of modules shared by all services. // Here and below, the modules are filtered by the load modules list and // the set of roles which this server provides. - CoreInjectorBuilder coreBuilder = new CoreInjectorBuilder(childInjector, nodeRoles).forServerWithoutJetty(); + CoreInjectorBuilder coreBuilder; + if (withJettyModules) { + coreBuilder = new CoreInjectorBuilder(childInjector, nodeRoles).forServer(); + } else { + coreBuilder = new CoreInjectorBuilder(childInjector, nodeRoles).forServerWithoutJetty(); + } // Override with the per-service modules. ServiceInjectorBuilder serviceBuilder = (ServiceInjectorBuilder) new ServiceInjectorBuilder(coreBuilder).addAll( @@ -165,18 +165,4 @@ public Injector buildWithoutJettyModules() // Override again with extensions. 
return new ExtensionInjectorBuilder(serviceBuilder).build(); } - - public static Module registerNodeRoleModule(Set nodeRoles) - { - return binder -> { - Multibinder selfBinder = Multibinder.newSetBinder(binder, NodeRole.class, Self.class); - nodeRoles.forEach(nodeRole -> selfBinder.addBinding().toInstance(nodeRole)); - - MapBinder.newMapBinder( - binder, - new TypeLiteral(){}, - new TypeLiteral>>(){} - ); - }; - } } From 36949b145c482c558055510d8507f3ce7c2b783a Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 9 Sep 2025 16:49:07 -0500 Subject: [PATCH 33/42] Starting point for creating a DruidAvaticaHandler base class --- .../org/apache/druid/sql/avatica/DruidAvaticaHandler.java | 8 ++++++++ .../apache/druid/sql/avatica/DruidAvaticaJsonHandler.java | 2 +- .../druid/sql/avatica/DruidAvaticaProtobufHandler.java | 2 +- .../org/apache/druid/quidem/DruidAvaticaTestDriver.java | 4 ++-- 4 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java new file mode 100644 index 000000000000..83abb83d951c --- /dev/null +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java @@ -0,0 +1,8 @@ +package org.apache.druid.sql.avatica; + +import org.apache.calcite.avatica.server.MetricsAwareAvaticaHandler; +import org.eclipse.jetty.server.Handler; + +public abstract class DruidAvaticaHandler extends Handler.Abstract implements MetricsAwareAvaticaHandler +{ +} diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java index 74dc93806f9f..e12194c67d52 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java @@ 
-45,7 +45,7 @@ import java.nio.charset.StandardCharsets; import java.util.Objects; -public class DruidAvaticaJsonHandler extends Handler.Abstract implements MetricsAwareAvaticaHandler +public class DruidAvaticaJsonHandler extends DruidAvaticaHandler { private static final Logger LOG = new Logger(DruidAvaticaJsonHandler.class); diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java index 7082060da351..3bd1b96ab6d4 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java @@ -46,7 +46,7 @@ import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -public class DruidAvaticaProtobufHandler extends Handler.Abstract implements MetricsAwareAvaticaHandler +public class DruidAvaticaProtobufHandler extends DruidAvaticaHandler { private static final Logger LOG = new Logger(DruidAvaticaProtobufHandler.class); diff --git a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java index cd5a6389fa70..a586358e1262 100644 --- a/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java +++ b/sql/src/test/java/org/apache/druid/quidem/DruidAvaticaTestDriver.java @@ -31,6 +31,7 @@ import org.apache.druid.server.DruidNode; import org.apache.druid.server.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.sql.avatica.AvaticaMonitor; +import org.apache.druid.sql.avatica.DruidAvaticaHandler; import org.apache.druid.sql.avatica.DruidAvaticaJsonHandler; import org.apache.druid.sql.avatica.DruidMeta; import org.apache.druid.sql.calcite.SqlTestFrameworkConfig; @@ -41,7 +42,6 @@ import org.apache.druid.sql.calcite.util.SqlTestFramework.QueryComponentSupplierDelegate; import org.apache.druid.sql.hook.DruidHookDispatcher; import 
org.apache.http.client.utils.URIBuilder; -import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import java.io.Closeable; @@ -185,7 +185,7 @@ public void close() } } - protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) + protected DruidAvaticaHandler getAvaticaHandler(final DruidMeta druidMeta) { return new DruidAvaticaJsonHandler( druidMeta, From f154b04a50c0a652e1adaa6a743812ef116c93b5 Mon Sep 17 00:00:00 2001 From: capistrant Date: Tue, 9 Sep 2025 17:01:52 -0500 Subject: [PATCH 34/42] Share a little bit of the code between the avatica handlers --- .../sql/avatica/DruidAvaticaHandler.java | 59 +++++++++++++++++++ .../sql/avatica/DruidAvaticaJsonHandler.java | 29 +-------- .../avatica/DruidAvaticaProtobufHandler.java | 29 +-------- 3 files changed, 64 insertions(+), 53 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java index 83abb83d951c..859544d36eaa 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java @@ -1,8 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.apache.druid.sql.avatica; +import org.apache.calcite.avatica.metrics.MetricsSystem; +import org.apache.calcite.avatica.metrics.Timer; +import org.apache.calcite.avatica.remote.LocalService; +import org.apache.calcite.avatica.remote.MetricsHelper; +import org.apache.calcite.avatica.remote.Service; import org.apache.calcite.avatica.server.MetricsAwareAvaticaHandler; +import org.apache.calcite.avatica.util.UnsynchronizedBuffer; +import org.apache.druid.guice.annotations.Self; +import org.apache.druid.server.DruidNode; import org.eclipse.jetty.server.Handler; public abstract class DruidAvaticaHandler extends Handler.Abstract implements MetricsAwareAvaticaHandler { + protected final Service service; + protected final MetricsSystem metrics; + protected final Timer requestTimer; + protected final ThreadLocal threadLocalBuffer; + + protected DruidAvaticaHandler( + final DruidMeta druidMeta, + @Self final DruidNode druidNode, + final AvaticaMonitor avaticaMonitor, + final Class timerClass + ) + { + this.service = new LocalService(druidMeta); + this.metrics = avaticaMonitor; + this.threadLocalBuffer = ThreadLocal.withInitial(UnsynchronizedBuffer::new); + this.requestTimer = this.metrics.getTimer( + MetricsHelper.concat(timerClass, MetricsAwareAvaticaHandler.REQUEST_TIMER_NAME) + ); + setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); + } + + @Override + public MetricsSystem getMetrics() + { + return metrics; + } + + @Override + public void setServerRpcMetadata(Service.RpcMetadataResponse metadata) + { + service.setRpcMetadata(metadata); + } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java index e12194c67d52..9b1265af2b62 100644 --- 
a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java @@ -20,21 +20,16 @@ package org.apache.druid.sql.avatica; import org.apache.calcite.avatica.AvaticaUtils; -import org.apache.calcite.avatica.metrics.MetricsSystem; import org.apache.calcite.avatica.metrics.Timer; import org.apache.calcite.avatica.remote.JsonHandler; -import org.apache.calcite.avatica.remote.LocalService; -import org.apache.calcite.avatica.remote.MetricsHelper; import org.apache.calcite.avatica.remote.Service; import org.apache.calcite.avatica.server.AvaticaJsonHandler; -import org.apache.calcite.avatica.server.MetricsAwareAvaticaHandler; import org.apache.calcite.avatica.util.UnsynchronizedBuffer; import org.apache.druid.guice.annotations.Self; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.server.DruidNode; import org.eclipse.jetty.io.Content; -import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; @@ -53,11 +48,7 @@ public class DruidAvaticaJsonHandler extends DruidAvaticaHandler public static final String AVATICA_PATH = AVATICA_PATH_NO_TRAILING_SLASH + "/"; - private final Service service; - private final MetricsSystem metrics; private final JsonHandler jsonHandler; - private final Timer requestTimer; - private final ThreadLocal threadLocalBuffer; @Inject public DruidAvaticaJsonHandler( @@ -66,18 +57,8 @@ public DruidAvaticaJsonHandler( final AvaticaMonitor avaticaMonitor ) { - super(); - this.service = new LocalService(druidMeta); - this.metrics = Objects.requireNonNull(avaticaMonitor); + super(druidMeta, druidNode, Objects.requireNonNull(avaticaMonitor), AvaticaJsonHandler.class); this.jsonHandler = new JsonHandler(service, this.metrics); - - // Metrics - this.requestTimer = 
this.metrics.getTimer( - MetricsHelper.concat(AvaticaJsonHandler.class, MetricsAwareAvaticaHandler.REQUEST_TIMER_NAME)); - - this.threadLocalBuffer = ThreadLocal.withInitial(UnsynchronizedBuffer::new); - - setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); } @Override @@ -135,16 +116,10 @@ public boolean handle(Request request, Response response, Callback callback) thr return false; } - @Override - public MetricsSystem getMetrics() - { - return metrics; - } - @Override public void setServerRpcMetadata(Service.RpcMetadataResponse metadata) { - service.setRpcMetadata(metadata); + super.setServerRpcMetadata(metadata); jsonHandler.setRpcMetadata(metadata); } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java index 3bd1b96ab6d4..0f56db1029d3 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java @@ -20,23 +20,18 @@ package org.apache.druid.sql.avatica; import org.apache.calcite.avatica.AvaticaUtils; -import org.apache.calcite.avatica.metrics.MetricsSystem; import org.apache.calcite.avatica.metrics.Timer; -import org.apache.calcite.avatica.remote.LocalService; -import org.apache.calcite.avatica.remote.MetricsHelper; import org.apache.calcite.avatica.remote.ProtobufHandler; import org.apache.calcite.avatica.remote.ProtobufTranslation; import org.apache.calcite.avatica.remote.ProtobufTranslationImpl; import org.apache.calcite.avatica.remote.Service; import org.apache.calcite.avatica.server.AvaticaProtobufHandler; -import org.apache.calcite.avatica.server.MetricsAwareAvaticaHandler; import org.apache.calcite.avatica.util.UnsynchronizedBuffer; import org.apache.druid.guice.annotations.Self; import org.apache.druid.java.util.common.StringUtils; import 
org.apache.druid.java.util.common.logger.Logger; import org.apache.druid.server.DruidNode; import org.eclipse.jetty.io.Content; -import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; @@ -54,11 +49,7 @@ public class DruidAvaticaProtobufHandler extends DruidAvaticaHandler public static final String AVATICA_PATH_NO_TRAILING_SLASH = "/druid/v2/sql/avatica-protobuf"; public static final String AVATICA_PATH = AVATICA_PATH_NO_TRAILING_SLASH + "/"; - private final Service service; private final ProtobufHandler pbHandler; - private final MetricsSystem metrics; - private final Timer requestTimer; - private final ThreadLocal threadLocalBuffer; @Inject public DruidAvaticaProtobufHandler( @@ -67,17 +58,9 @@ public DruidAvaticaProtobufHandler( final AvaticaMonitor metrics ) { - super(); - this.service = new LocalService(druidMeta); + super(druidMeta, druidNode, metrics, AvaticaProtobufHandler.class); ProtobufTranslation protobufTranslation = new ProtobufTranslationImpl(); - this.pbHandler = new ProtobufHandler(service, protobufTranslation, metrics); - this.metrics = metrics; - this.threadLocalBuffer = ThreadLocal.withInitial(UnsynchronizedBuffer::new); - this.requestTimer = this.metrics.getTimer( - MetricsHelper.concat( - AvaticaProtobufHandler.class, - MetricsAwareAvaticaHandler.REQUEST_TIMER_NAME)); - setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); + this.pbHandler = new ProtobufHandler(service, protobufTranslation, this.metrics); } @Override @@ -123,16 +106,10 @@ public boolean handle(Request request, Response response, Callback callback) thr return false; } - @Override - public MetricsSystem getMetrics() - { - return metrics; - } - @Override public void setServerRpcMetadata(Service.RpcMetadataResponse metadata) { - service.setRpcMetadata(metadata); + super.setServerRpcMetadata(metadata); pbHandler.setRpcMetadata(metadata); } } 
From 4506aacae60970fd635d27c0b8dc757b86917c8d Mon Sep 17 00:00:00 2001 From: capistrant Date: Wed, 10 Sep 2025 09:27:10 -0500 Subject: [PATCH 35/42] Work to fix AvaticaHandler constructor issues --- .../org/apache/druid/sql/avatica/DruidAvaticaHandler.java | 1 - .../apache/druid/sql/avatica/DruidAvaticaJsonHandler.java | 5 ++++- .../druid/sql/avatica/DruidAvaticaProtobufHandler.java | 5 ++++- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java index 859544d36eaa..210baa1779e0 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java @@ -50,7 +50,6 @@ protected DruidAvaticaHandler( this.requestTimer = this.metrics.getTimer( MetricsHelper.concat(timerClass, MetricsAwareAvaticaHandler.REQUEST_TIMER_NAME) ); - setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); } @Override diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java index 9b1265af2b62..e312f91c141a 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java @@ -59,6 +59,7 @@ public DruidAvaticaJsonHandler( { super(druidMeta, druidNode, Objects.requireNonNull(avaticaMonitor), AvaticaJsonHandler.class); this.jsonHandler = new JsonHandler(service, this.metrics); + setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); } @Override @@ -120,6 +121,8 @@ public boolean handle(Request request, Response response, Callback callback) thr public void setServerRpcMetadata(Service.RpcMetadataResponse metadata) { super.setServerRpcMetadata(metadata); - 
jsonHandler.setRpcMetadata(metadata); + if (jsonHandler != null) { + jsonHandler.setRpcMetadata(metadata); + } } } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java index 0f56db1029d3..19748d61f0fd 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java @@ -61,6 +61,7 @@ public DruidAvaticaProtobufHandler( super(druidMeta, druidNode, metrics, AvaticaProtobufHandler.class); ProtobufTranslation protobufTranslation = new ProtobufTranslationImpl(); this.pbHandler = new ProtobufHandler(service, protobufTranslation, this.metrics); + setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); } @Override @@ -110,6 +111,8 @@ public boolean handle(Request request, Response response, Callback callback) thr public void setServerRpcMetadata(Service.RpcMetadataResponse metadata) { super.setServerRpcMetadata(metadata); - pbHandler.setRpcMetadata(metadata); + if (pbHandler != null) { + pbHandler.setRpcMetadata(metadata); + } } } From d2ac8b7c39c968ae04792808a4351322b7fa18c6 Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 11 Sep 2025 15:28:45 -0500 Subject: [PATCH 36/42] Fix codeql flag --- .../java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java | 1 - .../org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java | 2 +- .../apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java | 2 +- 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java index 210baa1779e0..bd5edf9720bf 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java @@ -39,7 +39,6 
@@ public abstract class DruidAvaticaHandler extends Handler.Abstract implements Me protected DruidAvaticaHandler( final DruidMeta druidMeta, - @Self final DruidNode druidNode, final AvaticaMonitor avaticaMonitor, final Class timerClass ) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java index e312f91c141a..f6d0738e2321 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandler.java @@ -57,7 +57,7 @@ public DruidAvaticaJsonHandler( final AvaticaMonitor avaticaMonitor ) { - super(druidMeta, druidNode, Objects.requireNonNull(avaticaMonitor), AvaticaJsonHandler.class); + super(druidMeta, Objects.requireNonNull(avaticaMonitor), AvaticaJsonHandler.class); this.jsonHandler = new JsonHandler(service, this.metrics); setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); } diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java index 19748d61f0fd..a054f0b85e26 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java @@ -58,7 +58,7 @@ public DruidAvaticaProtobufHandler( final AvaticaMonitor metrics ) { - super(druidMeta, druidNode, metrics, AvaticaProtobufHandler.class); + super(druidMeta, metrics, AvaticaProtobufHandler.class); ProtobufTranslation protobufTranslation = new ProtobufTranslationImpl(); this.pbHandler = new ProtobufHandler(service, protobufTranslation, this.metrics); setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); From 2ada34fdef4a94949245f621cf75a497db8b7182 Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 11 Sep 2025 16:10:33 
-0500 Subject: [PATCH 37/42] Revert "Make the legacy ITs easier to run in a wider variety of docker steps" This reverts commit bafb2fc6886639247ab20cf315ef44dbfd1ad6eb. --- integration-tests/script/docker_run_cluster.sh | 15 +++------------ integration-tests/stop_cluster.sh | 13 ++----------- 2 files changed, 5 insertions(+), 23 deletions(-) diff --git a/integration-tests/script/docker_run_cluster.sh b/integration-tests/script/docker_run_cluster.sh index 289867dea9e2..775e56d6de52 100755 --- a/integration-tests/script/docker_run_cluster.sh +++ b/integration-tests/script/docker_run_cluster.sh @@ -14,15 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Determine if docker compose is available. If not, assume Docker supports -# the compose subcommand -set +e -if which docker-compose > /dev/null -then - DOCKER_COMPOSE='docker-compose' -else - DOCKER_COMPOSE='docker compose' -fi set -e . $(dirname "$0")/docker_compose_args.sh @@ -50,17 +41,17 @@ fi if [ -n "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" ] && [ "$DRUID_INTEGRATION_TEST_START_HADOOP_DOCKER" == true ] then # Start Hadoop docker container - $DOCKER_COMPOSE -f ${DOCKERDIR}/docker-compose.druid-hadoop.yml up -d + docker compose -f ${DOCKERDIR}/docker-compose.druid-hadoop.yml up -d fi if [ -z "$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH" ] then # Start Druid cluster echo "Starting cluster with empty config" - OVERRIDE_ENV=environment-configs/empty-config $DOCKER_COMPOSE $(getComposeArgs) up -d + OVERRIDE_ENV=environment-configs/empty-config docker compose $(getComposeArgs) up -d else # run druid cluster with override config echo "Starting cluster with a config file at $DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH" - OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH $DOCKER_COMPOSE $(getComposeArgs) up -d + OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH docker compose $(getComposeArgs) up -d fi } diff --git 
a/integration-tests/stop_cluster.sh b/integration-tests/stop_cluster.sh index 20260dd328c1..5dcabbb83211 100755 --- a/integration-tests/stop_cluster.sh +++ b/integration-tests/stop_cluster.sh @@ -14,15 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# Determine if docker compose is available. If not, assume Docker supports -# the compose subcommand -set +e -if which docker-compose > /dev/null -then - DOCKER_COMPOSE='docker-compose' -else - DOCKER_COMPOSE='docker compose' -fi set -e . $(dirname "$0")/script/docker_compose_args.sh @@ -48,9 +39,9 @@ fi # bring down using the same compose args we started with if [ -z "$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH" ] then - OVERRIDE_ENV=environment-configs/empty-config $DOCKER_COMPOSE $(getComposeArgs) down + OVERRIDE_ENV=environment-configs/empty-config docker compose $(getComposeArgs) down else - OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH $DOCKER_COMPOSE $(getComposeArgs) down + OVERRIDE_ENV=$DRUID_INTEGRATION_TEST_OVERRIDE_CONFIG_PATH docker compose $(getComposeArgs) down fi if [ ! 
-z "$(docker network ls -q -f name=druid-it-net)" ] From 24f4c139f1cecf6676c3f4b340666a9177008510 Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 11 Sep 2025 16:38:09 -0500 Subject: [PATCH 38/42] Add RequestLogger configuration back using proper jetty12 mechanism --- .../druid/server/initialization/jetty/JettyServerModule.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java index 9b7d9e336d30..887de2f35240 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java @@ -480,6 +480,9 @@ public boolean handle( }); } + log.info("Configuring Jetty Request log for server"); + server.setRequestLog(new JettyRequestLog()); + return server; } From a3bf74ad1996e96519c90ba291e7d2e5db4f6d58 Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 11 Sep 2025 16:49:22 -0500 Subject: [PATCH 39/42] Fix checkstyle --- .../java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java index bd5edf9720bf..17ba3cc77ef2 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java @@ -26,8 +26,6 @@ import org.apache.calcite.avatica.remote.Service; import org.apache.calcite.avatica.server.MetricsAwareAvaticaHandler; import org.apache.calcite.avatica.util.UnsynchronizedBuffer; -import org.apache.druid.guice.annotations.Self; -import org.apache.druid.server.DruidNode; import org.eclipse.jetty.server.Handler; public abstract class DruidAvaticaHandler extends Handler.Abstract
implements MetricsAwareAvaticaHandler From 0b88098a982f84da8a8cf192904cb5e9ae10db61 Mon Sep 17 00:00:00 2001 From: capistrant Date: Wed, 17 Sep 2025 08:38:42 -0500 Subject: [PATCH 40/42] fix import order --- .../java/org/apache/druid/guice/http/JettyHttpClientModule.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java b/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java index a37005d751b2..da543448af8b 100644 --- a/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java +++ b/server/src/main/java/org/apache/druid/guice/http/JettyHttpClientModule.java @@ -33,8 +33,8 @@ import org.apache.druid.server.DruidNode; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.transport.HttpClientTransportDynamic; -import org.eclipse.jetty.io.ClientConnector; import org.eclipse.jetty.http.HttpField; +import org.eclipse.jetty.io.ClientConnector; import org.eclipse.jetty.util.ssl.SslContextFactory; import org.eclipse.jetty.util.thread.QueuedThreadPool; import org.jboss.netty.handler.codec.http.HttpHeaders; From a907ba7e88fd465ca6bab0b1a235c0979d4f803c Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 18 Sep 2025 11:09:49 -0500 Subject: [PATCH 41/42] Address review comments --- pom.xml | 12 ++++---- .../initialization/ServerInjectorBuilder.java | 29 +++++++++---------- .../server/initialization/ServerConfig.java | 24 ++++++++++++--- .../jetty/JettyServerModule.java | 6 ++-- .../StandardResponseHeaderFilterHolder.java | 2 +- .../initialization/ServerConfigTest.java | 5 ++-- .../AsyncManagementForwardingServletTest.java | 3 +- ...tandardResponseHeaderFilterHolderTest.java | 10 ++++--- .../server/mocks/MockHttpServletResponse.java | 5 +--- .../sql/avatica/DruidAvaticaHandler.java | 14 +++++++++ .../avatica/DruidAvaticaProtobufHandler.java | 12 ++++---- .../sql/avatica/DruidAvaticaHandlerTest.java | 3 +- 
.../avatica/DruidAvaticaJsonHandlerTest.java | 3 +- .../DruidAvaticaProtobufHandlerTest.java | 3 +- 14 files changed, 78 insertions(+), 53 deletions(-) diff --git a/pom.xml b/pom.xml index aa758e60ae27..bb2dd823d15a 100644 --- a/pom.xml +++ b/pom.xml @@ -765,12 +765,12 @@ org.eclipse.jetty.ee8 jetty-ee8-servlet ${jetty.version} - - - org.eclipse.jetty.toolchain - jetty-servlet-api - - + + + org.eclipse.jetty.toolchain + jetty-servlet-api + +
org.eclipse.jetty.ee8 diff --git a/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java b/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java index 5751a700cd04..ff63c9cf34ac 100644 --- a/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java +++ b/server/src/main/java/org/apache/druid/initialization/ServerInjectorBuilder.java @@ -76,7 +76,6 @@ public static Injector makeServerInjector( /** * Needed for Hadoop indexing that needs server-like Injector but can't run jetty 12 */ - @VisibleForTesting public static Injector makeServerInjectorWithoutJettyModules( final Injector baseInjector, final Set nodeRoles, @@ -119,20 +118,6 @@ public Injector buildWithoutJettyModules() return this.build(false); } - public static Module registerNodeRoleModule(Set nodeRoles) - { - return binder -> { - Multibinder selfBinder = Multibinder.newSetBinder(binder, NodeRole.class, Self.class); - nodeRoles.forEach(nodeRole -> selfBinder.addBinding().toInstance(nodeRole)); - - MapBinder.newMapBinder( - binder, - new TypeLiteral(){}, - new TypeLiteral>>(){} - ); - }; - } - private Injector build(boolean withJettyModules) { Preconditions.checkNotNull(baseInjector); @@ -165,4 +150,18 @@ private Injector build(boolean withJettyModules) // Override again with extensions. 
return new ExtensionInjectorBuilder(serviceBuilder).build(); } + + public static Module registerNodeRoleModule(Set nodeRoles) + { + return binder -> { + Multibinder selfBinder = Multibinder.newSetBinder(binder, NodeRole.class, Self.class); + nodeRoles.forEach(nodeRole -> selfBinder.addBinding().toInstance(nodeRole)); + + MapBinder.newMapBinder( + binder, + new TypeLiteral(){}, + new TypeLiteral>>(){} + ); + }; + } } diff --git a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java index 13c4245c86bb..626d65b4be04 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java +++ b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java @@ -20,6 +20,10 @@ package org.apache.druid.server.initialization; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import org.apache.druid.common.exception.ErrorResponseTransformStrategy; @@ -36,6 +40,7 @@ import javax.validation.constraints.Max; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; +import java.io.IOException; import java.util.List; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -75,7 +80,7 @@ public ServerConfig( @NotNull ErrorResponseTransformStrategy errorResponseTransformStrategy, @Nullable String contentSecurityPolicy, boolean enableHSTS, - @Nullable String uriCompliance + @Nullable UriCompliance uriCompliance ) { this.numThreads = numThreads; @@ -99,7 +104,7 @@ public ServerConfig( this.errorResponseTransformStrategy = errorResponseTransformStrategy; 
this.contentSecurityPolicy = contentSecurityPolicy; this.enableHSTS = enableHSTS; - this.uriCompliance = uriCompliance; + this.uriCompliance = uriCompliance != null ? uriCompliance : UriCompliance.LEGACY; } public ServerConfig() @@ -189,7 +194,8 @@ public ServerConfig(boolean enableQueryRequestsQueuing) private boolean enableHSTS = false; @JsonProperty - private String uriCompliance = UriCompliance.LEGACY.getName(); + @JsonDeserialize(using = UriComplianceDeserializer.class) + private UriCompliance uriCompliance = UriCompliance.LEGACY; /** * This feature flag enables query requests queuing when admins want to reserve some threads for @@ -312,7 +318,7 @@ public boolean isEnableQueryRequestsQueuing() return enableQueryRequestsQueuing; } - public String getUriCompliance() + public UriCompliance getUriCompliance() { return uriCompliance; } @@ -416,4 +422,14 @@ public static int getDefaultNumThreads() { return Math.max(10, (JvmUtils.getRuntimeInfo().getAvailableProcessors() * 17) / 16 + 2) + 30; } + + public static class UriComplianceDeserializer extends JsonDeserializer + { + @Override + public UriCompliance deserialize(JsonParser p, DeserializationContext ctxt) throws IOException + { + String value = p.getValueAsString(); + return UriCompliance.valueOf(value); + } + } } diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java index 887de2f35240..b2e767a8b974 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/JettyServerModule.java @@ -58,7 +58,6 @@ import org.apache.druid.server.metrics.MonitorsConfig; import org.apache.druid.server.security.CustomCheckX509TrustManager; import org.apache.druid.server.security.TLSCertificateChecker; -import org.eclipse.jetty.http.UriCompliance; import 
org.eclipse.jetty.server.ConnectionFactory; import org.eclipse.jetty.server.ForwardedRequestCustomizer; import org.eclipse.jetty.server.Handler; @@ -224,7 +223,7 @@ static Server makeAndInitializeServer( if (node.isEnablePlaintextPort()) { log.info("Creating http connector with port [%d]", node.getPlaintextPort()); HttpConfiguration httpConfiguration = new HttpConfiguration(); - httpConfiguration.setUriCompliance(UriCompliance.from(config.getUriCompliance())); + httpConfiguration.setUriCompliance(config.getUriCompliance()); if (config.isEnableForwardedRequestCustomizer()) { httpConfiguration.addCustomizer(new ForwardedRequestCustomizer()); } @@ -309,7 +308,7 @@ static Server makeAndInitializeServer( } final HttpConfiguration httpsConfiguration = new HttpConfiguration(); - httpsConfiguration.setUriCompliance(UriCompliance.from(config.getUriCompliance())); + httpsConfiguration.setUriCompliance(config.getUriCompliance()); if (config.isEnableForwardedRequestCustomizer()) { httpsConfiguration.addCustomizer(new ForwardedRequestCustomizer()); } @@ -480,7 +479,6 @@ public boolean handle( }); } - log.info("Configuraing Jetty Request log for server"); server.setRequestLog(new JettyRequestLog()); return server; diff --git a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java index 1723d750c654..30054abca688 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java +++ b/server/src/main/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolder.java @@ -75,7 +75,7 @@ public static void deduplicateHeadersInProxyServlet( { for (final String headerName : StandardResponseHeaderFilterHolder.STANDARD_HEADERS) { if (serverResponse.getHeaders().contains(headerName) && proxyResponse.containsHeader(headerName)) { - // In Jetty 12 EE8, use the 
standard servlet API method to remove headers + // In EE8 compatible Jetty 12 using servlet API 4.x, setting a header to null is the accepted way to remove it. proxyResponse.setHeader(headerName, null); } } diff --git a/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java b/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java index 8aad81979e56..913c7f6109af 100644 --- a/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java +++ b/server/src/test/java/org/apache/druid/initialization/ServerConfigTest.java @@ -44,6 +44,7 @@ public void testSerde() throws Exception Assert.assertEquals(defaultConfig, defaultConfig2); Assert.assertFalse(defaultConfig2.isEnableForwardedRequestCustomizer()); Assert.assertFalse(defaultConfig2.isEnableHSTS()); + Assert.assertEquals(UriCompliance.LEGACY, defaultConfig.getUriCompliance()); ServerConfig modifiedConfig = new ServerConfig( 999, @@ -67,7 +68,7 @@ public void testSerde() throws Exception new AllowedRegexErrorResponseTransformStrategy(ImmutableList.of(".*")), "my-cool-policy", true, - UriCompliance.RFC3986.getName() + UriCompliance.RFC3986 ); String modifiedConfigJson = OBJECT_MAPPER.writeValueAsString(modifiedConfig); ServerConfig modifiedConfig2 = OBJECT_MAPPER.readValue(modifiedConfigJson, ServerConfig.class); @@ -81,7 +82,7 @@ public void testSerde() throws Exception Assert.assertEquals("my-cool-policy", modifiedConfig.getContentSecurityPolicy()); Assert.assertEquals("my-cool-policy", modifiedConfig2.getContentSecurityPolicy()); Assert.assertTrue(modifiedConfig2.isEnableHSTS()); - Assert.assertEquals(UriCompliance.RFC3986.getName(), modifiedConfig2.getUriCompliance()); + Assert.assertEquals(UriCompliance.RFC3986, modifiedConfig2.getUriCompliance()); } @Test diff --git a/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java b/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java index 
6f4b37c3e880..c576f966cf65 100644 --- a/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java +++ b/server/src/test/java/org/apache/druid/server/AsyncManagementForwardingServletTest.java @@ -536,7 +536,8 @@ public String getCurrentLeader() JettyServerInitUtils.wrapWithDefaultGzipHandler( root, ServerConfig.DEFAULT_GZIP_INFLATE_BUFFER_SIZE, - Deflater.DEFAULT_COMPRESSION) + Deflater.DEFAULT_COMPRESSION + ) ); server.setHandler(handlerList); } diff --git a/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java b/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java index e5a5afdb57f0..6eb2be2eb398 100644 --- a/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java +++ b/server/src/test/java/org/apache/druid/server/initialization/jetty/StandardResponseHeaderFilterHolderTest.java @@ -140,15 +140,17 @@ public void test_get_invalidContentSecurityPolicy() } @Test - public void test_deduplicateHeadersInProxyServlet_duplicatesExist() + public void test_deduplicateHeadersInProxyServlet_withDuplicates() { EasyMock.expect(proxyResponse.containsHeader("Cache-Control")).andReturn(true).once(); proxyResponse.setHeader("Cache-Control", null); EasyMock.expectLastCall().once(); EasyMock.expect(proxyResponse.containsHeader("Strict-Transport-Security")).andReturn(false).once(); - EasyMock.expect(clientResponse.getHeaders()).andReturn(HttpFields.from(new HttpField("Cache-Control", "true"), new HttpField("Strict-Transport-Security", "true"))).times(3); - + EasyMock.expect(clientResponse.getHeaders()) + .andReturn( + HttpFields.from(new HttpField("Cache-Control", "true"), new HttpField("Strict-Transport-Security", "true")) + ).times(3); replayAllMocks(); @@ -156,7 +158,7 @@ public void test_deduplicateHeadersInProxyServlet_duplicatesExist() } @Test - public void 
test_duplicateHeadersInProxyServlet_noDuplicates() + public void test_duplicateHeadersInProxyServlet_withNoDuplicates() { EasyMock.expect(proxyResponse.containsHeader("Cache-Control")).andReturn(false).once(); EasyMock.expect(proxyResponse.containsHeader("Strict-Transport-Security")).andReturn(false).once(); diff --git a/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java b/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java index b5c723bc275b..9b21c856f582 100644 --- a/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java +++ b/server/src/test/java/org/apache/druid/server/mocks/MockHttpServletResponse.java @@ -149,12 +149,10 @@ public void addDateHeader(String name, long date) throw new UnsupportedOperationException(); } - /** - * HttpServletResponse 4.0.1 spec dictates that setHeader overwrites existing values. - */ @Override public void setHeader(String name, String value) { + // HttpServletResponse 4.0.1 spec dictates that setHeader overwrites existing values. headers.removeAll(name); headers.put(name, value); } @@ -346,7 +344,6 @@ public Locale getLocale() throw new UnsupportedOperationException(); } - @Override public void setTrailerFields(Supplier> supplier) { diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java index 17ba3cc77ef2..867f7734988a 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaHandler.java @@ -28,6 +28,20 @@ import org.apache.calcite.avatica.util.UnsynchronizedBuffer; import org.eclipse.jetty.server.Handler; +/** + * Base class for Druid's custom Avatica server handlers that are compatible with Jetty 12. + * + *
+ * <p>This class provides a Jetty 12-compatible implementation of {@link MetricsAwareAvaticaHandler}
+ * to enable Druid's JDBC support via Apache Calcite's Avatica server. Since Calcite's Avatica server
+ * does not natively support Jetty 12, this custom implementation allows Druid to continue using
+ * Avatica for JDBC connectivity during the Jetty 12 migration.
+ *
+ * <p>Concrete implementations handle different wire protocols:
+ * <ul>
+ *   <li>{@link DruidAvaticaJsonHandler} - JSON-based protocol</li>
+ *   <li>{@link DruidAvaticaProtobufHandler} - Protocol Buffers-based protocol</li>
+ * </ul>
+ */ public abstract class DruidAvaticaHandler extends Handler.Abstract implements MetricsAwareAvaticaHandler { protected final Service service; diff --git a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java index a054f0b85e26..7d5c6183025a 100644 --- a/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java +++ b/sql/src/main/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandler.java @@ -49,7 +49,7 @@ public class DruidAvaticaProtobufHandler extends DruidAvaticaHandler public static final String AVATICA_PATH_NO_TRAILING_SLASH = "/druid/v2/sql/avatica-protobuf"; public static final String AVATICA_PATH = AVATICA_PATH_NO_TRAILING_SLASH + "/"; - private final ProtobufHandler pbHandler; + private final ProtobufHandler protobufHandler; @Inject public DruidAvaticaProtobufHandler( @@ -60,7 +60,7 @@ public DruidAvaticaProtobufHandler( { super(druidMeta, metrics, AvaticaProtobufHandler.class); ProtobufTranslation protobufTranslation = new ProtobufTranslationImpl(); - this.pbHandler = new ProtobufHandler(service, protobufTranslation, this.metrics); + this.protobufHandler = new ProtobufHandler(service, protobufTranslation, this.metrics); setServerRpcMetadata(new Service.RpcMetadataResponse(druidNode.getHostAndPortToUse())); } @@ -92,11 +92,11 @@ public boolean handle(Request request, Response response, Callback callback) thr org.apache.calcite.avatica.remote.Handler.HandlerResponse handlerResponse; try { - handlerResponse = pbHandler.apply(requestBytes); + handlerResponse = protobufHandler.apply(requestBytes); } catch (Exception e) { LOG.debug(e, "Error invoking request"); - handlerResponse = pbHandler.convertToErrorResponse(e); + handlerResponse = protobufHandler.convertToErrorResponse(e); } response.setStatus(handlerResponse.getStatusCode()); @@ -111,8 +111,8 @@ public boolean handle(Request request, Response response, Callback 
callback) thr public void setServerRpcMetadata(Service.RpcMetadataResponse metadata) { super.setServerRpcMetadata(metadata); - if (pbHandler != null) { - pbHandler.setRpcMetadata(metadata); + if (protobufHandler != null) { + protobufHandler.setRpcMetadata(metadata); } } } diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index e6ce656bf50b..22ea225301ca 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -93,7 +93,6 @@ import org.apache.druid.sql.calcite.util.datasets.TestDataSet; import org.apache.druid.sql.guice.SqlModule; import org.apache.druid.sql.hook.DruidHookDispatcher; -import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -255,7 +254,7 @@ protected String getJdbcUrlTail() } // Default implementation is for JSON to allow debugging of tests. 
- protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) + protected DruidAvaticaHandler getAvaticaHandler(final DruidMeta druidMeta) { return new DruidAvaticaJsonHandler( druidMeta, diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java index 452c2c3234d2..94bb835e0e99 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaJsonHandlerTest.java @@ -22,7 +22,6 @@ import org.apache.druid.server.DruidNode; import org.easymock.EasyMock; import org.eclipse.jetty.http.HttpURI; -import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; @@ -40,7 +39,7 @@ protected String getJdbcUrlTail() } @Override - protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) + protected DruidAvaticaHandler getAvaticaHandler(final DruidMeta druidMeta) { return new DruidAvaticaJsonHandler( druidMeta, diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java index d0c146795e69..decf79ed2e0a 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaProtobufHandlerTest.java @@ -23,7 +23,6 @@ import org.apache.druid.server.DruidNode; import org.easymock.EasyMock; import org.eclipse.jetty.http.HttpURI; -import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Request; import org.eclipse.jetty.server.Response; import org.eclipse.jetty.util.Callback; @@ -44,7 +43,7 @@ protected String getJdbcUrlTail() } @Override - protected Handler.Abstract getAvaticaHandler(final DruidMeta druidMeta) + protected 
DruidAvaticaProtobufHandler getAvaticaHandler(final DruidMeta druidMeta) { return new DruidAvaticaProtobufHandler( druidMeta, From 73eab182c102ac984bb16287b15bc304b0849e25 Mon Sep 17 00:00:00 2001 From: capistrant Date: Thu, 18 Sep 2025 15:46:28 -0500 Subject: [PATCH 42/42] fix serialization --- .../druid/server/initialization/ServerConfig.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java index 626d65b4be04..45a2df48dfa0 100644 --- a/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java +++ b/server/src/main/java/org/apache/druid/server/initialization/ServerConfig.java @@ -20,10 +20,14 @@ package org.apache.druid.server.initialization; import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.databind.DeserializationContext; import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.SerializerProvider; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import org.apache.druid.common.exception.ErrorResponseTransformStrategy; @@ -195,6 +199,7 @@ public ServerConfig(boolean enableQueryRequestsQueuing) @JsonProperty @JsonDeserialize(using = UriComplianceDeserializer.class) + @JsonSerialize(using = UriComplianceSerializer.class) private UriCompliance uriCompliance = UriCompliance.LEGACY; /** @@ -432,4 +437,13 @@ public UriCompliance deserialize(JsonParser p, DeserializationContext ctxt) thro return UriCompliance.valueOf(value); } } + + public static class UriComplianceSerializer 
extends JsonSerializer + { + @Override + public void serialize(UriCompliance value, JsonGenerator gen, SerializerProvider serializers) throws IOException + { + gen.writeString(value.getName()); + } + } }