Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,12 @@
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2Client;
import com.fasterxml.jackson.databind.Module;
import com.google.inject.Binder;
import com.google.inject.Provides;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.initialization.DruidModule;

import java.util.Collections;
import java.util.List;

public class AWSModule implements DruidModule
{
@Override
Expand All @@ -56,10 +52,4 @@ public AmazonEC2 getEc2Client(AWSCredentialsProvider credentials)
{
return new AmazonEC2Client(credentials);
}

@Override
public List<? extends Module> getJacksonModules()
{
return Collections.emptyList();
}
}
8 changes: 0 additions & 8 deletions cloud/gcp-common/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -52,18 +52,10 @@
<artifactId>jackson-module-guice</artifactId>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>com.google.inject</groupId>
<artifactId>guice</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
</dependency>
<dependency>
<groupId>com.google.http-client</groupId>
<artifactId>google-http-client</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,14 +19,12 @@

package org.apache.druid.common.gcp;

import com.fasterxml.jackson.databind.Module;
import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Provides;
import org.apache.druid.guice.LazySingleton;
Expand All @@ -35,23 +33,15 @@
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.util.Collections;
import java.util.List;

public class GcpModule implements DruidModule
{
@Override
public List<? extends Module> getJacksonModules()
{
return ImmutableList.of();
}

@Override
public void configure(Binder binder)
{
// Nothing to proactively bind
}


@Provides
@LazySingleton
public HttpRequestInitializer getHttpRequestInitializer(HttpTransport transport, JsonFactory factory)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,36 +19,24 @@

package org.apache.druid.common.gcp;

import com.fasterxml.jackson.databind.Module;
import com.google.api.client.googleapis.testing.auth.oauth2.MockGoogleCredential;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.jackson2.JacksonFactory;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import com.google.inject.Provides;
import org.apache.druid.guice.LazySingleton;
import org.apache.druid.initialization.DruidModule;

import java.util.List;

public class GcpMockModule implements DruidModule
{
@Override
public List<? extends Module> getJacksonModules()
{
return ImmutableList.of();
}

@Override
public void configure(Binder binder)
{

}


@Provides
@LazySingleton
public HttpRequestInitializer mockRequestInitializer(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,22 @@
import com.fasterxml.jackson.databind.Module;
import org.apache.druid.guice.annotations.ExtensionPoint;

import java.util.Collections;
import java.util.List;

/**
 * A Guice module which also provides Jackson modules.
 * Extension modules must implement this interface.
* (Enforced in {@code ExtensionInjectorBuilder}).
* Built-in implementations that do not provide Jackson modules can
* implement the simpler {@link com.google.inject.Module Guice Module}
* interface instead.
*/
@ExtensionPoint
public interface DruidModule extends com.google.inject.Module
{
List<? extends Module> getJacksonModules();
default List<? extends Module> getJacksonModules()
{
return Collections.emptyList();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -1172,16 +1172,9 @@ public AWSCredentialsProvider getAWSCredentialsProvider()
return AWSCredentialsUtils.defaultAWSCredentialsProviderChain(null);
}

@Override
public List<? extends Module> getJacksonModules()
{
return Collections.emptyList();
}

@Override
public void configure(Binder binder)
{

}
}
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,6 @@

package org.apache.druid.https;

import com.fasterxml.jackson.databind.Module;
import com.google.common.collect.ImmutableList;
import com.google.inject.Binder;
import org.apache.druid.guice.JsonConfigProvider;
import org.apache.druid.guice.annotations.Client;
Expand All @@ -31,17 +29,9 @@
import org.apache.druid.server.router.Router;

import javax.net.ssl.SSLContext;
import java.util.List;

public class SSLContextModule implements DruidModule
{

@Override
public List<? extends Module> getJacksonModules()
{
return ImmutableList.of();
}

@Override
public void configure(Binder binder)
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,24 +19,14 @@

package org.apache.druid.guice;

import com.fasterxml.jackson.databind.Module;
import com.google.inject.Binder;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.query.expressions.SleepExprMacro;
import org.apache.druid.query.sql.SleepOperatorConversion;
import org.apache.druid.sql.guice.SqlBindings;

import java.util.Collections;
import java.util.List;

public class SleepModule implements DruidModule
{
@Override
public List<? extends Module> getJacksonModules()
{
return Collections.emptyList();
}

@Override
public void configure(Binder binder)
{
Expand Down
4 changes: 4 additions & 0 deletions indexing-service/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -212,6 +212,10 @@
<artifactId>commons-collections4</artifactId>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.eclipse.aether</groupId>
<artifactId>aether-api</artifactId>
</dependency>
<!-- Tests -->
<dependency>
<groupId>junit</groupId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,13 +25,14 @@
import com.google.common.collect.Lists;
import com.google.inject.Injector;
import org.apache.druid.guice.ExtensionsConfig;
import org.apache.druid.guice.GuiceInjectors;
import org.apache.druid.guice.ExtensionsLoader;
import org.apache.druid.guice.StartupInjectorBuilder;
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.initialization.Initialization;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.utils.JvmUtils;

import javax.annotation.Nullable;

import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
Expand All @@ -48,13 +49,9 @@
public abstract class HadoopTask extends AbstractBatchIndexTask
{
private static final Logger log = new Logger(HadoopTask.class);
private static final ExtensionsConfig EXTENSIONS_CONFIG;

static final Injector INJECTOR = GuiceInjectors.makeStartupInjector();

static {
EXTENSIONS_CONFIG = INJECTOR.getInstance(ExtensionsConfig.class);
}
static final Injector INJECTOR = new StartupInjectorBuilder().withExtensions().build();
private static final ExtensionsLoader EXTENSIONS_LOADER = ExtensionsLoader.instance(INJECTOR);

private final List<String> hadoopDependencyCoordinates;

Expand Down Expand Up @@ -152,8 +149,8 @@ public static ClassLoader buildClassLoader(final List<String> hadoopDependencyCo
}

final List<URL> extensionURLs = new ArrayList<>();
for (final File extension : Initialization.getExtensionFilesToLoad(EXTENSIONS_CONFIG)) {
final URLClassLoader extensionLoader = Initialization.getClassLoaderForExtension(extension, false);
for (final File extension : EXTENSIONS_LOADER.getExtensionFilesToLoad()) {
final URLClassLoader extensionLoader = EXTENSIONS_LOADER.getClassLoaderForExtension(extension, false);
extensionURLs.addAll(Arrays.asList(extensionLoader.getURLs()));
}

Expand All @@ -165,9 +162,9 @@ public static ClassLoader buildClassLoader(final List<String> hadoopDependencyCo
for (final File hadoopDependency :
Initialization.getHadoopDependencyFilesToLoad(
finalHadoopDependencyCoordinates,
EXTENSIONS_CONFIG
EXTENSIONS_LOADER.config()
)) {
final URLClassLoader hadoopLoader = Initialization.getClassLoaderForExtension(hadoopDependency, false);
final URLClassLoader hadoopLoader = EXTENSIONS_LOADER.getClassLoaderForExtension(hadoopDependency, false);
localClassLoaderURLs.addAll(Arrays.asList(hadoopLoader.getURLs()));
}

Expand All @@ -187,15 +184,16 @@ public static ClassLoader buildClassLoader(final List<String> hadoopDependencyCo
);

final String hadoopContainerDruidClasspathJars;
if (EXTENSIONS_CONFIG.getHadoopContainerDruidClasspath() == null) {
ExtensionsConfig extnConfig = EXTENSIONS_LOADER.config();
if (extnConfig.getHadoopContainerDruidClasspath() == null) {
hadoopContainerDruidClasspathJars = Joiner.on(File.pathSeparator).join(jobURLs);

} else {
List<URL> hadoopContainerURLs = Lists.newArrayList(
Initialization.getURLsForClasspath(EXTENSIONS_CONFIG.getHadoopContainerDruidClasspath())
ExtensionsLoader.getURLsForClasspath(extnConfig.getHadoopContainerDruidClasspath())
);

if (EXTENSIONS_CONFIG.getAddExtensionsToHadoopContainer()) {
if (extnConfig.getAddExtensionsToHadoopContainer()) {
hadoopContainerURLs.addAll(extensionURLs);
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.indexing.common.task;

import org.apache.druid.guice.ExtensionsConfig;
import org.apache.druid.java.util.common.ISE;
import org.eclipse.aether.artifact.DefaultArtifact;

import java.io.File;
import java.util.List;

/**
 * Static helpers for locating on-disk Hadoop dependency directories at startup.
 *
 * <p>Utility class: all members are static, so instantiation is disallowed.
 */
public class Initialization
{
  private Initialization()
  {
    // Utility class: no instances.
  }

  /**
   * Find all the Hadoop dependencies that should be loaded by Druid.
   *
   * @param hadoopDependencyCoordinates e.g.["org.apache.hadoop:hadoop-client:2.3.0"]
   * @param extensionsConfig ExtensionsConfig configured by druid.extensions.xxx
   *
   * @return an array of Hadoop dependency files that will be loaded by the Druid process,
   *         one per coordinate, in the same order as {@code hadoopDependencyCoordinates}
   * @throws ISE if the configured root path exists but is not a directory, or if the
   *         directory for any requested coordinate/version is missing
   */
  public static File[] getHadoopDependencyFilesToLoad(
      List<String> hadoopDependencyCoordinates,
      ExtensionsConfig extensionsConfig
  )
  {
    final File rootHadoopDependenciesDir = new File(extensionsConfig.getHadoopDependenciesDir());
    // A plain file at the configured path is a misconfiguration; a missing directory is
    // tolerated here and surfaces as a per-coordinate error below.
    if (rootHadoopDependenciesDir.exists() && !rootHadoopDependenciesDir.isDirectory()) {
      throw new ISE("Root Hadoop dependencies directory [%s] is not a directory!?", rootHadoopDependenciesDir);
    }
    final File[] hadoopDependenciesToLoad = new File[hadoopDependencyCoordinates.size()];
    int i = 0;
    for (final String coordinate : hadoopDependencyCoordinates) {
      // Expected on-disk layout: <hadoopDependenciesDir>/<artifactId>/<version>/
      final DefaultArtifact artifact = new DefaultArtifact(coordinate);
      final File hadoopDependencyDir = new File(rootHadoopDependenciesDir, artifact.getArtifactId());
      final File versionDir = new File(hadoopDependencyDir, artifact.getVersion());
      // find the hadoop dependency with the version specified in coordinate
      if (!hadoopDependencyDir.isDirectory() || !versionDir.isDirectory()) {
        throw new ISE("Hadoop dependency [%s] didn't exist!?", versionDir.getAbsolutePath());
      }
      hadoopDependenciesToLoad[i++] = versionDir;
    }
    return hadoopDependenciesToLoad;
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -140,7 +140,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
// reproduce but looking at the code around where the following constant is used one
// possibility is that the sketch's estimate is negative. If that case happens
// code has been added to log it and to set the estimate to the value of the
// following constant. It is not necessary to parametize this value since if this
// following constant. It is not necessary to parameterize this value since if this
// happens it is a bug and the new logging may now provide some evidence to reproduce
// and fix
private static final long DEFAULT_NUM_SHARDS_WHEN_ESTIMATE_GOES_NEGATIVE = 7L;
Expand Down
Loading