Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,20 +21,20 @@

import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import org.apache.druid.data.input.AbstractInputSourceBuilder;
import org.apache.druid.data.input.InputSourceFactory;
import org.apache.druid.data.input.impl.SplittableInputSource;
import org.apache.druid.guice.Hdfs;
import org.apache.hadoop.conf.Configuration;

import java.util.List;

public class HdfsInputSourceBuilder extends AbstractInputSourceBuilder
public class HdfsInputSourceFactory implements InputSourceFactory
{
private final Configuration configuration;
private final HdfsInputSourceConfig inputSourceConfig;

@JsonCreator
public HdfsInputSourceBuilder(
public HdfsInputSourceFactory(
@JacksonInject @Hdfs Configuration configuration,
@JacksonInject HdfsInputSourceConfig inputSourceConfig
)
Expand All @@ -44,7 +44,7 @@ public HdfsInputSourceBuilder(
}

@Override
public SplittableInputSource generateInputSource(List<String> inputFilePaths)
public SplittableInputSource create(List<String> inputFilePaths)
{
return new HdfsInputSource(inputFilePaths, configuration, inputSourceConfig);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,8 +34,8 @@
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.initialization.DruidModule;
import org.apache.druid.inputsource.hdfs.HdfsInputSource;
import org.apache.druid.inputsource.hdfs.HdfsInputSourceBuilder;
import org.apache.druid.inputsource.hdfs.HdfsInputSourceConfig;
import org.apache.druid.inputsource.hdfs.HdfsInputSourceFactory;
import org.apache.druid.storage.hdfs.tasklog.HdfsTaskLogs;
import org.apache.druid.storage.hdfs.tasklog.HdfsTaskLogsConfig;
import org.apache.hadoop.conf.Configuration;
Expand Down Expand Up @@ -67,7 +67,7 @@ public List<? extends Module> getJacksonModules()
new SimpleModule().registerSubtypes(
new NamedType(HdfsLoadSpec.class, HdfsStorageDruidModule.SCHEME),
new NamedType(HdfsInputSource.class, HdfsStorageDruidModule.SCHEME),
new NamedType(HdfsInputSourceBuilder.class, HdfsStorageDruidModule.SCHEME)
new NamedType(HdfsInputSourceFactory.class, HdfsStorageDruidModule.SCHEME)
)
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.apache.druid.inputsource.hdfs;

import org.apache.druid.data.input.InputSourceFactory;
import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.Test;
Expand All @@ -32,7 +33,7 @@ public void testAdapterGet()
{
Configuration conf = new Configuration();
HdfsInputSourceConfig inputSourceConfig = new HdfsInputSourceConfig(null);
HdfsInputSourceBuilder hdfsInputSourceAdapter = new HdfsInputSourceBuilder(conf, inputSourceConfig);
Assert.assertTrue(hdfsInputSourceAdapter.generateInputSource(Arrays.asList("hdfs://localhost:7020/bar/def.parquet", "hdfs://localhost:7020/bar/abc.parquet")) instanceof HdfsInputSource);
InputSourceFactory hdfsInputSourceAdapter = new HdfsInputSourceFactory(conf, inputSourceConfig);
Assert.assertTrue(hdfsInputSourceAdapter.create(Arrays.asList("hdfs://localhost:7020/bar/def.parquet", "hdfs://localhost:7020/bar/abc.parquet")) instanceof HdfsInputSource);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,10 @@ public class S3InputSourceDruidModule implements DruidModule
public List<? extends Module> getJacksonModules()
{
return ImmutableList.of(
new SimpleModule().registerSubtypes(new NamedType(S3InputSource.class, S3StorageDruidModule.SCHEME),
new NamedType(S3InputSourceBuilder.class, S3StorageDruidModule.SCHEME))
new SimpleModule().registerSubtypes(
new NamedType(S3InputSource.class, S3StorageDruidModule.SCHEME),
new NamedType(S3InputSourceFactory.class, S3StorageDruidModule.SCHEME)
)
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@
import org.apache.druid.common.aws.AWSClientConfig;
import org.apache.druid.common.aws.AWSEndpointConfig;
import org.apache.druid.common.aws.AWSProxyConfig;
import org.apache.druid.data.input.AbstractInputSourceBuilder;
import org.apache.druid.data.input.InputSourceFactory;
import org.apache.druid.data.input.impl.SplittableInputSource;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.storage.s3.S3InputDataConfig;
Expand All @@ -39,7 +39,7 @@
import java.util.List;
import java.util.stream.Collectors;

public class S3InputSourceBuilder extends AbstractInputSourceBuilder
public class S3InputSourceFactory implements InputSourceFactory
{
private final ServerSideEncryptingAmazonS3 s3Client;
private final ServerSideEncryptingAmazonS3.Builder s3ClientBuilder;
Expand All @@ -51,7 +51,7 @@ public class S3InputSourceBuilder extends AbstractInputSourceBuilder
private final AWSEndpointConfig awsEndpointConfig;

@JsonCreator
public S3InputSourceBuilder(
public S3InputSourceFactory(
@JacksonInject ServerSideEncryptingAmazonS3 s3Client,
@JacksonInject ServerSideEncryptingAmazonS3.Builder s3ClientBuilder,
@JacksonInject S3InputDataConfig inputDataConfig,
Expand All @@ -73,7 +73,7 @@ public S3InputSourceBuilder(
}

@Override
public SplittableInputSource generateInputSource(List<String> inputFilePaths)
public SplittableInputSource create(List<String> inputFilePaths)
{
return new S3InputSource(
s3Client,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.apache.druid.data.input.s3;

import org.apache.druid.data.input.InputSourceFactory;
import org.apache.druid.storage.s3.S3InputDataConfig;
import org.apache.druid.storage.s3.ServerSideEncryptingAmazonS3;
import org.easymock.EasyMock;
Expand All @@ -28,7 +29,7 @@
import java.util.Arrays;
import java.util.List;

public class S3InputSourceBuilderTest
public class S3InputSourceFactoryTest
{
@Test
public void testAdapterGet()
Expand All @@ -43,7 +44,7 @@ public void testAdapterGet()
"s3://bar/foo/file3.txt"
);

S3InputSourceBuilder s3Builder = new S3InputSourceBuilder(
InputSourceFactory s3Builder = new S3InputSourceFactory(
service,
serverSides3Builder,
dataConfig,
Expand All @@ -53,6 +54,6 @@ public void testAdapterGet()
null,
null
);
Assert.assertTrue(s3Builder.generateInputSource(fileUris) instanceof S3InputSource);
Assert.assertTrue(s3Builder.create(fileUris) instanceof S3InputSource);
}
}

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.data.input;

import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.data.input.impl.LocalInputSourceFactory;
import org.apache.druid.data.input.impl.SplittableInputSource;
import org.apache.druid.guice.annotations.UnstableApi;

import java.util.List;

/**
 * A factory for creating {@link SplittableInputSource} objects on the fly.
 * <p>
 * For composing input sources such as IcebergInputSource, instantiating the delegate input source eagerly can fail
 * during deserialization, because the input file paths are not yet known at that point and the delegate's
 * precondition checks may reject the incomplete configuration. This factory defers creation of the delegate input
 * source until the input file paths are fully determined.
 * <p>
 * Implementations are registered as Jackson subtypes keyed by the {@code "type"} property.
 */
@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
@JsonSubTypes(value = {
@JsonSubTypes.Type(name = "local", value = LocalInputSourceFactory.class)
})
@UnstableApi
public interface InputSourceFactory
{
/**
 * Creates a {@link SplittableInputSource} over the given, fully-resolved input file paths.
 *
 * @param inputFilePaths paths of the input files the returned input source should read
 * @return a splittable input source for the given paths
 */
SplittableInputSource create(List<String> inputFilePaths);
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,18 +19,17 @@

package org.apache.druid.data.input.impl;

import org.apache.druid.data.input.AbstractInputSourceBuilder;
import org.apache.druid.data.input.InputSourceFactory;

import java.io.File;
import java.util.List;
import java.util.stream.Collectors;

public class LocalInputSourceBuilder extends AbstractInputSourceBuilder
public class LocalInputSourceFactory implements InputSourceFactory
{
public static final String TYPE_KEY = "local";

@Override
public LocalInputSource generateInputSource(List<String> inputFilePaths)
public LocalInputSource create(List<String> inputFilePaths)
{
return new LocalInputSource(
null,
Expand Down
Loading