Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
095bb32
Add InputSource and InputFormat interfaces
jihoonson Nov 5, 2019
f52f967
revert orc dependency
jihoonson Nov 5, 2019
93ab23f
fix dimension exclusions and failing unit tests
jihoonson Nov 5, 2019
e0b80cb
fix tests
jihoonson Nov 5, 2019
b4f041e
fix test
jihoonson Nov 5, 2019
d349db5
fix test
jihoonson Nov 5, 2019
f308f13
fix firehose and inputSource for parallel indexing task
jihoonson Nov 5, 2019
d451582
Merge branch 'master' of github.com:apache/incubator-druid into input…
jihoonson Nov 6, 2019
b7c8b87
fix tc
jihoonson Nov 6, 2019
e942a21
fix tc: remove unused method
jihoonson Nov 6, 2019
08d7872
Formattable
jihoonson Nov 7, 2019
c70af75
add needsFormat(); renamed to ObjectSource; pass metricsName for reader
jihoonson Nov 9, 2019
546d957
address comments
jihoonson Nov 9, 2019
7bb5d5f
fix closing resource
jihoonson Nov 9, 2019
6dba81a
fix checkstyle
jihoonson Nov 9, 2019
ea2c8f9
fix tests
jihoonson Nov 9, 2019
1ea7758
remove verify from csv
jihoonson Nov 10, 2019
218b392
Revert "remove verify from csv"
jihoonson Nov 11, 2019
7098056
address comments
jihoonson Nov 11, 2019
7381277
fix import order and javadoc
jihoonson Nov 12, 2019
2a3b114
flatMap
jihoonson Nov 12, 2019
355777c
sampleLine
jihoonson Nov 13, 2019
e466ea9
Add IntermediateRowParsingReader
jihoonson Nov 14, 2019
87b83fa
Address comments
jihoonson Nov 14, 2019
c1c3fb9
Merge branch 'master' of github.com:apache/incubator-druid into input…
jihoonson Nov 14, 2019
169ab49
move csv reader test
jihoonson Nov 14, 2019
a9d167a
remove test for verify
jihoonson Nov 14, 2019
42a6965
adjust comments
jihoonson Nov 14, 2019
230803b
Fix InputEntityIteratingReader
jihoonson Nov 14, 2019
540759b
rename source -> entity
jihoonson Nov 14, 2019
ce88049
address comments
jihoonson Nov 14, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.data.input;

import com.google.common.base.Preconditions;

import javax.annotation.Nullable;
import java.io.File;

/**
 * Skeletal {@link InputSource} implementation. It implements {@link #reader} once, dispatching to
 * {@link #formattableReader} or {@link #fixedFormatReader} based on {@link #needsFormat()} with a
 * sanity check on the given format. Child classes override exactly one of the two protected hooks.
 */
public abstract class AbstractInputSource implements InputSource
{
  @Override
  public InputSourceReader reader(
      InputRowSchema inputRowSchema,
      @Nullable InputFormat inputFormat,
      @Nullable File temporaryDirectory
  )
  {
    // Sources with a fixed, built-in format ignore the (possibly null) inputFormat entirely.
    if (!needsFormat()) {
      return fixedFormatReader(inputRowSchema, temporaryDirectory);
    }
    // A formattable source cannot work without an explicit format; fail fast with a clear message.
    final InputFormat checkedFormat = Preconditions.checkNotNull(inputFormat, "inputFormat");
    return formattableReader(inputRowSchema, checkedFormat, temporaryDirectory);
  }

  /**
   * Hook for sources that require an external {@link InputFormat} to parse their data.
   * Override this when {@link #needsFormat()} returns true.
   */
  protected InputSourceReader formattableReader(
      InputRowSchema inputRowSchema,
      InputFormat inputFormat,
      @Nullable File temporaryDirectory
  )
  {
    throw new UnsupportedOperationException("Implement this method properly if needsFormat() = true");
  }

  /**
   * Hook for sources whose data format is fixed and thus need no {@link InputFormat}.
   * Override this when {@link #needsFormat()} returns false.
   */
  protected InputSourceReader fixedFormatReader(InputRowSchema inputRowSchema, @Nullable File temporaryDirectory)
  {
    throw new UnsupportedOperationException("Implement this method properly if needsFormat() = false");
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -30,9 +30,12 @@
{@link FiniteFirehoseFactory} designed for batch processing. Its implementations assume that the number of inputs is
limited.
*
* This class is deprecated in favor of {@link InputSource}.
*
* @param <T> parser type
* @param <S> input split type
*/
@Deprecated
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

suggest adding a link to InputSource to show what's replacing this

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

👍 Added.

public interface FiniteFirehoseFactory<T extends InputRowParser, S> extends FirehoseFactory<T>
{
/**
Expand Down
6 changes: 3 additions & 3 deletions core/src/main/java/org/apache/druid/data/input/Firehose.java
Original file line number Diff line number Diff line change
Expand Up @@ -74,13 +74,13 @@ public interface Firehose extends Closeable
*
* @return an InputRowListPlusJson which may contain any of: an InputRow, the raw data, or a ParseException
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

javadoc for @return needs to be updated

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This method is only for sampler and will be removed in the follow-up pr.

*/
default InputRowPlusRaw nextRowWithRaw() throws IOException
default InputRowListPlusJson nextRowWithRaw() throws IOException
{
try {
return InputRowPlusRaw.of(nextRow(), null);
return InputRowListPlusJson.of(nextRow(), null);
}
catch (ParseException e) {
return InputRowPlusRaw.of(null, e);
return InputRowListPlusJson.of((byte[]) null, e);
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

package org.apache.druid.data.input;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.prefetch.PrefetchableTextFilesFirehoseFactory;
Expand Down Expand Up @@ -85,6 +86,7 @@ default Firehose connectForSampler(T parser, @Nullable File temporaryDirectory)
return connect(parser, temporaryDirectory);
}

@JsonIgnore
default boolean isSplittable()
{
return false;
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.data.input;

import com.google.common.base.Preconditions;
import org.apache.druid.data.input.impl.FirehoseToInputSourceReaderAdaptor;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.SplittableInputSource;

import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.stream.Stream;

public class FirehoseFactoryToInputSourceAdaptor extends AbstractInputSource implements SplittableInputSource
{
private final FiniteFirehoseFactory firehoseFactory;
private final InputRowParser inputRowParser;

public FirehoseFactoryToInputSourceAdaptor(FiniteFirehoseFactory firehoseFactory, InputRowParser inputRowParser)
{
this.firehoseFactory = firehoseFactory;
this.inputRowParser = Preconditions.checkNotNull(inputRowParser, "inputRowParser");
}

public FiniteFirehoseFactory getFirehoseFactory()
{
return firehoseFactory;
}

@Override
public boolean isSplittable()
{
return firehoseFactory.isSplittable();
}

@Override
public Stream<InputSplit> createSplits(InputFormat inputFormat, @Nullable SplitHintSpec splitHintSpec)
throws IOException
{
if (firehoseFactory.isSplittable()) {
return firehoseFactory.getSplits(splitHintSpec);
} else {
throw new UnsupportedOperationException();
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is supporting unsplittable Firehoses future work?

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No, only splittable firehose can create splits.

}
}

@Override
public int getNumSplits(InputFormat inputFormat, @Nullable SplitHintSpec splitHintSpec) throws IOException
{
if (firehoseFactory.isSplittable()) {
return firehoseFactory.getNumSplits(splitHintSpec);
} else {
throw new UnsupportedOperationException();
}
}

@Override
public SplittableInputSource withSplit(InputSplit split)
{
if (firehoseFactory.isSplittable()) {
return new FirehoseFactoryToInputSourceAdaptor(
firehoseFactory.withSplit(split),
inputRowParser
);
} else {
throw new UnsupportedOperationException();
}
}

@Override
public boolean needsFormat()
{
return false;
}

@Override
protected InputSourceReader fixedFormatReader(InputRowSchema inputRowSchema, @Nullable File temporaryDirectory)
{
return new FirehoseToInputSourceReaderAdaptor(firehoseFactory, inputRowParser, temporaryDirectory);
}
}
120 changes: 120 additions & 0 deletions core/src/main/java/org/apache/druid/data/input/InputEntity.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.data.input;

import com.google.common.base.Predicate;
import org.apache.druid.guice.annotations.UnstableApi;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.logger.Logger;

import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;

/**
 * InputEntity abstracts a single unit of input data (a file, an object in remote storage, etc.)
 * and knows how to read bytes from it, either by streaming ({@link #open()}) or by fetching it
 * into local storage first ({@link #fetch}).
 */
@UnstableApi
public interface InputEntity
{
  Logger LOG = new Logger(InputEntity.class);

  int DEFAULT_FETCH_BUFFER_SIZE = 4 * 1024; // 4 KB
  int DEFAULT_MAX_NUM_FETCH_TRIES = 3; // 3 tries including the initial try

  /**
   * CleanableFile is the result type of {@link #fetch}.
   * It should clean up any temporary resource on {@link #close()}.
   */
  interface CleanableFile extends Closeable
  {
    File file();
  }

  /**
   * Returns a URI identifying this entity, or null if the implementation has no unique URI.
   */
  @Nullable
  URI getUri();

  /**
   * Opens an {@link InputStream} on the input entity directly.
   * This is the basic way to read the given entity.
   *
   * @see #fetch as an alternative way to read data.
   */
  InputStream open() throws IOException;

  /**
   * Fetches the input entity into the local storage.
   * This method might be preferred instead of {@link #open()}, for example
   *
   * - {@link InputFormat} requires expensive random access on remote storage.
   * - Holding a connection until you consume the entire InputStream is expensive.
   *
   * @param temporaryDirectory to store temp data. This directory will be removed automatically once
   *                           the task finishes.
   * @param fetchBuffer        is used to fetch remote entity into local storage.
   *
   * @see FileUtils#copyLarge
   */
  default CleanableFile fetch(File temporaryDirectory, byte[] fetchBuffer) throws IOException
  {
    final File fetchedFile = File.createTempFile("druid-input-entity", ".tmp", temporaryDirectory);
    LOG.debug("Fetching entity into file[%s]", fetchedFile.getAbsolutePath());

    // Copy the whole stream into the temp file, retrying per getFetchRetryCondition();
    // close the stream as soon as the copy completes.
    try (InputStream stream = open()) {
      FileUtils.copyLarge(
          stream,
          fetchedFile,
          fetchBuffer,
          getFetchRetryCondition(),
          DEFAULT_MAX_NUM_FETCH_TRIES,
          StringUtils.format("Failed to fetch into [%s]", fetchedFile.getAbsolutePath())
      );
    }

    return new CleanableFile()
    {
      @Override
      public File file()
      {
        return fetchedFile;
      }

      @Override
      public void close()
      {
        // Best-effort cleanup; a leftover temp file is not fatal, so only warn on failure.
        if (!fetchedFile.delete()) {
          LOG.warn("Failed to remove file[%s]", fetchedFile.getAbsolutePath());
        }
      }
    };
  }

  /**
   * {@link #fetch} will retry during the fetch if it sees an exception matching to the returned predicate.
   */
  Predicate<Throwable> getFetchRetryCondition();
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.data.input;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import org.apache.druid.guice.annotations.UnstableApi;
import org.apache.druid.java.util.common.parsers.CloseableIterator;

import java.io.File;
import java.io.IOException;

/**
 * InputEntityReader knows how to parse data into {@link InputRow}.
 * This class is <i>stateful</i> and a new InputEntityReader should be created per {@link InputEntity}.
 *
 * @see IntermediateRowParsingReader
 * @see TextReader
 */
@UnstableApi
public interface InputEntityReader
{
  /**
   * Default JSON writer for sampler. This writer can be used to create an {@link InputRowListPlusJson}.
   * Note that this writer uses the default serializer of Jackson. You may want to create a custom writer
   * to serialize your custom types.
   */
  ObjectWriter DEFAULT_JSON_WRITER = new ObjectMapper().writerWithDefaultPrettyPrinter();

  /**
   * Reads the given entity and parses its contents into {@link InputRow}s.
   *
   * @param source             the entity to read from
   * @param temporaryDirectory directory for any temporary files needed while reading
   *                           (e.g. fetched copies of remote entities)
   */
  CloseableIterator<InputRow> read(InputEntity source, File temporaryDirectory) throws IOException;

  /**
   * Reads the given entity for sampling, pairing each parsed result with its raw form
   * so parse failures can be surfaced alongside the offending input.
   *
   * @param source             the entity to read from
   * @param temporaryDirectory directory for any temporary files needed while reading
   */
  CloseableIterator<InputRowListPlusJson> sample(InputEntity source, File temporaryDirectory) throws IOException;
}
Loading