Merged
Changes from all commits
25 commits
a7a6fe0
Do stuff
zachjsh Apr 7, 2021
dcd9663
Do more stuff
zachjsh Apr 7, 2021
fd26c73
* Do more stuff
zachjsh Apr 7, 2021
ab45103
* Do more stuff
zachjsh Apr 7, 2021
78f2958
* working
zachjsh Apr 8, 2021
9f5bc94
* cleanup
zachjsh Apr 8, 2021
2f71523
* more cleanup
zachjsh Apr 8, 2021
22061c3
* more cleanup
zachjsh Apr 8, 2021
44f0707
* add license header
zachjsh Apr 8, 2021
ce168cf
* Add unit tests
zachjsh Apr 8, 2021
2d41899
* add java docs
zachjsh Apr 8, 2021
063d3f8
Merge remote-tracking branch 'apache/master' into hadoop-segment-inde…
zachjsh Apr 8, 2021
0b4bfbd
* add more unit tests
zachjsh Apr 9, 2021
c7fa3e8
* Cleanup test
zachjsh Apr 9, 2021
cf26d35
* Move removing of workingPath to index task rather than in hadoop job.
zachjsh Apr 9, 2021
2dd3c56
Merge remote-tracking branch 'apache/master' into hadoop-segment-inde…
zachjsh Apr 9, 2021
ed2e4ff
* Address review comments
zachjsh Apr 12, 2021
3692fe6
* remove unused import
zachjsh Apr 12, 2021
da82fa1
Merge remote-tracking branch 'apache/master' into hadoop-segment-inde…
zachjsh Apr 12, 2021
219ceb9
* Address review comments
zachjsh Apr 13, 2021
8d0ef20
Do not overwrite segment descriptor for segment if it already exists.
zachjsh Apr 16, 2021
b529b82
Merge remote-tracking branch 'apache/master' into hadoop-segment-inde…
zachjsh Apr 16, 2021
883402a
* add comments to FileSystemHelper class
zachjsh Apr 20, 2021
66713ee
Merge remote-tracking branch 'apache/master' into hadoop-segment-inde…
zachjsh Apr 20, 2021
de48276
* fix local hadoop integration test
zachjsh Apr 21, 2021
15 changes: 15 additions & 0 deletions indexing-hadoop/pom.xml
@@ -202,6 +202,21 @@
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-module-junit4</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.powermock</groupId>
<artifactId>powermock-api-easymock</artifactId>
<scope>test</scope>
</dependency>
</dependencies>

<build>
97 changes: 97 additions & 0 deletions indexing-hadoop/src/main/java/org/apache/druid/indexer/DataSegmentAndIndexZipFilePath.java
@@ -0,0 +1,97 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.indexer;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.apache.druid.timeline.DataSegment;

import java.util.List;
import java.util.Objects;

/**
* Holds a {@link DataSegment} together with the temporary file path where the corresponding index zip file is
* currently stored and the final path to which the index zip file should eventually be moved.
* See {@link JobHelper#renameIndexFilesForSegments(HadoopIngestionSpec, List)}.
*/
public class DataSegmentAndIndexZipFilePath
{
private final DataSegment segment;
private final String tmpIndexZipFilePath;
private final String finalIndexZipFilePath;

@JsonCreator
public DataSegmentAndIndexZipFilePath(
@JsonProperty("segment") DataSegment segment,
@JsonProperty("tmpIndexZipFilePath") String tmpIndexZipFilePath,
@JsonProperty("finalIndexZipFilePath") String finalIndexZipFilePath
)
{
this.segment = segment;
this.tmpIndexZipFilePath = tmpIndexZipFilePath;
this.finalIndexZipFilePath = finalIndexZipFilePath;
}

@JsonProperty
public DataSegment getSegment()
{
return segment;
}

@JsonProperty
public String getTmpIndexZipFilePath()
{
return tmpIndexZipFilePath;
}

@JsonProperty
public String getFinalIndexZipFilePath()
{
return finalIndexZipFilePath;
}

@Override
public boolean equals(Object o)
{
if (o instanceof DataSegmentAndIndexZipFilePath) {
DataSegmentAndIndexZipFilePath that = (DataSegmentAndIndexZipFilePath) o;
return segment.equals(((DataSegmentAndIndexZipFilePath) o).getSegment())
&& tmpIndexZipFilePath.equals(that.getTmpIndexZipFilePath())
&& finalIndexZipFilePath.equals(that.getFinalIndexZipFilePath());
}
return false;
}

@Override
public int hashCode()
{
return Objects.hash(segment.getId(), tmpIndexZipFilePath);
}

@Override
public String toString()
{
return "DataSegmentAndIndexZipFilePath{" +
"segment=" + segment +
", tmpIndexZipFilePath=" + tmpIndexZipFilePath +
", finalIndexZipFilePath=" + finalIndexZipFilePath +
'}';
}
}
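
The new holder above is written to and read back from the descriptor info directory as JSON, so a Jackson serde round trip is the natural sanity check for it. The sketch below is illustrative only: the test class name, the sample segment and paths, and the DefaultObjectMapper/PruneSpecsHolder setup follow common Druid test patterns and are not taken from this PR.

package org.apache.druid.indexer;

import com.fasterxml.jackson.databind.InjectableValues;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NumberedShardSpec;
import org.junit.Assert;
import org.junit.Test;

public class DataSegmentAndIndexZipFilePathSerdeSketch
{
  @Test
  public void testSerdeRoundTrip() throws Exception
  {
    // Configure a mapper the way Druid tests usually do so DataSegment can be deserialized.
    ObjectMapper mapper = new DefaultObjectMapper();
    mapper.setInjectableValues(
        new InjectableValues.Std()
            .addValue(DataSegment.PruneSpecsHolder.class, DataSegment.PruneSpecsHolder.DEFAULT)
    );

    DataSegment segment = DataSegment.builder()
                                     .dataSource("wikipedia")
                                     .interval(Intervals.of("2021-04-01/2021-04-02"))
                                     .version("v1")
                                     .shardSpec(new NumberedShardSpec(0, 1))
                                     .size(0)
                                     .build();

    DataSegmentAndIndexZipFilePath original = new DataSegmentAndIndexZipFilePath(
        segment,
        "/tmp/druid-indexing/workingPath/index.zip.0",  // hypothetical temporary location
        "/deep-storage/wikipedia/index.zip"             // hypothetical final location
    );

    String json = mapper.writeValueAsString(original);
    DataSegmentAndIndexZipFilePath roundTripped = mapper.readValue(json, DataSegmentAndIndexZipFilePath.class);

    // equals() compares the segment and both file paths, so this verifies all three fields survive serde.
    Assert.assertEquals(original, roundTripped);
  }
}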
38 changes: 38 additions & 0 deletions indexing-hadoop/src/main/java/org/apache/druid/indexer/FileSystemHelper.java
@@ -0,0 +1,38 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.apache.druid.indexer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.IOException;
import java.net.URI;

/**
* This class exists for testing purposes; see {@link JobHelperPowerMockTest}. Mocking the
* raw {@link FileSystem} class directly resulted in javassist errors.
*/
public class FileSystemHelper
{
public static FileSystem get(URI uri, Configuration conf) throws IOException
Contributor: This class seems unnecessary.

Contributor (Author): I needed it for the test that I'm using it in. I wasn't able to mock the raw FileSystem.get routine; I kept running into javassist issues.

Contributor: Can you add a comment here about that?

Contributor (Author): Added a comment.
{
return FileSystem.get(uri, conf);
}
}
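
For context on the review thread above, this is roughly how a static indirection like FileSystemHelper.get can be mocked with the PowerMock and EasyMock test dependencies added in the pom change. The test class name and assertions are illustrative; the PR's actual JobHelperPowerMockTest may be structured differently.

package org.apache.druid.indexer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.easymock.PowerMock;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import java.net.URI;

@RunWith(PowerMockRunner.class)
@PrepareForTest(FileSystemHelper.class)
public class FileSystemHelperMockSketch
{
  @Test
  public void testStaticGetCanBeMocked() throws Exception
  {
    FileSystem mockFs = EasyMock.createMock(FileSystem.class);

    // Intercept the static factory method instead of FileSystem.get() itself.
    PowerMock.mockStatic(FileSystemHelper.class);
    EasyMock.expect(
        FileSystemHelper.get(EasyMock.anyObject(URI.class), EasyMock.anyObject(Configuration.class))
    ).andReturn(mockFs);
    PowerMock.replayAll();

    // Code under test that calls FileSystemHelper.get(...) now receives the mock.
    FileSystem result = FileSystemHelper.get(URI.create("hdfs://namenode/tmp"), new Configuration());

    Assert.assertSame(mockFs, result);
    PowerMock.verifyAll();
  }
}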
@@ -59,7 +59,12 @@ public boolean run()
if (config.isDeterminingPartitions()) {
job = createPartitionJob(config);
config.setHadoopJobIdFileName(hadoopJobIdFile);
return JobHelper.runSingleJob(job, config);
boolean jobSucceeded = JobHelper.runSingleJob(job);
JobHelper.maybeDeleteIntermediatePath(
jobSucceeded,
config.getSchema()
);
return jobSucceeded;
} else {
final PartitionsSpec partitionsSpec = config.getPartitionsSpec();
final int shardsPerInterval;
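
With this change the caller decides when to clean up the intermediate working directory instead of runSingleJob doing it implicitly. The real helper is JobHelper.maybeDeleteIntermediatePath in this PR; the sketch below only illustrates the kind of gating such cleanup typically applies (leaveIntermediate and cleanupOnFailure from the tuning config). The class name, method body, and the choice to delete the whole workingPath are assumptions for illustration; the actual helper likely scopes deletion to the job's intermediate sub-path.

package org.apache.druid.indexer;

import org.apache.druid.java.util.common.logger.Logger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

import java.io.IOException;

// Illustrative only; not the PR's actual JobHelper.maybeDeleteIntermediatePath.
public class IntermediatePathCleanupSketch
{
  private static final Logger log = new Logger(IntermediatePathCleanupSketch.class);

  public static void maybeDeleteIntermediatePath(boolean jobSucceeded, HadoopIngestionSpec spec)
  {
    HadoopTuningConfig tuningConfig = spec.getTuningConfig();
    if (tuningConfig.isLeaveIntermediate()) {
      // The user explicitly asked to keep intermediate files.
      return;
    }
    if (!jobSucceeded && !tuningConfig.isCleanupOnFailure()) {
      // Keep intermediate files around for debugging a failed job.
      return;
    }
    try {
      Path workingPath = new Path(tuningConfig.getWorkingPath());
      workingPath.getFileSystem(new Configuration()).delete(workingPath, true);
    }
    catch (IOException e) {
      // Cleanup is best-effort; a failure to delete should not fail the task.
      log.warn(e, "Failed to delete intermediate path [%s]", tuningConfig.getWorkingPath());
    }
  }
}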
@@ -22,7 +22,6 @@
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.timeline.DataSegment;

import javax.annotation.Nullable;
import java.util.ArrayList;
@@ -40,7 +39,7 @@ public class HadoopDruidIndexerJob implements Jobby
@Nullable
private IndexGeneratorJob indexJob;
@Nullable
private volatile List<DataSegment> publishedSegments = null;
private volatile List<DataSegmentAndIndexZipFilePath> publishedSegmentAndIndexZipFilePaths = null;
@Nullable
private String hadoopJobIdFile;

@@ -91,14 +90,14 @@ public boolean run()
@Override
public boolean run()
{
publishedSegments = IndexGeneratorJob.getPublishedSegments(config);
publishedSegmentAndIndexZipFilePaths = IndexGeneratorJob.getPublishedSegmentAndIndexZipFilePaths(config);
return true;
}
}
);

config.setHadoopJobIdFileName(hadoopJobIdFile);
return JobHelper.runJobs(jobs, config);
return JobHelper.runJobs(jobs);
}

@Override
@@ -122,12 +121,12 @@ public String getErrorMessage()
return indexJob.getErrorMessage();
}

public List<DataSegment> getPublishedSegments()
public List<DataSegmentAndIndexZipFilePath> getPublishedSegmentAndIndexZipFilePaths()
{
if (publishedSegments == null) {
if (publishedSegmentAndIndexZipFilePaths == null) {
throw new IllegalStateException("Job hasn't run yet. No segments have been published yet.");
}
return publishedSegments;
return publishedSegmentAndIndexZipFilePaths;
}

public void setHadoopJobIdFile(String hadoopJobIdFile)
@@ -102,24 +102,24 @@ public class IndexGeneratorJob implements Jobby
{
private static final Logger log = new Logger(IndexGeneratorJob.class);

public static List<DataSegment> getPublishedSegments(HadoopDruidIndexerConfig config)
public static List<DataSegmentAndIndexZipFilePath> getPublishedSegmentAndIndexZipFilePaths(HadoopDruidIndexerConfig config)
{
final Configuration conf = JobHelper.injectSystemProperties(new Configuration(), config);
config.addJobProperties(conf);

final ObjectMapper jsonMapper = HadoopDruidIndexerConfig.JSON_MAPPER;

ImmutableList.Builder<DataSegment> publishedSegmentsBuilder = ImmutableList.builder();
ImmutableList.Builder<DataSegmentAndIndexZipFilePath> publishedSegmentAndIndexZipFilePathsBuilder = ImmutableList.builder();

final Path descriptorInfoDir = config.makeDescriptorInfoDir();

try {
FileSystem fs = descriptorInfoDir.getFileSystem(conf);

for (FileStatus status : fs.listStatus(descriptorInfoDir)) {
final DataSegment segment = jsonMapper.readValue((InputStream) fs.open(status.getPath()), DataSegment.class);
publishedSegmentsBuilder.add(segment);
log.info("Adding segment %s to the list of published segments", segment.getId());
final DataSegmentAndIndexZipFilePath segmentAndIndexZipFilePath = jsonMapper.readValue((InputStream) fs.open(status.getPath()), DataSegmentAndIndexZipFilePath.class);
publishedSegmentAndIndexZipFilePathsBuilder.add(segmentAndIndexZipFilePath);
log.info("Adding segment %s to the list of published segments", segmentAndIndexZipFilePath.getSegment().getId());
}
}
catch (FileNotFoundException e) {
@@ -133,9 +133,9 @@ public static List<DataSegment> getPublishedSegments(HadoopDruidIndexerConfig co
catch (IOException e) {
throw new RuntimeException(e);
}
List<DataSegment> publishedSegments = publishedSegmentsBuilder.build();
List<DataSegmentAndIndexZipFilePath> publishedSegmentAndIndexZipFilePaths = publishedSegmentAndIndexZipFilePathsBuilder.build();

return publishedSegments;
return publishedSegmentAndIndexZipFilePaths;
}

private final HadoopDruidIndexerConfig config;
@@ -809,7 +809,7 @@ public void doRun()
0
);

final DataSegment segment = JobHelper.serializeOutIndex(
final DataSegmentAndIndexZipFilePath segmentAndIndexZipFilePath = JobHelper.serializeOutIndex(
segmentTemplate,
context.getConfiguration(),
context,
@@ -831,7 +831,7 @@
HadoopDruidIndexerConfig.DATA_SEGMENT_PUSHER
);

Path descriptorPath = config.makeDescriptorInfoPath(segment);
Path descriptorPath = config.makeDescriptorInfoPath(segmentAndIndexZipFilePath.getSegment());
descriptorPath = JobHelper.prependFSIfNullScheme(
FileSystem.get(
descriptorPath.toUri(),
@@ -842,7 +842,7 @@
log.info("Writing descriptor to path[%s]", descriptorPath);
JobHelper.writeSegmentDescriptor(
config.makeDescriptorInfoDir().getFileSystem(context.getConfiguration()),
segment,
segmentAndIndexZipFilePath,
descriptorPath,
context
);