Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
d5e3127
IndexerSQLMetadataStorageCoordinator.getTimelineForIntervalsWithHandl…
leventov Sep 20, 2019
94373ad
Add VersionedIntervalTimeline.findNonOvershadowedObjectsInInterval() …
leventov Sep 27, 2019
085d934
Fix tests
leventov Oct 1, 2019
1de4204
More fixes
leventov Oct 1, 2019
b45ad10
Add javadoc notes about returning Collection instead of Set. Add Jack…
leventov Oct 1, 2019
b4a67db
Merge remote-tracking branch 'upstream/master' into refactor-getUsedS…
leventov Oct 2, 2019
58482c9
Fix KinesisIndexTaskTest, factor out common parts from KinesisIndexTa…
leventov Oct 7, 2019
697cb94
More test fixes
leventov Oct 8, 2019
cd4a400
More test fixes
leventov Oct 8, 2019
f3543b2
Merge remote-tracking branch 'upstream/master' into refactor-getUsedS…
leventov Oct 11, 2019
b1897ad
Add a comment to VersionedIntervalTimelineTestBase
leventov Oct 11, 2019
dc657c0
Merge remote-tracking branch 'upstream/master' into refactor-getUsedS…
leventov Oct 21, 2019
7a8a123
Fix tests
leventov Oct 24, 2019
bd8be52
Set DataSegment.size(0) in more tests
leventov Oct 24, 2019
178f083
Specify DataSegment.size(0) in more places in tests
leventov Oct 25, 2019
998def8
Fix more tests
leventov Oct 27, 2019
186d751
Merge remote-tracking branch 'upstream/master' into refactor-getUsedS…
leventov Oct 27, 2019
91545c7
Fix DruidSchemaTest
leventov Oct 27, 2019
7137805
Set DataSegment's size in more tests and benchmarks
leventov Oct 28, 2019
3053f8a
Fix HdfsDataSegmentPusherTest
leventov Oct 28, 2019
1c45ecd
Doc changes addressing comments
leventov Oct 30, 2019
2fc7c5b
Extended doc for visibility
leventov Oct 31, 2019
448685b
Typo
leventov Oct 31, 2019
023e96c
Typo 2
leventov Oct 31, 2019
96cf4d8
Address comment
leventov Nov 6, 2019
e8dc564
Merge remote-tracking branch 'upstream/master' into refactor-getUsedS…
leventov Nov 6, 2019
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletions benchmarks/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,10 @@
<artifactId>json-flattener</artifactId>
<version>0.1.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-smile</artifactId>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@ public void setup()
.interval(schemaInfo.getDataInterval())
.version("1")
.shardSpec(new LinearShardSpec(0))
.size(0)
.build();

final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,7 @@ public void setup()
.interval(schemaInfo.getDataInterval())
.version("1")
.shardSpec(new LinearShardSpec(0))
.size(0)
.build();

final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -199,6 +199,7 @@ public void setup()
.interval(schemaInfo.getDataInterval())
.version("1")
.shardSpec(new LinearShardSpec(i))
.size(0)
.build();
final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
LOG.info("Starting benchmark setup using cacheDir[%s], rows[%,d].", segmentGenerator.getCacheDir(), rowsPerSegment);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,11 @@

package org.apache.druid.benchmark.query;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Function;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.java.util.common.jackson.JacksonUtils;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.context.ResponseContext;
Expand Down Expand Up @@ -52,17 +53,12 @@ public Sequence<T> run(
{
return Sequences.map(
baseRunner.run(queryPlus, responseContext),
new Function<T, T>()
{
@Override
public T apply(T input)
{
try {
return smileMapper.readValue(smileMapper.writeValueAsBytes(input), clazz);
}
catch (Exception e) {
throw new RuntimeException(e);
}
input -> {
try {
return JacksonUtils.readValue(smileMapper, smileMapper.writeValueAsBytes(input), clazz);
}
catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
}
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -173,6 +173,7 @@ public void setup()
.interval(schemaInfo.getDataInterval())
.version("1")
.shardSpec(new LinearShardSpec(0))
.size(0)
.build();

final PlannerConfig plannerConfig = new PlannerConfig();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ public void setup()
.interval(schemaInfo.getDataInterval())
.version("1")
.shardSpec(new LinearShardSpec(0))
.size(0)
.build();

final SegmentGenerator segmentGenerator = closer.register(new SegmentGenerator());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import org.apache.druid.audit.AuditInfo;
import org.apache.druid.audit.AuditManager;
import org.apache.druid.common.config.ConfigManager.SetResult;
import org.apache.druid.java.util.common.jackson.JacksonUtils;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
Expand Down Expand Up @@ -115,12 +116,7 @@ public T deserialize(byte[] bytes)
return defaultVal;
}

try {
return jsonMapper.readValue(bytes, clazz);
}
catch (IOException e) {
throw new RuntimeException(e);
}
return JacksonUtils.readValue(jsonMapper, bytes, clazz);
}
};
}
Expand Down
13 changes: 8 additions & 5 deletions core/src/main/java/org/apache/druid/indexer/TaskStatus.java
Original file line number Diff line number Diff line change
Expand Up @@ -67,9 +67,11 @@ public static TaskStatus fromCode(String taskId, TaskState code)
return new TaskStatus(taskId, code, -1, null, null);
}

// The error message can be large, so truncate it to avoid storing large objects in zookeeper/metadata storage.
// The full error message will be available via a TaskReport.
private static String truncateErrorMsg(String errorMsg)
/**
* The error message can be large, so truncate it to avoid storing large objects in zookeeper/metadata storage.
* The full error message will be available via a TaskReport.
*/
private static @Nullable String truncateErrorMsg(@Nullable String errorMsg)
{
if (errorMsg != null && errorMsg.length() > MAX_ERROR_MSG_LENGTH) {
return errorMsg.substring(0, MAX_ERROR_MSG_LENGTH) + "...";
Expand All @@ -81,15 +83,15 @@ private static String truncateErrorMsg(String errorMsg)
private final String id;
private final TaskState status;
private final long duration;
private final String errorMsg;
private final @Nullable String errorMsg;
private final TaskLocation location;

@JsonCreator
protected TaskStatus(
@JsonProperty("id") String id,
@JsonProperty("status") TaskState status,
@JsonProperty("duration") long duration,
@JsonProperty("errorMsg") String errorMsg,
@JsonProperty("errorMsg") @Nullable String errorMsg,
@Nullable @JsonProperty("location") TaskLocation location
)
{
Expand Down Expand Up @@ -122,6 +124,7 @@ public long getDuration()
return duration;
}

@Nullable
@JsonProperty("errorMsg")
public String getErrorMsg()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,32 @@
package org.apache.druid.java.util.common.jackson;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Map;

public class JacksonUtils
public final class JacksonUtils
{
  public static final TypeReference<Map<String, Object>> TYPE_REFERENCE_MAP_STRING_OBJECT = new TypeReference<Map<String, Object>>()
  {
  };
  public static final TypeReference<Map<String, String>> TYPE_REFERENCE_MAP_STRING_STRING = new TypeReference<Map<String, String>>()
  {
  };

  /**
   * Deserializes {@code bytes} into an object of {@code valueClass} using the given {@code mapper}, rethrowing
   * Jackson's checked {@link IOException} as an {@link UncheckedIOException}. UncheckedIOException is a subclass of
   * {@link RuntimeException}, so existing callers that catch RuntimeException continue to work, while the original
   * cause is preserved for diagnostics.
   *
   * @param mapper     the ObjectMapper used for deserialization
   * @param bytes      the serialized bytes to read
   * @param valueClass the class of the value to deserialize
   * @return the deserialized value
   * @throws UncheckedIOException if deserialization fails
   */
  public static <T> T readValue(ObjectMapper mapper, byte[] bytes, Class<T> valueClass)
  {
    try {
      return mapper.readValue(bytes, valueClass);
    }
    catch (IOException e) {
      // Preserve the cause instead of wrapping in a bare RuntimeException.
      throw new UncheckedIOException(e);
    }
  }

  private JacksonUtils()
  {
    // Utility class; no instances.
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,7 @@ public DataSegment(
? null
: prepareCompactionState(lastCompactionState);
this.binaryVersion = binaryVersion;
Preconditions.checkArgument(size >= 0);
this.size = size;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,28 +17,17 @@
* under the License.
*/

package org.apache.druid.indexer.path;

import org.apache.druid.timeline.DataSegment;
import org.joda.time.Interval;

import java.io.IOException;
import java.util.List;
package org.apache.druid.timeline;

/**
 * This enum is used as a parameter for several methods in {@link VersionedIntervalTimeline}, specifying whether only
 * {@linkplain org.apache.druid.timeline.partition.PartitionHolder#isComplete() complete} partitions should be
 * considered, or incomplete partitions as well.
 */
public enum Partitions
{
  /** Specifies that only complete partitions should be considered. */
  ONLY_COMPLETE,
  /** Specifies that complete as well as incomplete partitions should be considered. */
  INCOMPLETE_OK
}
Loading