diff --git a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java index b54c9af1b2f7..fa2466e825c9 100644 --- a/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java +++ b/extensions-contrib/distinctcount/src/test/java/org/apache/druid/query/aggregation/distinctcount/DistinctCountTopNQueryTest.java @@ -133,7 +133,7 @@ public void testTopNWithDistinctCountAgg() throws Exception final Iterable> results = engine.query( query, - new IncrementalIndexSegment(index, SegmentId.dummy(QueryRunnerTestHelper.DATA_SOURCE)), + new IncrementalIndexSegment(index, SegmentId.simpleTable(QueryRunnerTestHelper.DATA_SOURCE)), null ).toList(); diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/ParseExceptionUtils.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/ParseExceptionUtils.java index 0911059b364f..110618039c05 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/ParseExceptionUtils.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/ParseExceptionUtils.java @@ -21,12 +21,9 @@ import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.msq.input.external.ExternalSegment; -import org.apache.druid.msq.input.inline.InlineInputSliceReader; -import org.apache.druid.query.lookup.LookupSegment; -import org.apache.druid.segment.QueryableIndexSegment; import org.apache.druid.segment.Segment; +import org.apache.druid.timeline.SegmentId; -import javax.annotation.Nullable; /** * Utility class containing methods that help in generating the {@link org.apache.druid.sql.calcite.parser.ParseException} @@ -39,7 +36,6 @@ public class ParseExceptionUtils * Given a 
segment, this returns the human-readable description of the segment which can allow user to figure out the * source of the parse exception */ - @Nullable public static String generateReadableInputSourceNameFromMappedSegment(Segment segment) { if (segment instanceof ExternalSegment) { @@ -47,14 +43,13 @@ public static String generateReadableInputSourceNameFromMappedSegment(Segment se "external input source: %s", ((ExternalSegment) segment).externalInputSource().toString() ); - } else if (segment instanceof LookupSegment) { - return StringUtils.format("lookup input source: %s", segment.getId().getDataSource()); - } else if (segment instanceof QueryableIndexSegment) { - return StringUtils.format("table input source: %s", segment.getId().getDataSource()); - } else if (InlineInputSliceReader.SEGMENT_ID.equals(segment.getId().getDataSource())) { - return "inline input source"; } - return null; + SegmentId segmentId = segment.getId(); + return StringUtils.format( + "%s input source: %s", + StringUtils.toLowerCase(segmentId.getDataSourceType().name()), + segmentId + ); } } diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalInputSliceReader.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalInputSliceReader.java index 2a863fa55257..f78af92ebe81 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalInputSliceReader.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalInputSliceReader.java @@ -51,7 +51,6 @@ import org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.incremental.SimpleRowIngestionMeters; -import org.apache.druid.timeline.SegmentId; import java.io.File; import java.io.IOException; @@ -65,7 +64,6 @@ */ public class ExternalInputSliceReader implements InputSliceReader { - public static 
final String SEGMENT_ID = "__external"; private final File temporaryDirectory; public ExternalInputSliceReader(final File temporaryDirectory) @@ -153,7 +151,6 @@ private static Iterator inputSourceSegmentIterator( reader = inputSource.reader(schema, inputFormat, temporaryDirectory); } - final SegmentId segmentId = SegmentId.dummy(SEGMENT_ID); final Segment segment = new ExternalSegment( inputSource, reader, @@ -165,7 +162,7 @@ private static Iterator inputSourceSegmentIterator( ); return new SegmentWithDescriptor( () -> ResourceHolder.fromCloseable(new CompleteSegment(null, segment)), - new RichSegmentDescriptor(segmentId.toDescriptor(), null) + new RichSegmentDescriptor(ExternalSegment.SEGMENT_ID.toDescriptor(), null) ); } ); diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalSegment.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalSegment.java index 3aae435b8d99..2b8a63a7fba0 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalSegment.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/external/ExternalSegment.java @@ -42,10 +42,9 @@ */ public class ExternalSegment extends RowBasedSegment { - + public static final SegmentId SEGMENT_ID = SegmentId.simple(SegmentId.DataSourceType.EXTERNAL); private final InputSource inputSource; private final RowSignature signature; - public static final String SEGMENT_ID = "__external"; /** * @param inputSource {@link InputSource} that the segment is a representation of @@ -67,7 +66,7 @@ public ExternalSegment( ) { super( - SegmentId.dummy(SEGMENT_ID), + SEGMENT_ID, new BaseSequence<>( new BaseSequence.IteratorMaker>() { diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/inline/InlineInputSliceReader.java 
b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/inline/InlineInputSliceReader.java index 8a05ce1527e4..97322063d405 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/inline/InlineInputSliceReader.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/inline/InlineInputSliceReader.java @@ -33,7 +33,6 @@ import org.apache.druid.segment.CompleteSegment; import org.apache.druid.segment.InlineSegmentWrangler; import org.apache.druid.segment.SegmentWrangler; -import org.apache.druid.timeline.SegmentId; import java.util.function.Consumer; @@ -43,10 +42,6 @@ */ public class InlineInputSliceReader implements InputSliceReader { - public static final String SEGMENT_ID = "__inline"; - private static final RichSegmentDescriptor DUMMY_SEGMENT_DESCRIPTOR - = new RichSegmentDescriptor(SegmentId.dummy(SEGMENT_ID).toDescriptor(), null); - private final SegmentWrangler segmentWrangler; public InlineInputSliceReader(SegmentWrangler segmentWrangler) @@ -76,7 +71,7 @@ public ReadableInputs attach( segment -> ReadableInput.segment( new SegmentWithDescriptor( () -> ResourceHolder.fromCloseable(new CompleteSegment(null, segment)), - DUMMY_SEGMENT_DESCRIPTOR + new RichSegmentDescriptor(InlineSegmentWrangler.SEGMENT_ID.toDescriptor(), null) ) ) ) diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/lookup/LookupInputSliceReader.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/lookup/LookupInputSliceReader.java index 85f0b10718db..fc1f77cae8a2 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/lookup/LookupInputSliceReader.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/input/lookup/LookupInputSliceReader.java @@ -32,6 +32,7 @@ import org.apache.druid.msq.input.table.RichSegmentDescriptor; import org.apache.druid.msq.input.table.SegmentWithDescriptor; import 
org.apache.druid.query.LookupDataSource; +import org.apache.druid.query.lookup.LookupSegment; import org.apache.druid.segment.CompleteSegment; import org.apache.druid.segment.Segment; import org.apache.druid.segment.SegmentWrangler; @@ -71,6 +72,7 @@ public ReadableInputs attach( ) { final String lookupName = ((LookupInputSlice) slice).getLookupName(); + final SegmentId lookupSegmentId = LookupSegment.buildSegmentId(lookupName); return ReadableInputs.segments( () -> Iterators.singletonIterator( @@ -101,7 +103,7 @@ public ReadableInputs attach( return ResourceHolder.fromCloseable(new CompleteSegment(null, segment)); }, - new RichSegmentDescriptor(SegmentId.dummy(lookupName).toDescriptor(), null) + new RichSegmentDescriptor(lookupSegmentId.toDescriptor(), null) ) ) ) diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/groupby/GroupByPreShuffleFrameProcessor.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/groupby/GroupByPreShuffleFrameProcessor.java index ad456a45f5b1..7b16b2220ba5 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/groupby/GroupByPreShuffleFrameProcessor.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/groupby/GroupByPreShuffleFrameProcessor.java @@ -58,7 +58,6 @@ import org.apache.druid.segment.SegmentReference; import org.apache.druid.segment.TimeBoundaryInspector; import org.apache.druid.segment.column.RowSignature; -import org.apache.druid.timeline.SegmentId; import java.io.IOException; import java.nio.ByteBuffer; @@ -187,7 +186,7 @@ protected ReturnOrAwait runWithInputChannel( if (inputChannel.canRead()) { final Frame frame = inputChannel.read(); - final FrameSegment frameSegment = new FrameSegment(frame, inputFrameReader, SegmentId.dummy("x")); + final FrameSegment frameSegment = new FrameSegment(frame, inputFrameReader, "x"); final SegmentReference mappedSegment = mapSegment(frameSegment); 
final Sequence rowSequence = diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/results/ExportResultsFrameProcessor.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/results/ExportResultsFrameProcessor.java index 2c4a92efef6c..04a06e2a6407 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/results/ExportResultsFrameProcessor.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/results/ExportResultsFrameProcessor.java @@ -46,7 +46,6 @@ import org.apache.druid.sql.calcite.run.SqlResults; import org.apache.druid.sql.http.ResultFormat; import org.apache.druid.storage.StorageConnector; -import org.apache.druid.timeline.SegmentId; import java.io.IOException; import java.io.OutputStream; @@ -151,7 +150,7 @@ public ReturnOrAwait runIncrementally(IntSet readableInputs) throws IOEx private void exportFrame(final Frame frame) { - final Segment segment = new FrameSegment(frame, frameReader, SegmentId.dummy("test")); + final Segment segment = new FrameSegment(frame, frameReader, "test"); try (final CursorHolder cursorHolder = segment.asCursorFactory().makeCursorHolder(CursorBuildSpec.FULL_SCAN)) { final Cursor cursor = cursorHolder.asCursor(); if (cursor == null) { diff --git a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/scan/ScanQueryFrameProcessor.java b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/scan/ScanQueryFrameProcessor.java index 3f42e6c8e5f7..8fd0f0a28a12 100644 --- a/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/scan/ScanQueryFrameProcessor.java +++ b/extensions-core/multi-stage-query/src/main/java/org/apache/druid/msq/querykit/scan/ScanQueryFrameProcessor.java @@ -78,7 +78,6 @@ import org.apache.druid.segment.VirtualColumn; import org.apache.druid.segment.VirtualColumns; import 
org.apache.druid.segment.column.RowSignature; -import org.apache.druid.timeline.SegmentId; import org.apache.druid.utils.CloseableUtils; import javax.annotation.Nullable; @@ -313,7 +312,7 @@ protected ReturnOrAwait runWithInputChannel( if (cursor == null || cursor.isDone()) { if (inputChannel.canRead()) { final Frame frame = inputChannel.read(); - final FrameSegment frameSegment = new FrameSegment(frame, inputFrameReader, SegmentId.dummy("scan")); + final FrameSegment frameSegment = new FrameSegment(frame, inputFrameReader, "scan"); final Segment mappedSegment = mapSegment(frameSegment); final CursorFactory cursorFactory = mappedSegment.asCursorFactory(); diff --git a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/sql/MSQTaskQueryMakerTest.java b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/sql/MSQTaskQueryMakerTest.java index ddb266889e8f..9483006ede7e 100644 --- a/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/sql/MSQTaskQueryMakerTest.java +++ b/extensions-core/multi-stage-query/src/test/java/org/apache/druid/msq/sql/MSQTaskQueryMakerTest.java @@ -57,7 +57,10 @@ import org.apache.druid.msq.test.MSQTestTaskActionClient; import org.apache.druid.query.Druids; import org.apache.druid.query.ForwardingQueryProcessingPool; +import org.apache.druid.query.InlineDataSource; import org.apache.druid.query.JoinDataSource; +import org.apache.druid.query.LookupDataSource; +import org.apache.druid.query.OrderBy; import org.apache.druid.query.Query; import org.apache.druid.query.QueryContext; import org.apache.druid.query.QueryDataSource; @@ -388,6 +391,75 @@ public void testUnnestOnRestrictedPassedPolicyValidation() throws Exception ); } + @Test + public void testInlineDataSourcePassedPolicyValidation() throws Exception + { + // Arrange + policyEnforcer = new RestrictAllTablesPolicyEnforcer(null); + RowSignature resultSignature = RowSignature.builder() + .add("EXPR$0", ColumnType.LONG) + .build(); + 
fieldMapping = buildFieldMapping(resultSignature); + InlineDataSource inlineDataSource = InlineDataSource.fromIterable( + ImmutableList.of(new Object[]{2L}), + resultSignature + ); + Query query = new Druids.ScanQueryBuilder().resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) + .dataSource(inlineDataSource) + .eternityInterval() + .columns(resultSignature.getColumnNames()) + .columnTypes(resultSignature.getColumnTypes()) + .build(); + DruidQuery druidQueryMock = buildDruidQueryMock(query, resultSignature); + // Act + msqTaskQueryMaker = getMSQTaskQueryMaker(); + QueryResponse response = msqTaskQueryMaker.runQuery(druidQueryMock); + // Assert + String taskId = (String) Iterables.getOnlyElement(response.getResults().toList())[0]; + MSQTaskReportPayload payload = (MSQTaskReportPayload) fakeOverlordClient.taskReportAsMap(taskId) + .get() + .get(MSQTaskReport.REPORT_KEY) + .getPayload(); + Assert.assertTrue(payload.getStatus().getStatus().isSuccess()); + ImmutableList expectedResults = ImmutableList.of(new Object[]{2L}); + assertResultsEquals("select 1 + 1", expectedResults, payload.getResults().getResults()); + } + + @Test + public void testLookupDataSourcePassedPolicyValidation() throws Exception + { + // Arrange + policyEnforcer = new RestrictAllTablesPolicyEnforcer(null); + final RowSignature resultSignature = RowSignature.builder().add("v", ColumnType.STRING).build(); + fieldMapping = buildFieldMapping(resultSignature); + Query query = new Druids.ScanQueryBuilder().resultFormat(ScanQuery.ResultFormat.RESULT_FORMAT_COMPACTED_LIST) + .eternityInterval() + .dataSource(new LookupDataSource("lookyloo")) + .columns(resultSignature.getColumnNames()) + .columnTypes(resultSignature.getColumnTypes()) + .orderBy(ImmutableList.of(OrderBy.ascending("v"))) + .build(); + DruidQuery druidQueryMock = buildDruidQueryMock(query, resultSignature); + // Act + msqTaskQueryMaker = getMSQTaskQueryMaker(); + QueryResponse response = 
msqTaskQueryMaker.runQuery(druidQueryMock); + // Assert + String taskId = (String) Iterables.getOnlyElement(response.getResults().toList())[0]; + MSQTaskReportPayload payload = (MSQTaskReportPayload) fakeOverlordClient.taskReportAsMap(taskId) + .get() + .get(MSQTaskReport.REPORT_KEY) + .getPayload(); + // Assert + Assert.assertTrue(payload.getStatus().getStatus().isSuccess()); + ImmutableList expectedResults = ImmutableList.of( + new Object[]{"mysteryvalue"}, + new Object[]{"x6"}, + new Object[]{"xa"}, + new Object[]{"xabc"} + ); + assertResultsEquals("select v from lookyloo", expectedResults, payload.getResults().getResults()); + } + @Test public void testJoinFailWithPolicyValidationOnLeftChild() throws Exception { diff --git a/processing/src/main/java/org/apache/druid/frame/segment/FrameSegment.java b/processing/src/main/java/org/apache/druid/frame/segment/FrameSegment.java index 3df54bc184da..1591a548cc92 100644 --- a/processing/src/main/java/org/apache/druid/frame/segment/FrameSegment.java +++ b/processing/src/main/java/org/apache/druid/frame/segment/FrameSegment.java @@ -43,6 +43,17 @@ public class FrameSegment implements Segment private final FrameReader frameReader; private final SegmentId segmentId; + public FrameSegment(Frame frame, FrameReader frameReader, String version) + { + this.frame = frame; + this.frameReader = frameReader; + this.segmentId = SegmentId.simple(SegmentId.DataSourceType.FRAME).withVersion(version); + } + + /** + * @deprecated use {@link #FrameSegment(Frame, FrameReader, String)} instead + */ + @Deprecated public FrameSegment(Frame frame, FrameReader frameReader, SegmentId segmentId) { this.frame = frame; diff --git a/processing/src/main/java/org/apache/druid/query/lookup/LookupSegment.java b/processing/src/main/java/org/apache/druid/query/lookup/LookupSegment.java index 58c64c47239d..4241b86ec65a 100644 --- a/processing/src/main/java/org/apache/druid/query/lookup/LookupSegment.java +++ 
b/processing/src/main/java/org/apache/druid/query/lookup/LookupSegment.java @@ -44,10 +44,18 @@ public class LookupSegment extends RowBasedSegment> .add(LookupColumnSelectorFactory.VALUE_COLUMN, ColumnType.STRING) .build(); + /** + * Builds a {@link SegmentId} for a lookup segment. + */ + public static SegmentId buildSegmentId(String version) + { + return SegmentId.simple(SegmentId.DataSourceType.LOOKUP).withVersion(version); + } + public LookupSegment(final String lookupName, final LookupExtractorFactory lookupExtractorFactory) { super( - SegmentId.dummy(lookupName), + buildSegmentId(lookupName), Sequences.simple(() -> { final LookupExtractor extractor = lookupExtractorFactory.get(); diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 23e583214c84..7981a80152d4 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -296,12 +296,12 @@ public static SegmentAnalysis mergeAnalyses( if (id1 != null && id2 != null) { if (id2.getIntervalEnd().isAfter(id1.getIntervalEnd()) || (id2.getIntervalEnd().isEqual(id1.getIntervalEnd()) && id2.getPartitionNum() > id1.getPartitionNum())) { - mergedSegmentId = SegmentId.merged(dataSource, id2.getInterval(), id2.getPartitionNum()); + mergedSegmentId = id2.withVersion(SegmentId.MERGED_VERSION); final SegmentAnalysis tmp = arg1; arg1 = arg2; arg2 = tmp; } else { - mergedSegmentId = SegmentId.merged(dataSource, id1.getInterval(), id1.getPartitionNum()); + mergedSegmentId = id1.withVersion(SegmentId.MERGED_VERSION); } break; } @@ -426,7 +426,7 @@ public static SegmentAnalysis mergeAnalyses( if (arg1.getId() != null && arg2.getId() != null && arg1.getId().equals(arg2.getId())) { mergedId = arg1.getId(); } else { - 
mergedId = mergedSegmentId == null ? "merged" : mergedSegmentId.toString(); + mergedId = mergedSegmentId == null ? SegmentId.MERGED_VERSION : mergedSegmentId.toString(); } final Boolean rollup; diff --git a/processing/src/main/java/org/apache/druid/query/policy/PolicyEnforcer.java b/processing/src/main/java/org/apache/druid/query/policy/PolicyEnforcer.java index 4b34b96c23b1..8479338028ad 100644 --- a/processing/src/main/java/org/apache/druid/query/policy/PolicyEnforcer.java +++ b/processing/src/main/java/org/apache/druid/query/policy/PolicyEnforcer.java @@ -27,6 +27,8 @@ import org.apache.druid.query.TableDataSource; import org.apache.druid.segment.ReferenceCountingSegment; import org.apache.druid.segment.SegmentReference; +import org.apache.druid.timeline.SegmentId; + /** * Interface for enforcing policies on data sources and segments in Druid queries. @@ -77,14 +79,31 @@ default void validateOrElseThrow(TableDataSource ds, Policy policy) throws Druid */ default void validateOrElseThrow(ReferenceCountingSegment segment, Policy policy) throws DruidException { - // Validation will always fail on lookups, external, and inline segments, because they will not have policies applied (except for NoopPolicyEnforcer). - // This is a temporary solution since we don't have a perfect way to identify segments that are backed by a regular table yet. 
+ SegmentId segmentId = segment.getId(); + // This can happen if the segment is already closed + if (segmentId == null) { + return; + } + switch (segmentId.getDataSourceType()) { + case TABLE: + // Table segment needs to be validated + break; + case LOOKUP: + case INLINE: + case EXTERNAL: + case FRAME: + // Policy is not applicable, return early + return; + default: + throw DruidException.defensive("unreachable"); + } + if (validate(policy)) { return; } throw DruidException.forPersona(DruidException.Persona.OPERATOR) .ofCategory(DruidException.Category.FORBIDDEN) - .build("Failed security validation with segment [%s]", segment.getId()); + .build("Failed security validation with segment [%s]", segmentId); } /** diff --git a/processing/src/main/java/org/apache/druid/timeline/SegmentId.java b/processing/src/main/java/org/apache/druid/timeline/SegmentId.java index 72ca414d4df9..d070df053e4e 100644 --- a/processing/src/main/java/org/apache/druid/timeline/SegmentId.java +++ b/processing/src/main/java/org/apache/druid/timeline/SegmentId.java @@ -23,13 +23,16 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Splitter; +import com.google.common.base.Strings; import com.google.common.collect.Interner; import com.google.common.collect.Interners; import com.google.common.collect.Iterables; import com.google.common.primitives.Ints; +import org.apache.druid.error.DruidException; import org.apache.druid.guice.annotations.PublicApi; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.query.SegmentDescriptor; import org.apache.druid.timeline.partition.ShardSpec; import org.joda.time.DateTime; @@ -89,9 +92,11 @@ public final class SegmentId implements Comparable private static final int DATE_TIME_SIZE_UPPER_LIMIT = "yyyy-MM-ddTHH:mm:ss.SSS+00:00".length(); + public static final 
String MERGED_VERSION = "merged"; + public static SegmentId of(String dataSource, Interval interval, String version, int partitionNum) { - return new SegmentId(dataSource, interval, version, partitionNum); + return new SegmentId(DataSourceType.TABLE, dataSource, interval, version, partitionNum); } public static SegmentId of(String dataSource, Interval interval, String version, @Nullable ShardSpec shardSpec) @@ -103,21 +108,21 @@ public static SegmentId of(String dataSource, Interval interval, String version, * Tries to parse a segment id from the given String representation, or returns null on failure. If returns a non-null * {@code SegmentId} object, calling {@link #toString()} on the latter is guaranteed to return a string equal to the * argument string of the {@code tryParse()} call. - * + *

* It is possible that this method may incorrectly parse a segment id, for example if the dataSource name in the * segment id contains a DateTime parseable string such as 'datasource_2000-01-01T00:00:00.000Z' and dataSource was * provided as 'datasource'. The desired behavior in this case would be to return null since the identifier does not * actually belong to the provided dataSource but a non-null result would be returned. This is an edge case that would * currently only affect paged select queries with a union dataSource of two similarly-named dataSources as in the * given example. - * + *

* Another source of ambiguity is the end of a segment id like '_123' - it could always be interpreted either as the * partitionNum of the segment id, or as the end of the version, with the implicit partitionNum of 0. This method * prefers the first iterpretation. To iterate all possible parsings of a segment id, use {@link * #iteratePossibleParsingsWithDataSource}. * * @param dataSource the dataSource corresponding to this segment id - * @param segmentId segment id + * @param segmentId segment id * @return a {@link SegmentId} object if the segment id could be parsed, null otherwise */ @Nullable @@ -129,8 +134,8 @@ public static SegmentId tryParse(String dataSource, String segmentId) /** * Returns a (potentially empty) lazy iteration of all possible valid parsings of the given segment id string into - * {@code SegmentId} objects. - * + * {@code SegmentId} objects. Only {@link DataSourceType#TABLE} segment id string is supported. + *

* Warning: most of the parsing work is repeated each time {@link Iterable#iterator()} of this iterable is consumed, * so it should be consumed only once if possible. */ @@ -140,7 +145,7 @@ public static Iterable iterateAllPossibleParsings(String segmentId) String probableDataSource = tryExtractMostProbableDataSource(segmentId); // Iterate parsings with the most probably data source first to allow the users of iterateAllPossibleParsings() to // break from the iteration earlier with higher probability. - if (probableDataSource != null) { + if (!Strings.isNullOrEmpty(probableDataSource)) { List probableParsings = iteratePossibleParsingsWithDataSource(probableDataSource, segmentId); Iterable otherPossibleParsings = () -> IntStream .range(1, splits.size() - 3) @@ -172,6 +177,9 @@ public static Iterable iterateAllPossibleParsings(String segmentId) */ public static List iteratePossibleParsingsWithDataSource(String dataSource, String segmentId) { + if (Strings.isNullOrEmpty(dataSource)) { + return Collections.emptyList(); + } if (!segmentId.startsWith(dataSource) || segmentId.charAt(dataSource.length()) != DELIMITER) { return Collections.emptyList(); } @@ -211,7 +219,7 @@ public static List iteratePossibleParsingsWithDataSource(String dataS /** * Heuristically tries to extract the most probable data source from a String segment id representation, or returns * null on failure. - * + *

* This method is not guaranteed to return a non-null data source given a valid String segment id representation. */ @VisibleForTesting @@ -231,33 +239,52 @@ static String tryExtractMostProbableDataSource(String segmentId) { } } + /** + * @deprecated use {@link #simple(DataSourceType)} or {@link #simpleTable(String)} instead. + */ + @Deprecated + public static SegmentId dummy(String dataSource) + { + return of(dataSource, Intervals.ETERNITY, "dummy_version", 0); + } /** - * Creates a merged SegmentId for the given data source, interval and partition number. Used when segments are - * merged. + * @deprecated use {@link #simple(DataSourceType)} or {@link #simpleTable(String)} instead. */ - public static SegmentId merged(String dataSource, Interval interval, int partitionNum) + @Deprecated + public static SegmentId dummy(String dataSource, int partitionNum) { - return of(dataSource, interval, "merged", partitionNum); + return of(dataSource, Intervals.ETERNITY, "dummy_version", partitionNum); } /** - * Creates a dummy SegmentId with the given data source. This method is useful in benchmark and test code. + * Creates a {@link SegmentId} with the given {@link DataSourceType}. + * + * @param dataSourceType the data source type, should be non-TABLE. */ - public static SegmentId dummy(String dataSource) + public static SegmentId simple(DataSourceType dataSourceType) { - return of(dataSource, Intervals.ETERNITY, "dummy_version", 0); + return new SegmentId(dataSourceType, "", Intervals.ETERNITY, StringUtils.toLowerCase(dataSourceType.name()), 0); } /** - * Creates a dummy SegmentId with the given data source and partition number. - * This method is useful in benchmark and test code. + * Creates a {@link SegmentId} with the given data source. 
*/ - public static SegmentId dummy(String dataSource, int partitionNum) + public static SegmentId simpleTable(String dataSource) { - return of(dataSource, Intervals.ETERNITY, "dummy_version", partitionNum); + return new SegmentId(DataSourceType.TABLE, dataSource, Intervals.ETERNITY, "", 0); } + public enum DataSourceType + { + TABLE, + LOOKUP, + INLINE, + EXTERNAL, + FRAME + } + + private final DataSourceType dataSourceType; private final String dataSource; private final Interval interval; private final String version; @@ -269,13 +296,40 @@ public static SegmentId dummy(String dataSource, int partitionNum) */ private final int hashCode; - private SegmentId(String dataSource, Interval interval, String version, int partitionNum) + private SegmentId( + DataSourceType dataSourceType, + String dataSource, + Interval interval, + String version, + int partitionNum + ) { + this.dataSourceType = Objects.requireNonNull(dataSourceType); this.dataSource = STRING_INTERNER.intern(Objects.requireNonNull(dataSource)); + switch (dataSourceType) { + case TABLE: + if (Strings.isNullOrEmpty(dataSource)) { + throw DruidException.defensive("Datasource is not specified"); + } + // Versions are timestamp-based Strings, interning of them doesn't make sense. If this is not the case, interning + // could be conditionally allowed via a system property. + this.version = Objects.requireNonNull(version); + break; + case LOOKUP: + case INLINE: + case EXTERNAL: + case FRAME: + if (!Strings.isNullOrEmpty(dataSource)) { + throw DruidException.defensive("Datasource is used for druid table only"); + } + // For non-table segments, version is a String that is not timestamp-based. Interning it makes sense. 
+ this.version = STRING_INTERNER.intern(Objects.requireNonNull(version)); + break; + default: + throw DruidException.defensive("unreachable"); + } + this.interval = INTERVAL_INTERNER.intern(Objects.requireNonNull(interval)); - // Versions are timestamp-based Strings, interning of them doesn't make sense. If this is not the case, interning - // could be conditionally allowed via a system property. - this.version = Objects.requireNonNull(version); this.partitionNum = partitionNum; this.hashCode = computeHashCode(); } @@ -287,12 +341,23 @@ private int computeHashCode() int hashCode = partitionNum; // 1000003 is a constant used in Google AutoValue, provides a little better distribution than 31 hashCode = hashCode * 1000003 + version.hashCode(); - + hashCode = hashCode * 1000003 + dataSourceType.ordinal(); hashCode = hashCode * 1000003 + dataSource.hashCode(); hashCode = hashCode * 1000003 + interval.hashCode(); return hashCode; } + /** + * Returns the {@link DataSourceType} of this segment. + */ + public DataSourceType getDataSourceType() + { + return dataSourceType; + } + + /** + * Returns the data source name of this segment. + */ public String getDataSource() { return dataSource; @@ -323,9 +388,20 @@ public int getPartitionNum() return partitionNum; } + /** + * Returns a new {@link SegmentId} with new interval. + */ public SegmentId withInterval(Interval newInterval) { - return of(dataSource, newInterval, version, partitionNum); + return new SegmentId(dataSourceType, dataSource, newInterval, version, partitionNum); + } + + /** + * Returns a new {@link SegmentId} with new version. + */ + public SegmentId withVersion(String newVersion) + { + return new SegmentId(dataSourceType, dataSource, interval, newVersion, partitionNum); } /** @@ -353,6 +429,7 @@ public boolean equals(Object o) // Compare hashCode instead of partitionNum: break the chain quicker if the objects are not equal. 
If the hashCodes // are equal as well as all other fields used to compute them, the partitionNums are also guaranteed to be equal. return hashCode == that.hashCode && + dataSourceType.equals(that.dataSourceType) && dataSource.equals(that.dataSource) && interval.equals(that.interval) && version.equals(that.version); @@ -367,7 +444,11 @@ public int hashCode() @Override public int compareTo(SegmentId o) { - int result = dataSource.compareTo(o.dataSource); + int result = dataSourceType.compareTo(o.dataSourceType); + if (result != 0) { + return result; + } + result = dataSource.compareTo(o.dataSource); if (result != 0) { return result; } @@ -392,6 +473,7 @@ public String toString() { StringBuilder sb = new StringBuilder(safeUpperLimitOfStringSize()); + // We're ignoring dataSourceType here. sb.append(dataSource).append(DELIMITER) .append(getIntervalStart()).append(DELIMITER) .append(getIntervalEnd()).append(DELIMITER) diff --git a/processing/src/test/java/org/apache/druid/frame/testutil/FrameTestUtil.java b/processing/src/test/java/org/apache/druid/frame/testutil/FrameTestUtil.java index 2bb8789740c4..3deb9e71f8ee 100644 --- a/processing/src/test/java/org/apache/druid/frame/testutil/FrameTestUtil.java +++ b/processing/src/test/java/org/apache/druid/frame/testutil/FrameTestUtil.java @@ -31,7 +31,6 @@ import org.apache.druid.frame.read.FrameReader; import org.apache.druid.frame.segment.FrameSegment; import org.apache.druid.frame.util.SettableLongVirtualColumn; -import org.apache.druid.java.util.common.Intervals; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.java.util.common.guava.Sequences; import org.apache.druid.query.OrderBy; @@ -199,7 +198,7 @@ public static FrameSegment cursorFactoryToFrameSegment( return new FrameSegment( cursorFactoryToFrame(adapter, frameType), FrameReader.create(adapter.getRowSignature()), - SegmentId.of("TestFrame", Intervals.ETERNITY, "0", 0) + "TestFrame" ); } @@ -244,7 +243,7 @@ public static Sequence> 
readRowsFromFrameChannel( /** * Reads a sequence of rows from a {@link CursorFactory}. - * + *

* If {@param populateRowNumberIfPresent} is set, and the provided signature contains {@link #ROW_NUMBER_COLUMN}, * then that column will be populated with a row number from the cursor. * @@ -280,7 +279,7 @@ public static Sequence> readRowsFromCursorFactoryWithRowNumber(Curs /** * Creates a {@link CursorHolder} from a {@link CursorFactory}. - * + *

* If {@param populateRowNumber} is set, the row number will be populated into {@link #ROW_NUMBER_COLUMN}. * * @param cursorFactory the cursor factory diff --git a/processing/src/test/java/org/apache/druid/frame/write/FrameWriterTest.java b/processing/src/test/java/org/apache/druid/frame/write/FrameWriterTest.java index c5790d3a5041..d30e7d485106 100644 --- a/processing/src/test/java/org/apache/druid/frame/write/FrameWriterTest.java +++ b/processing/src/test/java/org/apache/druid/frame/write/FrameWriterTest.java @@ -518,7 +518,7 @@ private static Pair writeFrame( Collections.emptyList() ).lhs; - inputSegment = new FrameSegment(inputFrame, FrameReader.create(signature), SegmentId.dummy("xxx")); + inputSegment = new FrameSegment(inputFrame, FrameReader.create(signature), "xxx"); } try (final CursorHolder cursorHolder = inputSegment.asCursorFactory().makeCursorHolder(CursorBuildSpec.FULL_SCAN)) { diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java b/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java index 81f62cb7421c..f42ac47b7cb4 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/AggregationTestHelper.java @@ -657,7 +657,7 @@ public Segment persistIncrementalIndex( } indexMerger.persist(index, outDir, IndexSpec.DEFAULT, null); - return new QueryableIndexSegment(indexIO.loadIndex(outDir), SegmentId.dummy("")); + return new QueryableIndexSegment(indexIO.loadIndex(outDir), SegmentId.simpleTable("some-data-source")); } //Simulates running group-by query on individual segments as historicals would do, json serialize the results @@ -677,7 +677,7 @@ public Sequence runQueryOnSegments(final List segmentDirs, final Qu public Segment apply(File segmentDir) { try { - return new QueryableIndexSegment(indexIO.loadIndex(segmentDir), SegmentId.dummy("")); + return new 
QueryableIndexSegment(indexIO.loadIndex(segmentDir), SegmentId.simpleTable("some-data-source")); } catch (IOException ex) { throw new RuntimeException(ex); diff --git a/processing/src/test/java/org/apache/druid/query/lookup/LookupSegmentTest.java b/processing/src/test/java/org/apache/druid/query/lookup/LookupSegmentTest.java index 8b93bc060118..7b66afacbddc 100644 --- a/processing/src/test/java/org/apache/druid/query/lookup/LookupSegmentTest.java +++ b/processing/src/test/java/org/apache/druid/query/lookup/LookupSegmentTest.java @@ -106,7 +106,10 @@ public LookupExtractor get() @Test public void test_getId() { - Assert.assertEquals(SegmentId.dummy(LOOKUP_NAME), LOOKUP_SEGMENT.getId()); + Assert.assertEquals( + SegmentId.simple(SegmentId.DataSourceType.LOOKUP).withVersion(LOOKUP_NAME), + LOOKUP_SEGMENT.getId() + ); } @Test diff --git a/processing/src/test/java/org/apache/druid/query/policy/RestrictAllTablesPolicyEnforcerTest.java b/processing/src/test/java/org/apache/druid/query/policy/RestrictAllTablesPolicyEnforcerTest.java index 86c597d6e83c..be88222cc8e8 100644 --- a/processing/src/test/java/org/apache/druid/query/policy/RestrictAllTablesPolicyEnforcerTest.java +++ b/processing/src/test/java/org/apache/druid/query/policy/RestrictAllTablesPolicyEnforcerTest.java @@ -23,13 +23,18 @@ import com.google.common.collect.ImmutableList; import org.apache.druid.error.DruidException; import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.common.guava.Sequences; +import org.apache.druid.query.InlineDataSource; import org.apache.druid.query.RestrictedDataSource; import org.apache.druid.query.TableDataSource; import org.apache.druid.query.filter.NullFilter; import org.apache.druid.segment.ReferenceCountingSegment; +import org.apache.druid.segment.RowBasedSegment; import org.apache.druid.segment.Segment; import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.TestSegmentUtils.SegmentForTesting; +import 
org.apache.druid.segment.column.RowSignature; +import org.apache.druid.timeline.SegmentId; import org.junit.Assert; import org.junit.Test; @@ -97,6 +102,35 @@ public void test_validate() throws Exception policyEnforcer.validateOrElseThrow(segment, policy); } + @Test + public void test_validate_allowNonTableSegments() throws Exception + { + final RestrictAllTablesPolicyEnforcer policyEnforcer = new RestrictAllTablesPolicyEnforcer(null); + + // Test validate segment, success for inline segment + final InlineDataSource inlineDataSource = InlineDataSource.fromIterable(ImmutableList.of(), RowSignature.empty()); + final Segment inlineSegment = new RowBasedSegment<>( + SegmentId.simple(SegmentId.DataSourceType.INLINE), + Sequences.simple(inlineDataSource.getRows()), + inlineDataSource.rowAdapter(), + inlineDataSource.getRowSignature() + ); + ReferenceCountingSegment segment = ReferenceCountingSegment.wrapRootGenerationSegment(inlineSegment); + + policyEnforcer.validateOrElseThrow(segment, null); + } + + @Test + public void test_validate_closedSegment() throws Exception + { + final RestrictAllTablesPolicyEnforcer policyEnforcer = new RestrictAllTablesPolicyEnforcer(null); + Segment baseSegment = new SegmentForTesting("table", Intervals.ETERNITY, "1"); + ReferenceCountingSegment segment = ReferenceCountingSegment.wrapRootGenerationSegment(baseSegment); + segment.close(); + + policyEnforcer.validateOrElseThrow(segment, null); + } + @Test public void test_validate_withAllowedPolicies() throws Exception { diff --git a/processing/src/test/java/org/apache/druid/segment/UnnestCursorFactoryTest.java b/processing/src/test/java/org/apache/druid/segment/UnnestCursorFactoryTest.java index ef26393a7734..f01e3d6d241a 100644 --- a/processing/src/test/java/org/apache/druid/segment/UnnestCursorFactoryTest.java +++ b/processing/src/test/java/org/apache/druid/segment/UnnestCursorFactoryTest.java @@ -832,7 +832,7 @@ public void testUnnestValueMatcherValueDoesntExist() cursor.advance(); 
count++; } - Assert.assertEquals(count, 618); + Assert.assertEquals(count, 604); } } diff --git a/processing/src/test/java/org/apache/druid/timeline/SegmentIdTest.java b/processing/src/test/java/org/apache/druid/timeline/SegmentIdTest.java index 48075c33bfb6..5efe12364ba9 100644 --- a/processing/src/test/java/org/apache/druid/timeline/SegmentIdTest.java +++ b/processing/src/test/java/org/apache/druid/timeline/SegmentIdTest.java @@ -21,6 +21,8 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; +import org.apache.druid.error.DruidException; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.Intervals; import org.joda.time.DateTime; @@ -55,6 +57,36 @@ public void testBasic() Assert.assertEquals(desc, SegmentId.tryParse(datasource, desc.toString())); } + @Test + public void testBasicEmptyDataSource() + { + SegmentId desc = SegmentId.simple(SegmentId.DataSourceType.LOOKUP); + Assert.assertEquals("_-146136543-09-08T08:23:32.096Z_146140482-04-24T15:36:27.903Z_lookup", desc.toString()); + Assert.assertNull(SegmentId.tryExtractMostProbableDataSource(desc.toString())); + Assert.assertTrue(Iterables.isEmpty(SegmentId.iterateAllPossibleParsings(desc.toString()))); + + desc = SegmentId.simple(SegmentId.DataSourceType.INLINE); + Assert.assertEquals("_-146136543-09-08T08:23:32.096Z_146140482-04-24T15:36:27.903Z_inline", desc.toString()); + Assert.assertNull(SegmentId.tryExtractMostProbableDataSource(desc.toString())); + Assert.assertTrue(Iterables.isEmpty(SegmentId.iterateAllPossibleParsings(desc.toString()))); + + desc = SegmentId.simple(SegmentId.DataSourceType.EXTERNAL); + Assert.assertEquals("_-146136543-09-08T08:23:32.096Z_146140482-04-24T15:36:27.903Z_external", desc.toString()); + Assert.assertNull(SegmentId.tryExtractMostProbableDataSource(desc.toString())); + 
Assert.assertTrue(Iterables.isEmpty(SegmentId.iterateAllPossibleParsings(desc.toString()))); + + desc = SegmentId.simple(SegmentId.DataSourceType.FRAME); + Assert.assertEquals("_-146136543-09-08T08:23:32.096Z_146140482-04-24T15:36:27.903Z_frame", desc.toString()); + Assert.assertNull(SegmentId.tryExtractMostProbableDataSource(desc.toString())); + Assert.assertTrue(Iterables.isEmpty(SegmentId.iterateAllPossibleParsings(desc.toString()))); + + DruidException e = Assert.assertThrows(DruidException.class, () -> SegmentId.of("", Intervals.ETERNITY, "ver", 0)); + Assert.assertEquals("Datasource is not specified", e.getMessage()); + + DruidException e2 = Assert.assertThrows(DruidException.class, () -> SegmentId.simpleTable("")); + Assert.assertEquals("Datasource is not specified", e2.getMessage()); + } + @Test public void testDataSourceWithUnderscore() { @@ -228,4 +260,12 @@ public void testIterateAllPossibleParsings2() Assert.assertEquals(2, possibleParsings.size()); Assert.assertEquals(expected, ImmutableSet.copyOf(possibleParsings)); } + + @Test + public void testIterateAllPossibleParsings_emptyDataSource() + { + String segmentId = "_2015-01-02T00:00:00.000Z_2015-01-03T00:00:00.000Z_ver"; + Iterable possibleParsings = SegmentId.iterateAllPossibleParsings(segmentId); + Assert.assertTrue(Iterables.isEmpty(possibleParsings)); + } } diff --git a/server/src/main/java/org/apache/druid/segment/FrameBasedInlineSegmentWrangler.java b/server/src/main/java/org/apache/druid/segment/FrameBasedInlineSegmentWrangler.java index 407bbf750faa..dc2fc400c582 100644 --- a/server/src/main/java/org/apache/druid/segment/FrameBasedInlineSegmentWrangler.java +++ b/server/src/main/java/org/apache/druid/segment/FrameBasedInlineSegmentWrangler.java @@ -46,7 +46,7 @@ public Iterable getSegmentsForIntervals( frameSignaturePair -> new FrameSegment( frameSignaturePair.getFrame(), FrameReader.create(frameSignaturePair.getRowSignature()), - SegmentId.dummy(SEGMENT_ID) + 
SegmentId.simple(SegmentId.DataSourceType.INLINE) ) ) .iterator(); diff --git a/server/src/main/java/org/apache/druid/segment/InlineSegmentWrangler.java b/server/src/main/java/org/apache/druid/segment/InlineSegmentWrangler.java index 6128dcf49f26..044018fc8160 100644 --- a/server/src/main/java/org/apache/druid/segment/InlineSegmentWrangler.java +++ b/server/src/main/java/org/apache/druid/segment/InlineSegmentWrangler.java @@ -30,12 +30,12 @@ /** * A {@link SegmentWrangler} for {@link InlineDataSource}. - * + *

* It is not valid to pass any other DataSource type to the "getSegmentsForIntervals" method. */ public class InlineSegmentWrangler implements SegmentWrangler { - private static final String SEGMENT_ID = "inline"; + public static final SegmentId SEGMENT_ID = SegmentId.simple(SegmentId.DataSourceType.INLINE); @Override @SuppressWarnings("unchecked") @@ -46,7 +46,7 @@ public Iterable getSegmentsForIntervals(final DataSource dataSource, fi if (inlineDataSource.rowsAreArrayList()) { return Collections.singletonList( new ArrayListSegment<>( - SegmentId.dummy(SEGMENT_ID), + SEGMENT_ID, (ArrayList) inlineDataSource.getRowsAsList(), inlineDataSource.rowAdapter(), inlineDataSource.getRowSignature() @@ -56,7 +56,7 @@ public Iterable getSegmentsForIntervals(final DataSource dataSource, fi return Collections.singletonList( new RowBasedSegment<>( - SegmentId.dummy(SEGMENT_ID), + SEGMENT_ID, Sequences.simple(inlineDataSource.getRows()), inlineDataSource.rowAdapter(), inlineDataSource.getRowSignature() diff --git a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java index d42b44b2db6c..bb28d65698dc 100644 --- a/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java +++ b/server/src/test/java/org/apache/druid/client/CachingClusteredClientTest.java @@ -2768,7 +2768,7 @@ private class MyDataSegment extends DataSegment private MyDataSegment() { super( - "", + "myDataSource", Intervals.utc(0, 1), "", null, diff --git a/services/src/main/java/org/apache/druid/cli/DumpSegment.java b/services/src/main/java/org/apache/druid/cli/DumpSegment.java index a98b9aafdbf6..8b54ad19a9e4 100644 --- a/services/src/main/java/org/apache/druid/cli/DumpSegment.java +++ b/services/src/main/java/org/apache/druid/cli/DumpSegment.java @@ -746,7 +746,7 @@ static Sequence executeQuery(final Injector injector, final QueryableInde { final QueryRunnerFactoryConglomerate conglomerate = 
injector.getInstance(QueryRunnerFactoryConglomerate.class); final QueryRunnerFactory> factory = conglomerate.findFactory(query); - final QueryRunner runner = factory.createRunner(new QueryableIndexSegment(index, SegmentId.dummy("segment"))); + final QueryRunner runner = factory.createRunner(new QueryableIndexSegment(index, SegmentId.simpleTable("segment"))); return factory .getToolchest() .mergeResults(factory.mergeRunners(DirectQueryProcessingPool.INSTANCE, ImmutableList.of(runner)), true)