@@ -125,60 +125,24 @@ public List<String> getColumns()
   @Override
   public Query<ScanResultValue> withQuerySegmentSpec(QuerySegmentSpec querySegmentSpec)
   {
-    return new ScanQuery(
-        getDataSource(),
-        querySegmentSpec,
-        resultFormat,
-        batchSize,
-        limit,
-        dimFilter,
-        columns,
-        getContext()
-    );
+    return ScanQueryBuilder.copy(this).intervals(querySegmentSpec).build();
   }

   @Override
   public Query<ScanResultValue> withDataSource(DataSource dataSource)
   {
-    return new ScanQuery(
-        dataSource,
-        getQuerySegmentSpec(),
-        resultFormat,
-        batchSize,
-        limit,
-        dimFilter,
-        columns,
-        getContext()
-    );
+    return ScanQueryBuilder.copy(this).dataSource(dataSource).build();
   }

   @Override
   public Query<ScanResultValue> withOverriddenContext(Map<String, Object> contextOverrides)
   {
-    return new ScanQuery(
-        getDataSource(),
-        getQuerySegmentSpec(),
-        resultFormat,
-        batchSize,
-        limit,
-        dimFilter,
-        columns,
-        computeOverridenContext(contextOverrides)
-    );
+    return ScanQueryBuilder.copy(this).context(computeOverriddenContext(getContext(), contextOverrides)).build();
   }

   public ScanQuery withDimFilter(DimFilter dimFilter)
   {
-    return new ScanQuery(
-        getDataSource(),
-        getQuerySegmentSpec(),
-        resultFormat,
-        batchSize,
-        limit,
-        dimFilter,
-        columns,
-        getContext()
-    );
+    return ScanQueryBuilder.copy(this).filters(dimFilter).build();
   }

   @Override
@@ -290,12 +254,17 @@ public ScanQuery build()
     );
   }

-  public ScanQueryBuilder copy(ScanQueryBuilder builder)
+  public static ScanQueryBuilder copy(ScanQuery query)
   {
     return new ScanQueryBuilder()
-        .dataSource(builder.dataSource)
-        .intervals(builder.querySegmentSpec)
-        .context(builder.context);
+        .dataSource(query.getDataSource())
+        .intervals(query.getQuerySegmentSpec())
+        .resultFormat(query.getResultFormat())
+        .batchSize(query.getBatchSize())
+        .limit(query.getLimit())
+        .filters(query.getFilter())
+        .columns(query.getColumns())
+        .context(query.getContext());
   }

   public ScanQueryBuilder dataSource(String ds)
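[Aside, not part of the diff: because copy(ScanQuery) is now static and carries over every field, callers can derive a modified query in one expression instead of re-listing constructor arguments, which is exactly what the with* methods above now do. A minimal usage sketch, assuming ScanQueryBuilder is accessible from the call site and that limit takes an int; both are assumptions, not confirmed by the diff.]

// Usage sketch (hypothetical helper, not from the PR): derive a copy of an
// existing ScanQuery that differs only in its row limit. All other fields
// (data source, intervals, result format, batch size, filter, columns,
// context) are carried over by ScanQueryBuilder.copy().
static ScanQuery withNewLimit(ScanQuery original, int newLimit)
{
  return ScanQueryBuilder.copy(original)
      .limit(newLimit)
      .build();
}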
8 changes: 5 additions & 3 deletions processing/src/main/java/io/druid/query/BaseQuery.java
@@ -145,10 +145,12 @@ public boolean getContextBoolean(String key, boolean defaultValue)
     return QueryContexts.parseBoolean(this, key, defaultValue);
   }

-  protected Map<String, Object> computeOverridenContext(Map<String, Object> overrides)
+  protected static Map<String, Object> computeOverriddenContext(
Member Author:
This PR breaks compatibility of BaseQuery by refactoring this protected instance method into a static one. Is it a compatibility bug?

Contributor:
I'm not sure if BaseQuery is one of the "supposed to be stable" interfaces or if it's just Query / QueryRunner / QueryToolChest. Good question. We should probably have an annotation or something to make it clear.

I guess since all built-in queries extend BaseQuery, it's likely that extensions would too, so it would be kinder to them to keep compatibility there.

Member Author:
@drcrallen asked me to keep compatibility of BaseQuery in an earlier review of this PR: #4131 (comment)

I found this incompatibility because our extension broke :)

For annotations, there is an issue: #4044

OK, I'll make a PR that fixes the incompatibility.

Member Author:
PR: #4241

Contributor:
Sounds good.

[A sketch of the compatibility shim under discussion follows this hunk.]

+      final Map<String, Object> context,
+      final Map<String, Object> overrides
+  )
   {
     Map<String, Object> overridden = Maps.newTreeMap();
-    final Map<String, Object> context = getContext();
     if (context != null) {
       overridden.putAll(context);
     }
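[A minimal sketch of the compatibility shim discussed in the thread above, assuming the fix keeps the old one-argument instance method as a delegate; the actual change landed in PR #4241. The class name BaseQuerySketch, the abstract getter, and the null handling of overrides are illustrative assumptions, not the real BaseQuery.]

import java.util.Map;
import java.util.TreeMap;

// Sketch of a binary-compatible refactoring: the old protected instance
// method is retained and delegates to the new static overload, so extension
// subclasses compiled against the old signature keep linking.
public abstract class BaseQuerySketch
{
  public abstract Map<String, Object> getContext();

  // Old signature, kept as a deprecated delegating shim.
  @Deprecated
  protected Map<String, Object> computeOverridenContext(Map<String, Object> overrides)
  {
    return computeOverriddenContext(getContext(), overrides);
  }

  // New static form introduced by this PR.
  protected static Map<String, Object> computeOverriddenContext(
      Map<String, Object> context,
      Map<String, Object> overrides
  )
  {
    final Map<String, Object> overridden = new TreeMap<>();
    if (context != null) {
      overridden.putAll(context);
    }
    if (overrides != null) {
      overridden.putAll(overrides);
    }
    return overridden;
  }
}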
@@ -173,7 +175,7 @@ public String getId()
   @Override
   public Query withId(String id)
   {
-    return withOverriddenContext(ImmutableMap.<String, Object>of(QUERYID, id));
+    return withOverriddenContext(ImmutableMap.of(QUERYID, id));
   }

   @Override
@@ -58,9 +58,7 @@ private CPUTimeMetricQueryRunner(


   @Override
-  public Sequence<T> run(
-      final Query<T> query, final Map<String, Object> responseContext
-  )
+  public Sequence<T> run(final Query<T> query, final Map<String, Object> responseContext)
   {
     final Sequence<T> baseSequence = delegate.run(query, responseContext);
     return Sequences.wrap(
@@ -91,7 +89,6 @@ public void after(boolean isDone, Throwable thrown) throws Exception
         }
     );
   }
-
   public static <T> QueryRunner<T> safeBuild(
       QueryRunner<T> delegate,
       QueryToolChest<?, ? super Query<T>> queryToolChest,
104 changes: 46 additions & 58 deletions processing/src/main/java/io/druid/query/Druids.java
@@ -367,32 +367,20 @@ public TimeseriesQuery build()
     );
   }

-  public TimeseriesQueryBuilder copy(TimeseriesQuery query)
+  public static TimeseriesQueryBuilder copy(TimeseriesQuery query)
   {
     return new TimeseriesQueryBuilder()
         .dataSource(query.getDataSource())
-        .intervals(query.getIntervals())
-        .filters(query.getDimensionsFilter())
+        .intervals(query.getQuerySegmentSpec())
         .descending(query.isDescending())
+        .virtualColumns(query.getVirtualColumns())
+        .filters(query.getDimensionsFilter())
         .granularity(query.getGranularity())
         .aggregators(query.getAggregatorSpecs())
         .postAggregators(query.getPostAggregatorSpecs())
         .context(query.getContext());
   }

-  public TimeseriesQueryBuilder copy(TimeseriesQueryBuilder builder)
-  {
-    return new TimeseriesQueryBuilder()
-        .dataSource(builder.dataSource)
-        .intervals(builder.querySegmentSpec)
-        .filters(builder.dimFilter)
-        .descending(builder.descending)
-        .granularity(builder.granularity)
-        .aggregators(builder.aggregatorSpecs)
-        .postAggregators(builder.postAggregatorSpecs)
-        .context(builder.context);
-  }
-
   public DataSource getDataSource()
   {
     return dataSource;
@@ -579,6 +567,7 @@ public SearchQueryBuilder()
     querySegmentSpec = null;
     dimensions = null;
     querySpec = null;
+    sortSpec = null;
     context = null;
   }

@@ -597,32 +586,20 @@ public SearchQuery build()
     );
   }

-  public SearchQueryBuilder copy(SearchQuery query)
+  public static SearchQueryBuilder copy(SearchQuery query)
   {
     return new SearchQueryBuilder()
         .dataSource(query.getDataSource())
-        .intervals(query.getQuerySegmentSpec())
         .filters(query.getDimensionsFilter())
         .granularity(query.getGranularity())
         .limit(query.getLimit())
+        .intervals(query.getQuerySegmentSpec())
         .dimensions(query.getDimensions())
         .query(query.getQuery())
+        .sortSpec(query.getSort())
         .context(query.getContext());
   }

-  public SearchQueryBuilder copy(SearchQueryBuilder builder)
-  {
-    return new SearchQueryBuilder()
-        .dataSource(builder.dataSource)
-        .intervals(builder.querySegmentSpec)
-        .filters(builder.dimFilter)
-        .granularity(builder.granularity)
-        .limit(builder.limit)
-        .dimensions(builder.dimensions)
-        .query(builder.querySpec)
-        .context(builder.context);
-  }
-
   public SearchQueryBuilder dataSource(String d)
   {
     dataSource = new TableDataSource(d);
@@ -819,14 +796,14 @@ public TimeBoundaryQuery build()
     );
   }

-  public TimeBoundaryQueryBuilder copy(TimeBoundaryQueryBuilder builder)
+  public static TimeBoundaryQueryBuilder copy(TimeBoundaryQuery query)
   {
     return new TimeBoundaryQueryBuilder()
-        .dataSource(builder.dataSource)
-        .intervals(builder.querySegmentSpec)
-        .bound(builder.bound)
-        .filters(builder.dimFilter)
-        .context(builder.context);
+        .dataSource(query.getDataSource())
+        .intervals(query.getQuerySegmentSpec())
+        .bound(query.getBound())
+        .filters(query.getFilter())
+        .context(query.getContext());
   }

   public TimeBoundaryQueryBuilder dataSource(String ds)
@@ -993,8 +970,8 @@ public SegmentMetadataQueryBuilder()
     toInclude = null;
     analysisTypes = null;
     merge = null;
-    context = null;
     lenientAggregatorMerge = null;
+    context = null;
   }

   public SegmentMetadataQuery build()
@@ -1011,20 +988,16 @@ public SegmentMetadataQuery build()
     );
   }

-  public SegmentMetadataQueryBuilder copy(SegmentMetadataQueryBuilder builder)
+  public static SegmentMetadataQueryBuilder copy(SegmentMetadataQuery query)
   {
-    final SegmentMetadataQuery.AnalysisType[] analysisTypesArray =
-        analysisTypes != null
-        ? analysisTypes.toArray(new SegmentMetadataQuery.AnalysisType[analysisTypes.size()])
-        : null;
     return new SegmentMetadataQueryBuilder()
-        .dataSource(builder.dataSource)
-        .intervals(builder.querySegmentSpec)
-        .toInclude(toInclude)
-        .analysisTypes(analysisTypesArray)
-        .merge(merge)
-        .lenientAggregatorMerge(lenientAggregatorMerge)
-        .context(builder.context);
+        .dataSource(query.getDataSource())
+        .intervals(query.getQuerySegmentSpec())
+        .toInclude(query.getToInclude())
+        .analysisTypes(query.getAnalysisTypes())
+        .merge(query.isMerge())
+        .lenientAggregatorMerge(query.isLenientAggregatorMerge())
+        .context(query.getContext());
   }

   public SegmentMetadataQueryBuilder dataSource(String ds)
@@ -1075,6 +1048,12 @@ public SegmentMetadataQueryBuilder analysisTypes(SegmentMetadataQuery.AnalysisTy
     return this;
   }

+  public SegmentMetadataQueryBuilder analysisTypes(EnumSet<SegmentMetadataQuery.AnalysisType> analysisTypes)
+  {
+    this.analysisTypes = analysisTypes;
+    return this;
+  }
+
   public SegmentMetadataQueryBuilder merge(boolean merge)
   {
     this.merge = merge;
@@ -1131,11 +1110,13 @@ public SelectQueryBuilder()
   {
     dataSource = null;
     querySegmentSpec = null;
+    descending = false;
     context = null;
     dimFilter = null;
     granularity = Granularities.ALL;
     dimensions = Lists.newArrayList();
     metrics = Lists.newArrayList();
+    virtualColumns = null;
     pagingSpec = null;
   }

Expand All @@ -1155,12 +1136,19 @@ public SelectQuery build()
     );
   }

-  public SelectQueryBuilder copy(SelectQueryBuilder builder)
+  public static SelectQueryBuilder copy(SelectQuery query)
   {
     return new SelectQueryBuilder()
-        .dataSource(builder.dataSource)
-        .intervals(builder.querySegmentSpec)
-        .context(builder.context);
+        .dataSource(query.getDataSource())
+        .intervals(query.getQuerySegmentSpec())
+        .descending(query.isDescending())
+        .filters(query.getFilter())
+        .granularity(query.getGranularity())
+        .dimensionSpecs(query.getDimensions())
+        .metrics(query.getMetrics())
+        .virtualColumns(query.getVirtualColumns())
+        .pagingSpec(query.getPagingSpec())
+        .context(query.getContext());
   }

   public SelectQueryBuilder dataSource(String ds)
@@ -1317,12 +1305,12 @@ public DataSourceMetadataQuery build()
     );
   }

-  public DataSourceMetadataQueryBuilder copy(DataSourceMetadataQueryBuilder builder)
+  public static DataSourceMetadataQueryBuilder copy(DataSourceMetadataQuery query)
   {
     return new DataSourceMetadataQueryBuilder()
-        .dataSource(builder.dataSource)
-        .intervals(builder.querySegmentSpec)
-        .context(builder.context);
+        .dataSource(query.getDataSource())
+        .intervals(query.getQuerySegmentSpec())
+        .context(query.getContext());
   }

   public DataSourceMetadataQueryBuilder dataSource(String ds)
@@ -19,7 +19,6 @@

 package io.druid.query;

-import com.google.common.base.Supplier;
 import com.metamx.emitter.service.ServiceEmitter;
 import io.druid.java.util.common.guava.LazySequence;
 import io.druid.java.util.common.guava.Sequence;
@@ -91,15 +90,8 @@ public Sequence<T> run(final Query<T> query, final Map<String, Object> responseC
     return Sequences.wrap(
         // Use LazySequence because want to account execution time of queryRunner.run() (it prepares the underlying
         // Sequence) as part of the reported query time, i. e. we want to execute queryRunner.run() after
-        // `startTime = System.currentTimeMillis();` (see below).
-        new LazySequence<>(new Supplier<Sequence<T>>()
-        {
-          @Override
-          public Sequence<T> get()
-          {
-            return queryRunner.run(query, responseContext);
-          }
-        }),
+        // `startTime = System.nanoTime();` (see below).
+        new LazySequence<>(() -> queryRunner.run(query, responseContext)),
         new SequenceWrapper()
         {
           private long startTimeNs;
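[The comment in the last hunk explains the reason for LazySequence: queryRunner.run() should execute only after the wrapper records startTimeNs, so its cost lands inside the measured interval. Below is a self-contained sketch of that idea in plain Java; it is not the Druid API, and the sleep stands in for the work done by queryRunner.run().]

import java.util.function.Supplier;

// Deferring expensive work behind a Supplier means it runs only when the
// value is first requested: here, inside the timed region, mirroring how
// LazySequence delays queryRunner.run() until after startTimeNs is taken.
public final class LazyTimingSketch
{
  public static void main(String[] args)
  {
    Supplier<String> expensive = () -> {
      try {
        Thread.sleep(100); // stands in for queryRunner.run()
      }
      catch (InterruptedException e) {
        Thread.currentThread().interrupt();
      }
      return "result";
    };

    final long startNs = System.nanoTime(); // like startTimeNs in before()
    final String result = expensive.get();  // deferred work runs here, inside the timed region
    final long elapsedNs = System.nanoTime() - startNs;
    System.out.println(result + " took " + elapsedNs / 1_000_000 + " ms");
  }
}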