contextOverrides)
+ {
+ return new ScanQuery(
+ getDataSource(),
+ getQuerySegmentSpec(),
+ resultFormat,
+ batchSize,
+ limit,
+ dimFilter,
+ columns,
+ computeOverridenContext(contextOverrides)
+ );
+ }
+
+ /**
+  * Returns a copy of this query with {@code dimFilter} as the filter; all other
+  * fields (data source, segment spec, result format, batch size, limit, columns,
+  * context) are carried over unchanged. The receiver is not modified.
+  */
+ public ScanQuery withDimFilter(DimFilter dimFilter)
+ {
+ return new ScanQuery(
+ getDataSource(),
+ getQuerySegmentSpec(),
+ resultFormat,
+ batchSize,
+ limit,
+ dimFilter,
+ columns,
+ getContext()
+ );
+ }
+
+ /**
+  * Two ScanQueries are equal when the base-query state matches and the
+  * scan-specific fields (resultFormat, batchSize, limit, dimFilter, columns)
+  * are equal, treating null fields as equal only to null.
+  */
+ @Override
+ public boolean equals(Object o)
+ {
+   if (this == o) {
+     return true;
+   }
+   // Type check first, then base-class state — same short-circuit order as before.
+   if (o == null || getClass() != o.getClass() || !super.equals(o)) {
+     return false;
+   }
+   final ScanQuery other = (ScanQuery) o;
+   if (batchSize != other.batchSize || limit != other.limit) {
+     return false;
+   }
+   if (resultFormat == null ? other.resultFormat != null : !resultFormat.equals(other.resultFormat)) {
+     return false;
+   }
+   if (dimFilter == null ? other.dimFilter != null : !dimFilter.equals(other.dimFilter)) {
+     return false;
+   }
+   return columns == null ? other.columns == null : columns.equals(other.columns);
+ }
+
+ /**
+  * Hash code consistent with {@link #equals}: standard multiplier-31
+  * accumulation over the base-query hash and the scan-specific fields,
+  * with null fields contributing 0.
+  */
+ @Override
+ public int hashCode()
+ {
+   int hash = super.hashCode();
+   hash = 31 * hash + (resultFormat == null ? 0 : resultFormat.hashCode());
+   hash = 31 * hash + batchSize;
+   hash = 31 * hash + limit;
+   hash = 31 * hash + (dimFilter == null ? 0 : dimFilter.hashCode());
+   hash = 31 * hash + (columns == null ? 0 : columns.hashCode());
+   return hash;
+ }
+
+ /**
+  * Human-readable rendering of the query for logs and debugging; field order
+  * and formatting match the previous concatenation form exactly.
+  */
+ @Override
+ public String toString()
+ {
+   final StringBuilder sb = new StringBuilder("ScanQuery{");
+   sb.append("dataSource='").append(getDataSource()).append('\'');
+   sb.append(", querySegmentSpec=").append(getQuerySegmentSpec());
+   sb.append(", descending=").append(isDescending());
+   sb.append(", resultFormat='").append(resultFormat).append('\'');
+   sb.append(", batchSize=").append(batchSize);
+   sb.append(", limit=").append(limit);
+   sb.append(", dimFilter=").append(dimFilter);
+   sb.append(", columns=").append(columns);
+   return sb.append('}').toString();
+ }
+
+ /**
+ * A Builder for ScanQuery.
+ *
+ * Required: dataSource(), intervals() must be called before build()
+ *
+ * Usage example:
+ *
+ * ScanQuery query = new ScanQueryBuilder()
+ * .dataSource("Example")
+ * .interval("2010/2013")
+ * .build();
+ *
+ *
+ * @see io.druid.query.scan.ScanQuery
+ */
+ public static class ScanQueryBuilder
+ {
+ private DataSource dataSource;
+ private QuerySegmentSpec querySegmentSpec;
+ private Map context;
+ private String resultFormat;
+ private int batchSize;
+ private int limit;
+ private DimFilter dimFilter;
+ private List columns;
+
+ public ScanQueryBuilder()
+ {
+ dataSource = null;
+ querySegmentSpec = null;
+ context = null;
+ resultFormat = null;
+ batchSize = 0;
+ limit = 0;
+ dimFilter = null;
+ columns = Lists.newArrayList();
+ }
+
+ public ScanQuery build()
+ {
+ return new ScanQuery(
+ dataSource,
+ querySegmentSpec,
+ resultFormat,
+ batchSize,
+ limit,
+ dimFilter,
+ columns,
+ context
+ );
+ }
+
+ public ScanQueryBuilder copy(ScanQueryBuilder builder)
+ {
+ return new ScanQueryBuilder()
+ .dataSource(builder.dataSource)
+ .intervals(builder.querySegmentSpec)
+ .context(builder.context);
+ }
+
+ public ScanQueryBuilder dataSource(String ds)
+ {
+ dataSource = new TableDataSource(ds);
+ return this;
+ }
+
+ public ScanQueryBuilder dataSource(DataSource ds)
+ {
+ dataSource = ds;
+ return this;
+ }
+
+ public ScanQueryBuilder intervals(QuerySegmentSpec q)
+ {
+ querySegmentSpec = q;
+ return this;
+ }
+
+ public ScanQueryBuilder intervals(String s)
+ {
+ querySegmentSpec = new LegacySegmentSpec(s);
+ return this;
+ }
+
+ public ScanQueryBuilder intervals(List l)
+ {
+ querySegmentSpec = new LegacySegmentSpec(l);
+ return this;
+ }
+
+ public ScanQueryBuilder context(Map c)
+ {
+ context = c;
+ return this;
+ }
+
+ public ScanQueryBuilder resultFormat(String r)
+ {
+ resultFormat = r;
+ return this;
+ }
+
+ public ScanQueryBuilder batchSize(int b)
+ {
+ batchSize = b;
+ return this;
+ }
+
+ public ScanQueryBuilder limit(int l)
+ {
+ limit = l;
+ return this;
+ }
+
+ public ScanQueryBuilder filters(String dimensionName, String value)
+ {
+ dimFilter = new SelectorDimFilter(dimensionName, value, null);
+ return this;
+ }
+
+ public ScanQueryBuilder filters(String dimensionName, String value, String... values)
+ {
+ dimFilter = new InDimFilter(dimensionName, Lists.asList(value, values), null);
+ return this;
+ }
+
+ public ScanQueryBuilder filters(DimFilter f)
+ {
+ dimFilter = f;
+ return this;
+ }
+
+ public ScanQueryBuilder columns(List c)
+ {
+ columns = c;
+ return this;
+ }
+
+ public ScanQueryBuilder columns(String... c)
+ {
+ columns = Arrays.asList(c);
+ return this;
+ }
+ }
+
+ /**
+  * Convenience static factory for a fresh {@code ScanQueryBuilder}.
+  */
+ public static ScanQueryBuilder newScanQueryBuilder()
+ {
+ return new ScanQueryBuilder();
+ }
+}
diff --git a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryDruidModule.java b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryDruidModule.java
new file mode 100644
index 000000000000..f33ea997b268
--- /dev/null
+++ b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryDruidModule.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package io.druid.query.scan;
+
+import com.fasterxml.jackson.databind.Module;
+import com.fasterxml.jackson.databind.jsontype.NamedType;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+import com.google.inject.Binder;
+import io.druid.guice.DruidBinders;
+import io.druid.guice.LazySingleton;
+import io.druid.initialization.DruidModule;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class ScanQueryDruidModule implements DruidModule {
+ public void configure(Binder binder) {
+ DruidBinders.queryToolChestBinder(binder)
+ .addBinding(ScanQuery.class)
+ .to(ScanQueryQueryToolChest.class)
+ .in(LazySingleton.class);
+
+ DruidBinders.queryRunnerFactoryBinder(binder)
+ .addBinding(ScanQuery.class)
+ .to(ScanQueryRunnerFactory.class)
+ .in(LazySingleton.class);
+ }
+
+ public List extends Module> getJacksonModules() {
+ return Arrays.asList(
+ new SimpleModule("ScanQueryDruidModule")
+ .registerSubtypes(
+ new NamedType(ScanQuery.class, ScanQuery.SCAN)
+ )
+ );
+ }
+}
diff --git a/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java
new file mode 100644
index 000000000000..5b04bb19a8d6
--- /dev/null
+++ b/extensions-contrib/scan-query/src/main/java/io/druid/query/scan/ScanQueryEngine.java
@@ -0,0 +1,244 @@
+/*
+ * Licensed to Metamarkets Group Inc. (Metamarkets) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. Metamarkets licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package io.druid.query.scan;
+
+import com.google.common.base.Function;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import io.druid.granularity.QueryGranularities;
+import io.druid.java.util.common.ISE;
+import io.druid.java.util.common.guava.BaseSequence;
+import io.druid.java.util.common.guava.Sequence;
+import io.druid.java.util.common.guava.Sequences;
+import io.druid.query.ColumnSelectorPlus;
+import io.druid.query.QueryInterruptedException;
+import io.druid.query.dimension.DefaultDimensionSpec;
+import io.druid.query.dimension.DimensionSpec;
+import io.druid.query.filter.Filter;
+import io.druid.query.select.SelectQueryEngine;
+import io.druid.segment.Cursor;
+import io.druid.segment.DimensionHandlerUtils;
+import io.druid.segment.LongColumnSelector;
+import io.druid.segment.ObjectColumnSelector;
+import io.druid.segment.Segment;
+import io.druid.segment.StorageAdapter;
+import io.druid.segment.VirtualColumns;
+import io.druid.segment.column.Column;
+import io.druid.segment.filter.Filters;
+import org.joda.time.Interval;
+
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeoutException;
+
+public class ScanQueryEngine
+{
+ private static final SelectQueryEngine.SelectStrategyFactory STRATEGY_FACTORY = new SelectQueryEngine.SelectStrategyFactory();
+ public Sequence process(
+ final ScanQuery query,
+ final Segment segment,
+ final Map responseContext
+ )
+ {
+ if (responseContext.get(ScanQueryRunnerFactory.CTX_COUNT) != null) {
+ int count = (int) responseContext.get(ScanQueryRunnerFactory.CTX_COUNT);
+ if (count >= query.getLimit()) {
+ return Sequences.empty();
+ }
+ }
+ final Long timeoutAt = (long) responseContext.get(ScanQueryRunnerFactory.CTX_TIMEOUT_AT);
+ final long start = System.currentTimeMillis();
+ final StorageAdapter adapter = segment.asStorageAdapter();
+
+ if (adapter == null) {
+ throw new ISE(
+ "Null storage adapter found. Probably trying to issue a query against a segment being memory unmapped."
+ );
+ }
+
+ List allDims = Lists.newLinkedList(adapter.getAvailableDimensions());
+ List allMetrics = Lists.newLinkedList(adapter.getAvailableMetrics());
+ final List allColumns = Lists.newLinkedList();
+ if (query.getColumns() != null && !query.getColumns().isEmpty()) {
+ if (!query.getColumns().contains(ScanResultValue.timestampKey)) {
+ allColumns.add(ScanResultValue.timestampKey);
+ }
+ allColumns.addAll(query.getColumns());
+ allDims.retainAll(query.getColumns());
+ allMetrics.retainAll(query.getColumns());
+ }
+ else {
+ if (!allDims.contains(ScanResultValue.timestampKey)) {
+ allColumns.add(ScanResultValue.timestampKey);
+ }
+ allColumns.addAll(allDims);
+ allColumns.addAll(allMetrics);
+ }
+ final List dims = DefaultDimensionSpec.toSpec(allDims);
+ final List metrics = allMetrics;
+
+ final List intervals = query.getQuerySegmentSpec().getIntervals();
+ Preconditions.checkArgument(intervals.size() == 1, "Can only handle a single interval, got[%s]", intervals);
+
+ final String segmentId = segment.getIdentifier();
+
+ final Filter filter = Filters.convertToCNFFromQueryContext(query, Filters.toFilter(query.getDimensionsFilter()));
+
+ if (responseContext.get(ScanQueryRunnerFactory.CTX_COUNT) == null) {
+ responseContext.put(ScanQueryRunnerFactory.CTX_COUNT, 0);
+ }
+ final int limit = query.getLimit() - (int) responseContext.get(ScanQueryRunnerFactory.CTX_COUNT);
+ return Sequences.concat(
+ Sequences.map(
+ adapter.makeCursors(
+ filter,
+ intervals.get(0),
+ VirtualColumns.EMPTY,
+ QueryGranularities.ALL,
+ query.isDescending()
+ ),
+ new Function>()
+ {
+ @Override
+ public Sequence apply(final Cursor cursor)
+ {
+ return new BaseSequence<>(
+ new BaseSequence.IteratorMaker>()
+ {
+ @Override
+ public Iterator make()
+ {
+ final LongColumnSelector timestampColumnSelector = cursor.makeLongColumnSelector(Column.TIME_COLUMN_NAME);
+
+ final List> selectorPlusList = Arrays.asList(
+ DimensionHandlerUtils.createColumnSelectorPluses(
+ STRATEGY_FACTORY,
+ Lists.newArrayList(dims),
+ cursor
+ )
+ );
+
+ final Map metSelectors = Maps.newHashMap();
+ for (String metric : metrics) {
+ final ObjectColumnSelector metricSelector = cursor.makeObjectColumnSelector(metric);
+ metSelectors.put(metric, metricSelector);
+ }
+ final int batchSize = query.getBatchSize();
+ return new Iterator()
+ {
+ private int offset = 0;
+
+ @Override
+ public boolean hasNext()
+ {
+ return !cursor.isDone() && offset < limit;
+ }
+
+ @Override
+ public ScanResultValue next()
+ {
+ if (System.currentTimeMillis() >= timeoutAt) {
+ throw new QueryInterruptedException(new TimeoutException());
+ }
+ int lastOffset = offset;
+ Object events = null;
+ String resultFormat = query.getResultFormat();
+ if (ScanQuery.RESULT_FORMAT_VALUE_VECTOR.equals(resultFormat)) {
+ throw new UnsupportedOperationException("valueVector is not supported now");
+ } else if (ScanQuery.RESULT_FORMAT_COMPACTED_LIST.equals(resultFormat)) {
+ events = rowsToCompactedList();
+ } else {
+ events = rowsToList();
+ }
+ responseContext.put(
+ ScanQueryRunnerFactory.CTX_COUNT,
+ (int) responseContext.get(ScanQueryRunnerFactory.CTX_COUNT) + (offset - lastOffset)
+ );
+ responseContext.put(
+ ScanQueryRunnerFactory.CTX_TIMEOUT_AT,
+ timeoutAt - (System.currentTimeMillis() - start)
+ );
+ return new ScanResultValue(segmentId, allColumns, events);
+ }
+
+ @Override
+ public void remove()
+ {
+ throw new UnsupportedOperationException();
+ }
+
+ private Object rowsToCompactedList()
+ {
+ return Lists.transform(
+ (List