Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@
import io.druid.query.dimension.DimensionSpec;
import io.druid.query.groupby.GroupByQuery;
import io.druid.segment.QueryableIndex;
import io.druid.server.initialization.ServerConfig;
import io.druid.sql.calcite.planner.DruidPlanner;
import io.druid.sql.calcite.planner.PlannerConfig;
import io.druid.sql.calcite.planner.PlannerFactory;
Expand Down Expand Up @@ -116,11 +115,10 @@ public void setup() throws Exception

plannerFactory = new PlannerFactory(
CalciteTests.createMockSchema(walker, plannerConfig),
walker,
CalciteTests.createMockQueryLifecycleFactory(walker),
CalciteTests.createOperatorTable(),
CalciteTests.createExprMacroTable(),
plannerConfig,
new ServerConfig()
plannerConfig
);
groupByQuery = GroupByQuery
.builder()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,6 @@
import io.druid.java.util.common.granularity.Granularities;
import io.druid.java.util.common.guava.Sequences;
import io.druid.query.Druids;
import io.druid.query.QueryContexts;
import io.druid.query.aggregation.CountAggregatorFactory;
import io.druid.query.aggregation.DoubleSumAggregatorFactory;
import io.druid.query.aggregation.FilteredAggregatorFactory;
Expand All @@ -45,7 +44,6 @@
import io.druid.segment.column.ValueType;
import io.druid.segment.incremental.IncrementalIndexSchema;
import io.druid.segment.virtual.ExpressionVirtualColumn;
import io.druid.server.initialization.ServerConfig;
import io.druid.sql.calcite.filtration.Filtration;
import io.druid.sql.calcite.planner.Calcites;
import io.druid.sql.calcite.planner.DruidOperatorTable;
Expand Down Expand Up @@ -131,11 +129,10 @@ public void setUp() throws Exception
);
plannerFactory = new PlannerFactory(
druidSchema,
walker,
CalciteTests.createMockQueryLifecycleFactory(walker),
operatorTable,
CalciteTests.createExprMacroTable(),
plannerConfig,
new ServerConfig()
plannerConfig
);
}

Expand Down Expand Up @@ -223,11 +220,7 @@ public void testQuantileOnFloatAndLongs() throws Exception
new QuantilePostAggregator("a7", "a5:agg", 0.999f),
new QuantilePostAggregator("a8", "a8:agg", 0.50f)
))
.context(ImmutableMap.<String, Object>of(
"skipEmptyBuckets", true,
QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS,
QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE
))
.context(ImmutableMap.<String, Object>of("skipEmptyBuckets", true))
.build(),
Iterables.getOnlyElement(queryLogHook.getRecordedQueries())
);
Expand Down Expand Up @@ -287,11 +280,7 @@ public void testQuantileOnComplexColumn() throws Exception
new QuantilePostAggregator("a5", "a5:agg", 0.999f),
new QuantilePostAggregator("a6", "a4:agg", 0.999f)
))
.context(ImmutableMap.<String, Object>of(
"skipEmptyBuckets", true,
QueryContexts.DEFAULT_TIMEOUT_KEY, QueryContexts.DEFAULT_TIMEOUT_MILLIS,
QueryContexts.MAX_SCATTER_GATHER_BYTES_KEY, Long.MAX_VALUE
))
.context(ImmutableMap.<String, Object>of("skipEmptyBuckets", true))
.build(),
Iterables.getOnlyElement(queryLogHook.getRecordedQueries())
);
Expand Down
9 changes: 9 additions & 0 deletions server/src/main/java/io/druid/client/DirectDruidClient.java
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,15 @@ public static <T, QueryType extends Query<T>> QueryType withDefaultTimeoutAndMax
);
}

/**
 * Strips the internal bookkeeping entries that {@link #makeResponseContextForQuery(Query, long)}
 * inserts into a response context, so they are not propagated to callers.
 *
 * @param responseContext the response context to clean up in place
 */
public static void removeMagicResponseContextFields(Map<String, Object> responseContext)
{
  // Same two keys that makeResponseContextForQuery() adds; removal order is irrelevant.
  responseContext.remove(QUERY_TOTAL_BYTES_GATHERED);
  responseContext.remove(QUERY_FAIL_TIME);
}

public static Map<String, Object> makeResponseContextForQuery(Query query, long startTimeMillis)
{
final Map<String, Object> responseContext = new MapMaker().makeMap();
Expand Down
22 changes: 3 additions & 19 deletions server/src/main/java/io/druid/server/BrokerQueryResource.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,19 +22,13 @@
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.jaxrs.smile.SmileMediaTypes;
import com.google.inject.Inject;
import com.metamx.emitter.service.ServiceEmitter;
import com.sun.jersey.spi.container.ResourceFilters;
import io.druid.client.ServerViewUtil;
import io.druid.client.TimelineServerView;
import io.druid.guice.annotations.Json;
import io.druid.guice.annotations.Smile;
import io.druid.query.Query;
import io.druid.query.GenericQueryMetricsFactory;
import io.druid.query.QuerySegmentWalker;
import io.druid.query.QueryToolChestWarehouse;
import io.druid.server.http.security.StateResourceFilter;
import io.druid.server.initialization.ServerConfig;
import io.druid.server.log.RequestLogger;
import io.druid.server.security.AuthConfig;

import javax.servlet.http.HttpServletRequest;
Expand All @@ -59,30 +53,20 @@ public class BrokerQueryResource extends QueryResource

@Inject
public BrokerQueryResource(
QueryToolChestWarehouse warehouse,
ServerConfig config,
QueryLifecycleFactory queryLifecycleFactory,
@Json ObjectMapper jsonMapper,
@Smile ObjectMapper smileMapper,
QuerySegmentWalker texasRanger,
ServiceEmitter emitter,
RequestLogger requestLogger,
QueryManager queryManager,
AuthConfig authConfig,
GenericQueryMetricsFactory queryMetricsFactory,
TimelineServerView brokerServerView
)
{
super(
warehouse,
config,
queryLifecycleFactory,
jsonMapper,
smileMapper,
texasRanger,
emitter,
requestLogger,
queryManager,
authConfig,
queryMetricsFactory
authConfig
);
this.brokerServerView = brokerServerView;
}
Expand Down
Loading