Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 10 additions & 3 deletions core/src/main/java/org/apache/druid/math/expr/Function.java
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;

import javax.annotation.Nullable;
import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.ArrayList;
Expand Down Expand Up @@ -224,7 +225,7 @@ protected final ExprEval eval(ExprEval x, ExprEval y)
return eval(x.asString(), y.asInt());
}

protected abstract ExprEval eval(String x, int y);
protected abstract ExprEval eval(@Nullable String x, int y);
}

/**
Expand Down Expand Up @@ -1455,14 +1456,17 @@ public String name()
}

@Override
protected ExprEval eval(String x, int y)
protected ExprEval eval(@Nullable String x, int y)
{
if (y < 0) {
throw new IAE(
"Function[%s] needs a postive integer as second argument",
name()
);
}
if (x == null) {
return ExprEval.of(null);
}
int len = x.length();
return ExprEval.of(y < len ? x.substring(len - y) : x);
}
Expand All @@ -1477,14 +1481,17 @@ public String name()
}

@Override
protected ExprEval eval(String x, int y)
protected ExprEval eval(@Nullable String x, int y)
{
if (y < 0) {
throw new IAE(
"Function[%s] needs a postive integer as second argument",
name()
);
}
if (x == null) {
return ExprEval.of(null);
}
return ExprEval.of(y < x.length() ? x.substring(0, y) : x);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -144,15 +144,20 @@ public void testBloomFilterExprFilter() throws Exception

// fool the planner to make an expression virtual column to test bloom filter Druid expression
testQuery(
StringUtils.format("SELECT COUNT(*) FROM druid.foo WHERE bloom_filter_test(concat(dim2, '-foo'), '%s') = TRUE", base64),
StringUtils.format("SELECT COUNT(*) FROM druid.foo WHERE nullif(bloom_filter_test(concat(dim2, '-foo'), '%s'), 1) is null", base64),
ImmutableList.of(
Druids.newTimeseriesQueryBuilder()
.dataSource(CalciteTests.DATASOURCE1)
.intervals(querySegmentSpec(Filtration.eternity()))
.granularity(Granularities.ALL)
.filters(
new ExpressionDimFilter(
StringUtils.format("(bloom_filter_test(concat(\"dim2\",'-foo'),'%s') == 1)", base64),
StringUtils.format(
"case_searched(bloom_filter_test(concat(\"dim2\",'-foo'),'%s'),1,isnull(bloom_filter_test(concat(\"dim2\",'-foo'),'%s')))",
base64,
base64
),
null,
createExprMacroTable()
)
)
Expand Down
10 changes: 5 additions & 5 deletions licenses.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -169,7 +169,7 @@ name: Esri Geometry API for Java
license_category: binary
module: java-core
license_name: Apache License version 2.0
version: 2.0.0
version: 2.2.0
libraries:
- com.esri.geometry: esri-geometry-api

Expand Down Expand Up @@ -1147,17 +1147,17 @@ name: Apache Calcite
license_category: binary
module: java-core
license_name: Apache License version 2.0
version: 1.17.0
version: 1.21.0
libraries:
- org.apache.calcite: calcite-core
- org.apache.calcite: calcite-linq4j
notices:
- calcite-core: |
Calcite Core
Copyright 2012-2018 The Apache Software Foundation
Copyright 2012-2019 The Apache Software Foundation
- calcite-linq4j: |
Calcite Linq4j
Copyright 2012-2018 The Apache Software Foundation
Copyright 2012-2019 The Apache Software Foundation

---

Expand Down Expand Up @@ -3349,7 +3349,7 @@ name: Janino and Commons Compiler
license_category: binary
module: java-core
license_name: BSD-3-Clause License
version: 2.7.6
version: 3.0.11
copyright: Arno Unkrig and TIBCO Software Inc.
license_file_path: licenses/bin/janino.BSD3
libraries:
Expand Down
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@
<apache.curator.test.version>2.12.0</apache.curator.test.version>
<avatica.version>1.12.0</avatica.version>
<avro.version>1.9.1</avro.version>
<calcite.version>1.17.0</calcite.version>
<calcite.version>1.21.0</calcite.version>
<derby.version>10.14.2.0</derby.version>
<dropwizard.metrics.version>4.0.0</dropwizard.metrics.version>
<guava.version>16.0.1</guava.version>
Expand Down
17 changes: 17 additions & 0 deletions sql/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,17 @@
<groupId>com.yahoo.datasketches</groupId>
<artifactId>sketches-core</artifactId>
</exclusion>
<!--
~ We don't use Calcite's YAML features.
-->
<exclusion>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>jackson-dataformat-yaml</artifactId>
</exclusion>
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
Expand Down Expand Up @@ -155,6 +166,12 @@
<groupId>org.apache.curator</groupId>
<artifactId>curator-x-discovery</artifactId>
</dependency>
<dependency>
<groupId>org.checkerframework</groupId>
<artifactId>checker-qual</artifactId>
<version>${checkerframework.version}</version>
<scope>provided</scope>
</dependency>

<!-- Tests -->
<dependency>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,8 @@ public class DruidConvertletTable implements SqlRexConvertletTable
.add(SqlStdOperatorTable.TIMESTAMP_DIFF)
.add(SqlStdOperatorTable.UNION)
.add(SqlStdOperatorTable.UNION_ALL)
.add(SqlStdOperatorTable.NULLIF)
.add(SqlStdOperatorTable.COALESCE)
.add(OracleSqlOperatorTable.NVL)
.build();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import org.apache.calcite.sql.SqlOperatorTable;
import org.apache.calcite.sql.SqlSyntax;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.validate.SqlNameMatcher;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.sql.calcite.aggregation.SqlAggregator;
Expand Down Expand Up @@ -358,7 +359,8 @@ public void lookupOperatorOverloads(
final SqlIdentifier opName,
final SqlFunctionCategory category,
final SqlSyntax syntax,
final List<SqlOperator> operatorList
final List<SqlOperator> operatorList,
final SqlNameMatcher nameMatcher
)
{
if (opName == null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,15 @@
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.RelRoot;
import org.apache.calcite.rel.logical.LogicalSort;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.sql.SqlExplain;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParseException;
import org.apache.calcite.sql.type.BasicSqlType;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.tools.Planner;
import org.apache.calcite.tools.RelConversionException;
Expand All @@ -50,9 +52,11 @@
import org.apache.druid.java.util.common.guava.BaseSequence;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.java.util.common.guava.Sequences;
import org.apache.druid.segment.DimensionHandlerUtils;
import org.apache.druid.sql.calcite.rel.DruidConvention;
import org.apache.druid.sql.calcite.rel.DruidRel;

import javax.annotation.Nullable;
import java.io.Closeable;
import java.util.ArrayList;
import java.util.Iterator;
Expand All @@ -63,6 +67,7 @@ public class DruidPlanner implements Closeable
{
private final Planner planner;
private final PlannerContext plannerContext;
private RexBuilder rexBuilder;

public DruidPlanner(
final Planner planner,
Expand All @@ -82,6 +87,9 @@ public PlannerResult plan(final String sql)
explain = (SqlExplain) parsed;
parsed = explain.getExplicandum();
}
// the planner's type factory is not available until after parsing
this.rexBuilder = new RexBuilder(planner.getTypeFactory());

final SqlNode validated = planner.validate(parsed);
final RelRoot root = planner.rel(validated);

Expand Down Expand Up @@ -116,12 +124,14 @@ private PlannerResult planWithDruidConvention(
final RelRoot root
) throws RelConversionException
{
final RelNode possiblyWrappedRootRel = possiblyWrapRootWithOuterLimitFromContext(root);

final DruidRel<?> druidRel = (DruidRel<?>) planner.transform(
Rules.DRUID_CONVENTION_RULES,
planner.getEmptyTraitSet()
.replace(DruidConvention.instance())
.plus(root.collation),
root.rel
possiblyWrappedRootRel
);

final Set<String> dataSourceNames = ImmutableSet.copyOf(druidRel.getDataSourceNames());
Expand Down Expand Up @@ -232,6 +242,42 @@ public void cleanup(EnumeratorIterator iterFromMake)
}
}

/**
* This method wraps the root with a logical sort that applies a limit (no ordering change).
* The CTX_SQL_OUTER_LIMIT flag that controls this wrapping is meant for internal use only by the
* web console, allowing it to apply a limit to queries without rewriting the original SQL.
*
* @param root root node
* @return root node wrapped with a limiting logical sort if a limit is specified in the query context.
*/
@Nullable
private RelNode possiblyWrapRootWithOuterLimitFromContext(
RelRoot root
)
{
Object outerLimitObj = plannerContext.getQueryContext().get(PlannerContext.CTX_SQL_OUTER_LIMIT);
Long outerLimit = DimensionHandlerUtils.convertObjectToLong(outerLimitObj, true);
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Minor: the two calls could be collapsed, since DimensionHandlerUtils.convertObjectToLong handles nulls just fine.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Collapsed the two calls

if (outerLimit == null) {
return root.rel;
}

return LogicalSort.create(
root.rel,
root.collation,
makeBigIntLiteral(0),
makeBigIntLiteral(outerLimit)
);
}

private RexNode makeBigIntLiteral(long value)
{
return rexBuilder.makeLiteral(
value,
new BasicSqlType(DruidTypeSystem.INSTANCE, SqlTypeName.BIGINT),
false
);
}

private static class EnumeratorIterator<T> implements Iterator<T>
{
private final Iterator<T> it;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,10 @@ public class PlannerContext
public static final String CTX_SQL_CURRENT_TIMESTAMP = "sqlCurrentTimestamp";
public static final String CTX_SQL_TIME_ZONE = "sqlTimeZone";

// This context parameter is an undocumented parameter, used internally, to allow the web console to
// apply a limit without having to rewrite the SQL query.
public static final String CTX_SQL_OUTER_LIMIT = "sqlOuterLimit";
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

should this be documented somewhere or just leave it hidden since I guess is mostly to be friendly to web console?

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think leaving it undocumented makes sense, since it's meant to be internal. End users should add a LIMIT to their queries. If we discover use cases where it makes sense to expose it then we could do it then.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think it would be better to document internal parameters somewhere rather than depending on human memory. But I think we can do this in a follow-up PR.

Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this should be in the docs, it is a super useful parameter IMO even outside the web console


// DataContext keys
public static final String DATA_CTX_AUTHENTICATION_RESULT = "authenticationResult";

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,12 @@ public <C> C unwrap(final Class<C> aClass)
final Properties props = new Properties();
return (C) new CalciteConnectionConfigImpl(props)
{
@Override
public <T> T typeSystem(Class<T> typeSystemClass, T defaultTypeSystem)
{
return (T) DruidTypeSystem.INSTANCE;
}

@Override
public SqlConformance conformance()
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,8 @@ public void onMatch(final RelOptRuleCall call)
final Sort first = call.rel(1);
final Sort second = call.rel(0);

if (second.collation.getFieldCollations().isEmpty()) {
if (second.collation.getFieldCollations().isEmpty()
|| second.collation.getFieldCollations().equals(first.collation.getFieldCollations())) {
// Add up the offsets.
final int firstOffset = (first.offset != null ? RexLiteral.intValue(first.offset) : 0);
final int secondOffset = (second.offset != null ? RexLiteral.intValue(second.offset) : 0);
Expand Down Expand Up @@ -81,7 +82,7 @@ public void onMatch(final RelOptRuleCall call)
first.getInput(),
first.getCollation(),
offset == 0 ? null : call.builder().literal(offset),
call.builder().literal(fetch)
fetch < 0 ? null : call.builder().literal(fetch)
);

call.transformTo(combined);
Expand Down
Loading