From 11b2b50a361c0c4629991160873657c821e54adf Mon Sep 17 00:00:00 2001
From: Gian Merlino
Date: Wed, 6 Jul 2022 19:37:03 -0700
Subject: [PATCH 1/3] Preserve column order in DruidSchema, SegmentMetadataQuery.

Instead of putting columns in alphabetical order, preserve the order in
which they appear in the underlying segments. This is helpful because it
makes query order better match ingestion order. It also allows tools, like
the reindexing flow in the web console, to more easily do follow-on
ingestions using a column order that matches the pre-existing column order.

We prefer the order from the latest segments: the merge takes all columns
from the latest segments in the order they appear, then appends columns
that only appear in older segments.
---
 ...ruidSchemaInternRowSignatureBenchmark.java |   3 +-
 .../druid/query/metadata/SegmentAnalyzer.java |   4 +-
 .../SegmentMetadataQueryQueryToolChest.java   |  33 +-
 .../SegmentMetadataQueryRunnerFactory.java    |   3 +-
 .../metadata/metadata/SegmentAnalysis.java    |  12 +-
 .../org/apache/druid/segment/IndexIO.java     |   5 +-
 .../segment/QueryableIndexStorageAdapter.java |  12 +-
 .../apache/druid/query/DoubleStorageTest.java | 152 +++---
 .../query/metadata/SegmentAnalysisTest.java   |  89 ++++
 .../query/metadata/SegmentAnalyzerTest.java   |  42 +-
 ...egmentMetadataQueryQueryToolChestTest.java |  58 ++-
 .../metadata/SegmentMetadataQueryTest.java    | 468 +++++++++---------
 .../SegmentMetadataUnionQueryTest.java        |  29 +-
 .../druid/sql/calcite/schema/DruidSchema.java |   5 +-
 .../sql/calcite/CalciteExplainQueryTest.java  |  12 +-
 .../sql/calcite/CalciteIngestionDmlTest.java  |   2 +-
 .../druid/sql/calcite/CalciteQueryTest.java   |   4 +-
 .../sql/calcite/CalciteSelectQueryTest.java   |  50 +-
 .../sql/calcite/schema/DruidSchemaTest.java   | 121 ++---
 19 files changed, 652 insertions(+), 452 deletions(-)
 create mode 100644 processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java

diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
index e05d1549a429..be39b252cff7 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
@@ -58,6 +58,7 @@
 import java.io.IOException;
 import java.util.HashMap;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
@@ -113,7 +114,7 @@ public void addSegment(final DruidServerMetadata server, final DataSegment segme
   protected Sequence<SegmentAnalysis> runSegmentMetadataQuery(Iterable<SegmentId> segments)
   {
     final int numColumns = 1000;
-    Map<String, ColumnAnalysis> columnToAnalysisMap = new HashMap<>();
+    LinkedHashMap<String, ColumnAnalysis> columnToAnalysisMap = new LinkedHashMap<>();
     for (int i = 0; i < numColumns; ++i) {
       columnToAnalysisMap.put(
           "col" + i,
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
index ca26ce695039..f22b2f68dc90 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
@@ -58,6 +58,7 @@
 import javax.annotation.Nullable;
 import java.io.IOException;
 import java.util.EnumSet;
+import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.TreeMap;
@@ -98,7 +99,8 @@ public Map<String, ColumnAnalysis> analyze(Segment segment)
     // get length and column names from storageAdapter
     final int
length = storageAdapter.getNumRows(); - Map columns = new TreeMap<>(); + // Use LinkedHashMap to preserve column order. + final Map columns = new LinkedHashMap<>(); final RowSignature rowSignature = storageAdapter.getRowSignature(); for (String columnName : rowSignature.getColumnNames()) { diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index 1bb24ef2e716..b2864b45f2d0 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -54,13 +54,16 @@ import org.apache.druid.query.metadata.metadata.SegmentAnalysis; import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery; import org.apache.druid.timeline.LogicalSegment; +import org.apache.druid.timeline.SegmentId; import org.joda.time.DateTime; import org.joda.time.Interval; +import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -108,7 +111,8 @@ public Sequence doRun( ResponseContext context ) { - SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) queryPlus.getQuery()).withFinalizedAnalysisTypes(config); + SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) queryPlus.getQuery()).withFinalizedAnalysisTypes( + config); QueryPlus updatedQueryPlus = queryPlus.withQuery(updatedQuery); return new MappedSequence<>( CombiningSequence.create( @@ -135,7 +139,12 @@ private BinaryOperator createMergeFn(final SegmentMetadataQuery @Override public BinaryOperator createMergeFn(Query query) { - return (arg1, arg2) -> mergeAnalyses(arg1, arg2, ((SegmentMetadataQuery) query).isLenientAggregatorMerge()); + return (arg1, arg2) -> mergeAnalyses( + Iterables.getFirst(query.getDataSource().getTableNames(), null), + arg1, + arg2, + ((SegmentMetadataQuery) query).isLenientAggregatorMerge() + ); } @Override @@ -246,8 +255,9 @@ public List filterSegments(SegmentMetadataQuery qu @VisibleForTesting public static SegmentAnalysis mergeAnalyses( - final SegmentAnalysis arg1, - final SegmentAnalysis arg2, + @Nullable String dataSource, + SegmentAnalysis arg1, + SegmentAnalysis arg2, boolean lenientAggregatorMerge ) { @@ -259,6 +269,19 @@ public static SegmentAnalysis mergeAnalyses( return arg1; } + // Swap arg1, arg2 so the later-ending interval is first. This ensures we prefer the latest column order. + // We're preserving it so callers can see columns in their natural order. 
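+    // Illustrative sketch with hypothetical segment ids (not part of this patch): if arg1 came
+    // from "wiki_2022-01-01/2022-01-02_v1" and arg2 from "wiki_2022-01-02/2022-01-03_v1", then
+    // arg2's interval ends later, so the arguments are swapped and arg2's column order wins.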
+ if (dataSource != null) { + final SegmentId id1 = SegmentId.tryParse(dataSource, arg1.getId()); + final SegmentId id2 = SegmentId.tryParse(dataSource, arg2.getId()); + + if (id1 != null && id2 != null && id2.getIntervalEnd().isAfter(id1.getIntervalEnd())) { + final SegmentAnalysis tmp = arg1; + arg1 = arg2; + arg2 = tmp; + } + } + List newIntervals = null; if (arg1.getIntervals() != null) { newIntervals = new ArrayList<>(arg1.getIntervals()); @@ -272,7 +295,7 @@ public static SegmentAnalysis mergeAnalyses( final Map leftColumns = arg1.getColumns(); final Map rightColumns = arg2.getColumns(); - Map columns = new TreeMap<>(); + final LinkedHashMap columns = new LinkedHashMap<>(); Set rightColumnNames = Sets.newHashSet(rightColumns.keySet()); for (Map.Entry entry : leftColumns.entrySet()) { diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index c07ab5d6b7a0..6a586342823c 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -52,6 +52,7 @@ import java.util.Collections; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; @@ -98,7 +99,7 @@ public Sequence run(QueryPlus inQ, ResponseCon totalSize = analyzedColumns.size() * numRows; } - Map columns = new TreeMap<>(); + LinkedHashMap columns = new LinkedHashMap<>(); ColumnIncluderator includerator = updatedQuery.getToInclude(); for (Map.Entry entry : analyzedColumns.entrySet()) { final String columnName = entry.getKey(); diff --git a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java index 71421366b4b3..13576a6a11f0 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java @@ -26,6 +26,7 @@ import org.apache.druid.query.aggregation.AggregatorFactory; import org.joda.time.Interval; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -42,7 +43,12 @@ public class SegmentAnalysis implements Comparable */ private final String id; private final List interval; - private final Map columns; + + /** + * Require LinkedHashMap to emphasize how important column order is. It's used by DruidSchema to keep + * SQL column order in line with ingestion column order. 
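+   * For example (hypothetical schema, for illustration only): a datasource ingested with
+   * dimensions (countryName, cityName) is expected to surface SQL columns in that order,
+   * rather than alphabetically as (cityName, countryName).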
+ */ + private final LinkedHashMap columns; private final long size; private final long numRows; private final Map aggregators; @@ -54,7 +60,7 @@ public class SegmentAnalysis implements Comparable public SegmentAnalysis( @JsonProperty("id") String id, @JsonProperty("intervals") List interval, - @JsonProperty("columns") Map columns, + @JsonProperty("columns") LinkedHashMap columns, @JsonProperty("size") long size, @JsonProperty("numRows") long numRows, @JsonProperty("aggregators") Map aggregators, @@ -87,7 +93,7 @@ public List getIntervals() } @JsonProperty - public Map getColumns() + public LinkedHashMap getColumns() { return columns; } diff --git a/processing/src/main/java/org/apache/druid/segment/IndexIO.java b/processing/src/main/java/org/apache/druid/segment/IndexIO.java index f593f104eb1b..9698ebdc2bed 100644 --- a/processing/src/main/java/org/apache/druid/segment/IndexIO.java +++ b/processing/src/main/java/org/apache/druid/segment/IndexIO.java @@ -79,6 +79,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -442,7 +443,7 @@ public QueryableIndex load(File inDir, ObjectMapper mapper, boolean lazy, Segmen { MMappedIndex index = legacyHandler.mapDir(inDir); - Map> columns = new HashMap<>(); + Map> columns = new LinkedHashMap<>(); for (String dimension : index.getAvailableDimensions()) { ColumnBuilder builder = new ColumnBuilder() @@ -624,7 +625,7 @@ public QueryableIndex load(File inDir, ObjectMapper mapper, boolean lazy, Segmen } } - Map> columns = new HashMap<>(); + Map> columns = new LinkedHashMap<>(); // Register the time column ByteBuffer timeBuffer = smooshedFiles.mapFile("__time"); diff --git a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java index 31480c62b1ab..79d010392223 100644 --- a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java +++ b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java @@ -45,7 +45,9 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.util.HashSet; +import java.util.LinkedHashSet; import java.util.Objects; +import java.util.Set; /** * @@ -82,8 +84,14 @@ public Indexed getAvailableDimensions() @Override public Iterable getAvailableMetrics() { - HashSet columnNames = Sets.newHashSet(index.getColumnNames()); - return Sets.difference(columnNames, Sets.newHashSet(index.getAvailableDimensions())); + // Use LinkedHashSet to preserve the original order. 
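+    // The previous Sets.difference() returned a view over hash-ordered sets, scrambling metric
+    // order; removing dimensions from a LinkedHashSet keeps metrics in segment order instead.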
+ final Set columnNames = new LinkedHashSet<>(index.getColumnNames()); + + for (final String dimension : index.getAvailableDimensions()) { + columnNames.remove(dimension); + } + + return columnNames; } @Override diff --git a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java index d0719e756470..9dc8ee684756 100644 --- a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java +++ b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java @@ -73,6 +73,7 @@ import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -153,42 +154,44 @@ public static Collection dataFeeder() SegmentAnalysis expectedSegmentAnalysisDouble = new SegmentAnalysis( SEGMENT_ID.toString(), ImmutableList.of(INTERVAL), - ImmutableMap.of( - TIME_COLUMN, - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.name(), - false, - false, - 100, - null, - null, - null, - null - ), - DIM_NAME, - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.name(), - false, - false, - 120, - 1, - DIM_VALUE, - DIM_VALUE, - null - ), - DIM_FLOAT_NAME, - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.name(), - false, - false, - 80, - null, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + TIME_COLUMN, + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.name(), + false, + false, + 100, + null, + null, + null, + null + ), + DIM_NAME, + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.name(), + false, + false, + 120, + 1, + DIM_VALUE, + DIM_VALUE, + null + ), + DIM_FLOAT_NAME, + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.name(), + false, + false, + 80, + null, + null, + null, + null + ) ) ), 330, MAX_ROWS, @@ -201,44 +204,47 @@ public static Collection dataFeeder() SegmentAnalysis expectedSegmentAnalysisFloat = new SegmentAnalysis( SEGMENT_ID.toString(), ImmutableList.of(INTERVAL), - ImmutableMap.of( - TIME_COLUMN, - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.name(), - false, - false, - 100, - null, - null, - null, - null - ), - DIM_NAME, - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.name(), - false, - false, - 120, - 1, - DIM_VALUE, - DIM_VALUE, - null - ), - DIM_FLOAT_NAME, - new ColumnAnalysis( - ColumnType.FLOAT, - ValueType.FLOAT.name(), - false, - false, - 80, - null, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + TIME_COLUMN, + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.name(), + false, + false, + 100, + null, + null, + null, + null + ), + DIM_NAME, + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.name(), + false, + false, + 120, + 1, + DIM_VALUE, + DIM_VALUE, + null + ), + DIM_FLOAT_NAME, + new ColumnAnalysis( + ColumnType.FLOAT, + ValueType.FLOAT.name(), + false, + false, + 80, + null, + null, + null, + null + ) ) - ), 330, + ), + 330, MAX_ROWS, null, null, diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java new file mode 100644 index 000000000000..4f68c9e059dd --- /dev/null +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.query.metadata;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.druid.data.input.impl.TimestampSpec;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.granularity.Granularities;
+import org.apache.druid.query.aggregation.CountAggregatorFactory;
+import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
+import org.apache.druid.query.metadata.metadata.SegmentAnalysis;
+import org.apache.druid.segment.TestHelper;
+import org.apache.druid.segment.column.ColumnType;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.LinkedHashMap;
+
+public class SegmentAnalysisTest
+{
+  @Test
+  public void testSerde() throws Exception
+  {
+    // Use LinkedHashMap to preserve order.
+    // We'll verify that the order is actually preserved on serde.
+    final LinkedHashMap<String, ColumnAnalysis> columns = new LinkedHashMap<>();
+    columns.put(
+        "b",
+        new ColumnAnalysis(ColumnType.LONG, ColumnType.LONG.asTypeString(), true, true, 0, null, null, null, null)
+    );
+    columns.put(
+        "a",
+        new ColumnAnalysis(ColumnType.FLOAT, ColumnType.FLOAT.asTypeString(), true, true, 0, null, null, null, null)
+    );
+    columns.put(
+        "f",
+        new ColumnAnalysis(ColumnType.STRING, ColumnType.STRING.asTypeString(), true, true, 0, null, null, null, null)
+    );
+    columns.put(
+        "c",
+        new ColumnAnalysis(ColumnType.DOUBLE, ColumnType.DOUBLE.asTypeString(), true, true, 0, null, null, null, null)
+    );
+
+    final SegmentAnalysis analysis = new SegmentAnalysis(
+        "id",
+        Intervals.ONLY_ETERNITY,
+        columns,
+        1,
+        2,
+        ImmutableMap.of("cnt", new CountAggregatorFactory("cnt")),
+        new TimestampSpec(null, null, null),
+        Granularities.SECOND,
+        true
+    );
+
+    final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
+    final SegmentAnalysis analysis2 = jsonMapper.readValue(
+        jsonMapper.writeValueAsBytes(analysis),
+        SegmentAnalysis.class
+    );
+
+    Assert.assertEquals(analysis, analysis2);
+
+    // Verify column order is preserved.
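+    // Note: comparing entry lists rather than the maps themselves, because Map.equals() ignores
+    // iteration order while List.equals() fails if serde reordered the columns.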
+ Assert.assertEquals( + ImmutableList.copyOf(columns.entrySet()), + ImmutableList.copyOf(analysis2.getColumns().entrySet()) + ); + } +} diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java index 82482541fbe0..b8c35917c3f0 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java @@ -46,6 +46,7 @@ import org.apache.druid.segment.Segment; import org.apache.druid.segment.TestIndex; import org.apache.druid.segment.column.ColumnBuilder; +import org.apache.druid.segment.column.ColumnHolder; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.ValueType; import org.apache.druid.segment.data.ObjectStrategy; @@ -68,6 +69,7 @@ import java.net.URL; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; @@ -108,9 +110,20 @@ private void testIncrementalWorksHelper(EnumSet> entriesInOrder = new ArrayList<>(columns.entrySet()); + + Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, entriesInOrder.get(0).getKey()); + Assert.assertEquals(ColumnType.LONG, entriesInOrder.get(0).getValue().getTypeSignature()); + + // Start from 1: skipping __time + for (int i = 0; i < TestIndex.DIMENSION_SCHEMAS.size(); i++) { + final DimensionSchema schema = TestIndex.DIMENSION_SCHEMAS.get(i); + final Map.Entry analysisEntry = entriesInOrder.get(i + 1 /* skip __time */); final String dimension = schema.getName(); - final ColumnAnalysis columnAnalysis = columns.get(dimension); + Assert.assertEquals(dimension, analysisEntry.getKey()); + final ColumnAnalysis columnAnalysis = analysisEntry.getValue(); final boolean isString = schema.getColumnType().is(ValueType.STRING); Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType()); @@ -161,14 +174,20 @@ private void testMappedWorksHelper(EnumSet an Assert.assertEquals(SegmentId.dummy("test_1").toString(), analysis.getId()); final Map columns = analysis.getColumns(); - Assert.assertEquals( - TestIndex.COLUMNS.length + 3, - columns.size() - ); // All columns including time + // Verify key order is the same as the underlying segment. + // This helps DruidSchema keep things in the proper order when it does SegmentMetadata queries. 
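+    // Copying the entry set into a list lets each column's position be asserted directly;
+    // LinkedHashMap iterates in insertion order, i.e. the order columns appear in the segment.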
+ final List> entriesInOrder = new ArrayList<>(columns.entrySet()); - for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) { + Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, entriesInOrder.get(0).getKey()); + Assert.assertEquals(ColumnType.LONG, entriesInOrder.get(0).getValue().getTypeSignature()); + + // Start from 1: skipping __time + for (int i = 0; i < TestIndex.DIMENSION_SCHEMAS.size(); i++) { + final DimensionSchema schema = TestIndex.DIMENSION_SCHEMAS.get(i); + final Map.Entry analysisEntry = entriesInOrder.get(i + 1 /* skip __time */); final String dimension = schema.getName(); - final ColumnAnalysis columnAnalysis = columns.get(dimension); + Assert.assertEquals(dimension, analysisEntry.getKey()); + final ColumnAnalysis columnAnalysis = analysisEntry.getValue(); final boolean isString = schema.getColumnType().is(ValueType.STRING); Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType()); Assert.assertEquals(dimension, 0, columnAnalysis.getSize()); @@ -204,6 +223,7 @@ private void testMappedWorksHelper(EnumSet an * *Awesome* method name auto-generated by IntelliJ! I love IntelliJ! * * @param index + * * @return */ private List getSegmentAnalysises(Segment index, EnumSet analyses) @@ -257,6 +277,7 @@ public ObjectStrategy getObjectStrategy() * (which can happen if an aggregator was removed for a later version), then, * analyzing the segment doesn't fail and the result of analysis of the complex column * is reported as an error. + * * @throws IOException */ @Test @@ -317,7 +338,10 @@ public void testAnalyzingSegmentWithNonExistentAggregator() throws IOException Assert.assertEquals("error:unknown_complex_invalid_complex_column_type", invalidColumnAnalysis.getErrorMessage()); // Run a segment metadata query also to verify it doesn't break - final List results = getSegmentAnalysises(segment, EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE)); + final List results = getSegmentAnalysises( + segment, + EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE) + ); for (SegmentAnalysis result : results) { Assert.assertTrue(result.getColumns().get(invalid_aggregator).isError()); } diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java index b93c160b2906..f6a8e8b4979b 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java @@ -46,6 +46,7 @@ import org.junit.Test; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -77,20 +78,23 @@ public void testCacheStrategy() throws Exception SegmentAnalysis result = new SegmentAnalysis( "testSegment", ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.name(), - true, - false, - 10881, - 1, - "preferred", - "preferred", - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.name(), + true, + false, + 10881, + 1, + "preferred", + "preferred", + null + ) ) - ), 71982, + ), + 71982, 100, null, null, @@ -117,7 +121,7 @@ public void testMergeAggregators() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", 
null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -131,7 +135,7 @@ public void testMergeAggregators() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -167,7 +171,7 @@ public void testMergeAggregatorsOneNull() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -178,7 +182,7 @@ public void testMergeAggregatorsOneNull() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -206,7 +210,7 @@ public void testMergeAggregatorsAllNull() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -217,7 +221,7 @@ public void testMergeAggregatorsAllNull() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -236,7 +240,7 @@ public void testMergeAggregatorsConflict() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -250,7 +254,7 @@ public void testMergeAggregatorsConflict() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, ImmutableMap.of( @@ -331,7 +335,7 @@ public void testMergeRollup() final SegmentAnalysis analysis1 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -342,7 +346,7 @@ public void testMergeRollup() final SegmentAnalysis analysis2 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -353,7 +357,7 @@ public void testMergeRollup() final SegmentAnalysis analysis3 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -364,7 +368,7 @@ public void testMergeRollup() final SegmentAnalysis analysis4 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -375,7 +379,7 @@ public void testMergeRollup() final SegmentAnalysis analysis5 = new SegmentAnalysis( "id", null, - new HashMap<>(), + new LinkedHashMap<>(), 0, 0, null, @@ -395,6 +399,7 @@ private static SegmentAnalysis mergeStrict(SegmentAnalysis analysis1, SegmentAna { return SegmentMetadataQueryQueryToolChest.finalizeAnalysis( SegmentMetadataQueryQueryToolChest.mergeAnalyses( + null, analysis1, analysis2, false @@ -406,6 +411,7 @@ private static SegmentAnalysis mergeLenient(SegmentAnalysis analysis1, SegmentAn { return SegmentMetadataQueryQueryToolChest.finalizeAnalysis( SegmentMetadataQueryQueryToolChest.mergeAnalyses( + null, analysis1, analysis2, true diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java index ded726d5838e..0a93dd373aca 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java @@ -69,6 +69,7 @@ import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutorService; @@ -202,42 +203,44 @@ public SegmentMetadataQueryTest( expectedSegmentAnalysis1 = new SegmentAnalysis( id1.toString(), 
ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "__time", - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.toString(), - false, - false, - 12090, - null, - null, - null, - null - ), - "index", - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.toString(), - false, - false, - 9672, - null, - null, - null, - null - ), - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - preferedSize1, - 1, - "preferred", - "preferred", - null + new LinkedHashMap<>( + ImmutableMap.of( + "__time", + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.toString(), + false, + false, + 12090, + null, + null, + null, + null + ), + "index", + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.toString(), + false, + false, + 9672, + null, + null, + null, + null + ), + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + preferedSize1, + 1, + "preferred", + "preferred", + null + ) ) ), overallSize1, @@ -250,42 +253,44 @@ public SegmentMetadataQueryTest( expectedSegmentAnalysis2 = new SegmentAnalysis( id2.toString(), ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "__time", - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.toString(), - false, - false, - 12090, - null, - null, - null, - null - ), - "index", - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.toString(), - false, - false, - 9672, - null, - null, - null, - null - ), - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - placementSize2, - 1, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + "__time", + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.toString(), + false, + false, + 12090, + null, + null, + null, + null + ), + "index", + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.toString(), + false, + false, + 9672, + null, + null, + null, + null + ), + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + placementSize2, + 1, + null, + null, + null + ) ) ), // null_column will be included only for incremental index, which makes a little bigger result than expected @@ -313,30 +318,32 @@ public void testSegmentMetadataQueryWithRollupMerge() SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? 
"merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null - ), - "placementish", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - true, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ), + "placementish", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + true, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -385,30 +392,32 @@ public void testSegmentMetadataQueryWithHasMultipleValuesMerge() SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 1, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null - ), - "placementish", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - true, - false, - 0, - 9, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 1, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ), + "placementish", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + true, + false, + 0, + 9, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -457,30 +466,32 @@ public void testSegmentMetadataQueryWithComplexColumnMerge() SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 1, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null - ), - "quality_uniques", - new ColumnAnalysis( - ColumnType.ofComplex("hyperUnique"), - "hyperUnique", - false, - true, - 0, - null, - null, - null, - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 1, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ), + "quality_uniques", + new ColumnAnalysis( + ColumnType.ofComplex("hyperUnique"), + "hyperUnique", + false, + true, + 0, + null, + null, + null, + null + ) ) ), 0, @@ -600,33 +611,35 @@ private void testSegmentMetadataQueryWithDefaultAnalysisMerge( SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? 
"merged" : SegmentId.dummy("testSegment").toString(), ImmutableList.of(expectedSegmentAnalysis1.getIntervals().get(0)), - ImmutableMap.of( - "__time", - new ColumnAnalysis( - ColumnType.LONG, - ValueType.LONG.toString(), - false, - false, - 12090 * 2, - null, - null, - null, - null - ), - "index", - new ColumnAnalysis( - ColumnType.DOUBLE, - ValueType.DOUBLE.toString(), - false, - false, - 9672 * 2, - null, - null, - null, - null - ), - column, - analysis + new LinkedHashMap<>( + ImmutableMap.of( + "__time", + new ColumnAnalysis( + ColumnType.LONG, + ValueType.LONG.toString(), + false, + false, + 12090 * 2, + null, + null, + null, + null + ), + "index", + new ColumnAnalysis( + ColumnType.DOUBLE, + ValueType.DOUBLE.toString(), + false, + false, + 9672 * 2, + null, + null, + null, + null + ), + column, + analysis + ) ), expectedSegmentAnalysis1.getSize() + expectedSegmentAnalysis2.getSize(), expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(), @@ -668,18 +681,20 @@ public void testSegmentMetadataQueryWithNoAnalysisTypesMerge() SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -732,18 +747,20 @@ public void testSegmentMetadataQueryWithAggregatorsMerge() SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -792,18 +809,20 @@ public void testSegmentMetadataQueryWithTimestampSpecMerge() SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? "merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -852,18 +871,20 @@ public void testSegmentMetadataQueryWithQueryGranularityMerge() SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis( differentIds ? 
"merged" : SegmentId.dummy("testSegment").toString(), null, - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - 0, - 0, - NullHandling.defaultStringValue(), - NullHandling.defaultStringValue(), - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + 0, + 0, + NullHandling.defaultStringValue(), + NullHandling.defaultStringValue(), + null + ) ) ), 0, @@ -937,7 +958,10 @@ public void testBySegmentResults() TestHelper.assertExpectedObjects( ImmutableList.of(bySegmentResult, bySegmentResult), - myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true)))), + myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of( + QueryContexts.BY_SEGMENT_KEY, + true + )))), "failed SegmentMetadata bySegment query" ); exec.shutdownNow(); @@ -1265,12 +1289,12 @@ public void testCacheKeyWithListColumnIncluderator() .build(); final byte[] oneColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy( - oneColumnQuery) + oneColumnQuery) .computeCacheKey( oneColumnQuery); final byte[] twoColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy( - twoColumnQuery) + twoColumnQuery) .computeCacheKey( twoColumnQuery); diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java index ead3c9ff62cf..b29bfeb49500 100644 --- a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java @@ -44,6 +44,7 @@ import org.junit.runners.Parameterized; import java.util.Collections; +import java.util.LinkedHashMap; import java.util.List; @RunWith(Parameterized.class) @@ -80,7 +81,7 @@ public static Iterable constructorFeeder() null ), true, - }, + }, new Object[]{ QueryRunnerTestHelper.makeUnionQueryRunner( FACTORY, @@ -99,18 +100,20 @@ public void testSegmentMetadataUnionQuery() SegmentAnalysis expected = new SegmentAnalysis( QueryRunnerTestHelper.SEGMENT_ID.toString(), Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")), - ImmutableMap.of( - "placement", - new ColumnAnalysis( - ColumnType.STRING, - ValueType.STRING.toString(), - false, - false, - mmap ? 43524 : 43056, - 1, - "preferred", - "preferred", - null + new LinkedHashMap<>( + ImmutableMap.of( + "placement", + new ColumnAnalysis( + ColumnType.STRING, + ValueType.STRING.toString(), + false, + false, + mmap ? 43524 : 43056, + 1, + "preferred", + "preferred", + null + ) ) ), mmap ? 
805380 : 803324, diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java index 785913b0a6af..898489cce394 100644 --- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java +++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java @@ -73,6 +73,7 @@ import java.util.Comparator; import java.util.EnumSet; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.Map; import java.util.Optional; import java.util.Set; @@ -783,7 +784,9 @@ private Set refreshSegmentsForDataSource(final String dataSource, fin DruidTable buildDruidTable(final String dataSource) { ConcurrentSkipListMap segmentsMap = segmentMetadataInfo.get(dataSource); - final Map columnTypes = new TreeMap<>(); + + // Preserve order. + final Map columnTypes = new LinkedHashMap<>(); if (segmentsMap != null && !segmentsMap.isEmpty()) { for (AvailableSegmentMetadata availableSegmentMetadata : segmentsMap.values()) { diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java index f07f60980d59..31246b4bca17 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java @@ -108,8 +108,8 @@ public void testExplainExactCountDistinctOfSemiJoinResult() throws Exception + ")"; final String legacyExplanation = "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"query\",\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"list\",\"granularity\":{\"type\":\"all\"}}},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[],\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{a0:LONG}])\n" - + " DruidJoinQueryRel(condition=[=(SUBSTRING($3, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{d0:STRING}])\n" - + " 
DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n" + + " DruidJoinQueryRel(condition=[=(SUBSTRING($2, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{d0:STRING}])\n" + + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n" + " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extractionFn\":{\"type\":\"substring\",\"index\":0,\"length\":1}}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}}], signature=[{d0:STRING}])\n"; final String explanation = "[" + "{\"query\":{\"queryType\":\"groupBy\"," @@ -153,8 +153,8 @@ public void testExplainSelectStarWithOverrides() throws Exception // Skip vectorization since otherwise the "context" will change for each subtest. 
skipVectorize(); - String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; - String legacyExplanationWithContext = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"useNativeQueryExplain\":false},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; + String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; + String legacyExplanationWithContext = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"useNativeQueryExplain\":false},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; String explanation = "[{" + "\"query\":{\"queryType\":\"scan\"," + "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"}," @@ -164,7 +164,7 @@ public void testExplainSelectStarWithOverrides() throws Exception + "\"legacy\":false," + "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}," + 
"\"granularity\":{\"type\":\"all\"}}," - + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + "}]"; String explanationWithContext = "[{" @@ -176,7 +176,7 @@ public void testExplainSelectStarWithOverrides() throws Exception + "\"legacy\":false," + "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"useNativeQueryExplain\":true,\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}," + "\"granularity\":{\"type\":\"all\"}}," - + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + "}]"; String sql = "EXPLAIN PLAN FOR SELECT * FROM druid.foo"; String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]"; diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java index 7040e7fc5394..8200fa2dc1ff 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java @@ -70,10 +70,10 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest protected static final RowSignature FOO_TABLE_SIGNATURE = RowSignature.builder() .addTimeColumn() - .add("cnt", ColumnType.LONG) .add("dim1", ColumnType.STRING) .add("dim2", ColumnType.STRING) .add("dim3", ColumnType.STRING) + .add("cnt", ColumnType.LONG) .add("m1", ColumnType.FLOAT) .add("m2", ColumnType.DOUBLE) .add("unique_dim1", HyperUniquesAggregatorFactory.TYPE) diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java index 15680504c6db..77e6ca22d239 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java @@ -282,10 +282,10 @@ public void testInformationSchemaColumnsOnTable() throws Exception ImmutableList.of(), ImmutableList.of( new Object[]{"__time", "TIMESTAMP", "NO"}, - new Object[]{"cnt", "BIGINT", useDefault ? 
"NO" : "YES"}, new Object[]{"dim1", "VARCHAR", "YES"}, new Object[]{"dim2", "VARCHAR", "YES"}, new Object[]{"dim3", "VARCHAR", "YES"}, + new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"}, new Object[]{"m1", "FLOAT", useDefault ? "NO" : "YES"}, new Object[]{"m2", "DOUBLE", useDefault ? "NO" : "YES"}, new Object[]{"unique_dim1", "COMPLEX", "YES"} @@ -313,9 +313,9 @@ public void testInformationSchemaColumnsOnForbiddenTable() throws Exception ImmutableList.of(), ImmutableList.of( new Object[]{"__time", "TIMESTAMP", "NO"}, - new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"}, new Object[]{"dim1", "VARCHAR", "YES"}, new Object[]{"dim2", "VARCHAR", "YES"}, + new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"}, new Object[]{"m1", "FLOAT", useDefault ? "NO" : "YES"}, new Object[]{"m2", "DOUBLE", useDefault ? "NO" : "YES"}, new Object[]{"unique_dim1", "COMPLEX", "YES"} diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java index 310c2eb8e234..4f39dff99f03 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java @@ -592,9 +592,9 @@ public void testSelectStarWithDimFilter() throws Exception .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0d, HLLC_STRING}, - new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4.0f, 4.0d, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5.0f, 5.0d, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0d, HLLC_STRING}, + new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4.0f, 4.0d, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5.0f, 5.0d, HLLC_STRING} ) ); } @@ -1097,12 +1097,12 @@ public void testSelectStar() throws Exception .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5f, 5.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING, NULL_STRING, 6f, 6.0, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, HLLC_STRING} ) ); } @@ -1131,18 +1131,18 @@ public void testSelectStarOnForbiddenTable() throws Exception ImmutableList.of( new Object[]{ timestamp("2000-01-01"), - 1L, "forbidden", "abcd", + 1L, 9999.0f, NullHandling.defaultDoubleValue(), "\"AQAAAQAAAALFBA==\"" }, new Object[]{ timestamp("2000-01-02"), - 1L, "forbidden", "a", + 1L, 1234.0f, NullHandling.defaultDoubleValue(), "\"AQAAAQAAAALFBA==\"" @@ -1271,7 +1271,7 @@ public void 
testExplainSelectStar() throws Exception skipVectorize(); final String query = "EXPLAIN PLAN FOR SELECT * FROM druid.foo"; - final String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, cnt:LONG, dim1:STRING, dim2:STRING, dim3:STRING, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; + final String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}}], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX}])\n"; final String explanation = "[{" + "\"query\":{\"queryType\":\"scan\"," + "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"}," @@ -1281,7 +1281,7 @@ public void testExplainSelectStar() throws Exception + "\"legacy\":false," + "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}," + "\"granularity\":{\"type\":\"all\"}}," - + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX\"}]" + "}]"; final String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]"; @@ -1328,8 +1328,8 @@ public void testSelectStarWithLimit() throws Exception .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1.0f, 1.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING} ) ); } @@ -1354,8 +1354,8 @@ public void testSelectStarWithLimitAndOffset() throws Exception .build() ), 
ImmutableList.of( - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING} + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING} ) ); } @@ -1429,8 +1429,8 @@ public void testSelectStarWithLimitTimeDescending() throws Exception .build() ), ImmutableList.of( - new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING, NULL_STRING, 6f, 6d, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5f, 5d, HLLC_STRING} + new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6d, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5d, HLLC_STRING} ) ); } @@ -1455,12 +1455,12 @@ public void testSelectStarWithoutLimitTimeAscending() throws Exception .build() ), ImmutableList.of( - new Object[]{timestamp("2000-01-01"), 1L, "", "a", "[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING, "[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING}, - new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4f, 4.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-02"), 1L, "def", "abc", NULL_STRING, 5f, 5.0, HLLC_STRING}, - new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING, NULL_STRING, 6f, 6.0, HLLC_STRING} + new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]", 1L, 1f, 1.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING, "[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING}, + new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING, 1L, 5f, 5.0, HLLC_STRING}, + new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING, NULL_STRING, 1L, 6f, 6.0, HLLC_STRING} ) ); } diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java index 708ba50c4d1c..4602a6823155 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java @@ -78,6 +78,7 @@ import java.io.IOException; import java.util.EnumSet; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -206,7 +207,10 @@ void markDataSourceAsNeedRebuild(String datasource) CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate), serverView, segmentManager, - new MapJoinableFactory(ImmutableSet.of(globalTableJoinable), ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class)), + new MapJoinableFactory( + ImmutableSet.of(globalTableJoinable), + ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class) + ), PLANNER_CONFIG_DEFAULT, new NoopEscalator(), new BrokerInternalQueryConfig(), @@ -288,16 +292,16 @@ public void testGetTableMapFoo() Assert.assertEquals("__time", fields.get(0).getName()); Assert.assertEquals(SqlTypeName.TIMESTAMP, fields.get(0).getType().getSqlTypeName()); - Assert.assertEquals("cnt", fields.get(1).getName()); - Assert.assertEquals(SqlTypeName.BIGINT, 
fields.get(1).getType().getSqlTypeName()); + Assert.assertEquals("dim2", fields.get(1).getName()); + Assert.assertEquals(SqlTypeName.VARCHAR, fields.get(1).getType().getSqlTypeName()); - Assert.assertEquals("dim1", fields.get(2).getName()); - Assert.assertEquals(SqlTypeName.VARCHAR, fields.get(2).getType().getSqlTypeName()); + Assert.assertEquals("m1", fields.get(2).getName()); + Assert.assertEquals(SqlTypeName.BIGINT, fields.get(2).getType().getSqlTypeName()); - Assert.assertEquals("dim2", fields.get(3).getName()); + Assert.assertEquals("dim1", fields.get(3).getName()); Assert.assertEquals(SqlTypeName.VARCHAR, fields.get(3).getType().getSqlTypeName()); - Assert.assertEquals("m1", fields.get(4).getName()); + Assert.assertEquals("cnt", fields.get(4).getName()); Assert.assertEquals(SqlTypeName.BIGINT, fields.get(4).getType().getSqlTypeName()); Assert.assertEquals("unique_dim1", fields.get(5).getName()); @@ -1065,7 +1069,7 @@ public void testRunSegmentMetadataQueryWithContext() throws Exception new TableDataSource(segment.getDataSource()), new MultipleSpecificSegmentSpec( segmentIterable.stream() - .map(SegmentId::toDescriptor).collect(Collectors.toList())), + .map(SegmentId::toDescriptor).collect(Collectors.toList())), new AllColumnIncluderator(), false, queryContext, @@ -1094,7 +1098,8 @@ public void testRunSegmentMetadataQueryWithContext() throws Exception EasyMock.expect(factoryMock.factorize()).andReturn(lifecycleMock).once(); // This is the meat of the test: making sure that the query created by the method under test matches the expected query, specifically the operator-configured context - EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)).andReturn(null); + EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery, AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)) + .andReturn(null); EasyMock.replay(factoryMock, lifecycleMock); @@ -1107,36 +1112,28 @@ public void testRunSegmentMetadataQueryWithContext() throws Exception @Test public void testSegmentMetadataColumnType() { + // Verify order is preserved.
+ final LinkedHashMap&lt;String, ColumnAnalysis&gt; columns = new LinkedHashMap<>(); + columns.put( + "a", + new ColumnAnalysis(ColumnType.STRING, ColumnType.STRING.asTypeString(), false, true, 1234, 26, "a", "z", null) + ); + + columns.put( + "count", + new ColumnAnalysis(ColumnType.LONG, ColumnType.LONG.asTypeString(), false, true, 1234, 26, "a", "z", null) + ); + + columns.put( + "b", + new ColumnAnalysis(ColumnType.DOUBLE, ColumnType.DOUBLE.asTypeString(), false, true, 1234, 26, null, null, null) + ); + RowSignature signature = DruidSchema.analysisToRowSignature( new SegmentAnalysis( "id", ImmutableList.of(Intervals.utc(1L, 2L)), - ImmutableMap.of( - "a", - new ColumnAnalysis( - ColumnType.STRING, - ColumnType.STRING.asTypeString(), - false, - true, - 1234, - 26, - "a", - "z", - null - ), - "count", - new ColumnAnalysis( - ColumnType.LONG, - ColumnType.LONG.asTypeString(), - false, - true, - 1234, - 26, - "a", - "z", - null - ) - ), + columns, 1234, 100, null, @@ -1147,7 +1144,11 @@ public void testSegmentMetadataColumnType() ); Assert.assertEquals( - RowSignature.builder().add("a", ColumnType.STRING).add("count", ColumnType.LONG).build(), + RowSignature.builder() + .add("a", ColumnType.STRING) + .add("count", ColumnType.LONG) + .add("b", ColumnType.DOUBLE) + .build(), signature ); } @@ -1160,30 +1161,32 @@ public void testSegmentMetadataFallbackType() new SegmentAnalysis( "id", ImmutableList.of(Intervals.utc(1L, 2L)), - ImmutableMap.of( - "a", - new ColumnAnalysis( - null, - ColumnType.STRING.asTypeString(), - false, - true, - 1234, - 26, - "a", - "z", - null - ), - "count", - new ColumnAnalysis( - null, - ColumnType.LONG.asTypeString(), - false, - true, - 1234, - 26, + new LinkedHashMap<>( + ImmutableMap.of( "a", - "z", - null + new ColumnAnalysis( + null, + ColumnType.STRING.asTypeString(), + false, + true, + 1234, + 26, + "a", + "z", + null + ), + "count", + new ColumnAnalysis( + null, + ColumnType.LONG.asTypeString(), + false, + true, + 1234, + 26, + "a", + "z", + null + ) ) ), 1234, From 03f41157cbcd871a6339f904de7e9949699c087a Mon Sep 17 00:00:00 2001 From: Gian Merlino Date: Wed, 6 Jul 2022 22:38:40 -0700 Subject: [PATCH 2/3] Additional test adjustments.
--- .../druid/query/metadata/SegmentAnalyzer.java | 1 - .../SegmentMetadataQueryQueryToolChest.java | 1 - .../SegmentMetadataQueryRunnerFactory.java | 1 - .../segment/QueryableIndexStorageAdapter.java | 2 - .../sql/avatica/DruidAvaticaHandlerTest.java | 40 +++++++++---------- .../druid/sql/avatica/DruidStatementTest.java | 6 +-- .../sql/calcite/schema/DruidSchemaTest.java | 2 +- .../druid/sql/http/SqlResourceTest.java | 30 +++++++------- 8 files changed, 38 insertions(+), 45 deletions(-) diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java index f22b2f68dc90..88cc5dcb8fe5 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java @@ -60,7 +60,6 @@ import java.util.EnumSet; import java.util.LinkedHashMap; import java.util.Map; -import java.util.TreeMap; public class SegmentAnalyzer { diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java index b2864b45f2d0..45cc18ff5a39 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java @@ -67,7 +67,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.TreeMap; import java.util.function.BinaryOperator; public class SegmentMetadataQueryQueryToolChest extends QueryToolChest diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java index 6a586342823c..bc7dc9339b9e 100644 --- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java +++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java @@ -55,7 +55,6 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.TreeMap; import java.util.concurrent.CancellationException; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; diff --git a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java index 79d010392223..0b8727f4cbc6 100644 --- a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java +++ b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java @@ -20,7 +20,6 @@ package org.apache.druid.segment; import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Sets; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.granularity.Granularities; @@ -44,7 +43,6 @@ import javax.annotation.Nullable; import java.io.IOException; import java.io.UncheckedIOException; -import java.util.HashSet; import java.util.LinkedHashSet; import java.util.Objects; import java.util.Set; diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java index 
f20fca2af3a4..6c21a8ced513 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java @@ -570,14 +570,6 @@ public void testDatabaseMetaDataColumns() throws Exception Pair.of("TYPE_NAME", "TIMESTAMP"), Pair.of("IS_NULLABLE", "NO") ), - row( - Pair.of("TABLE_SCHEM", "druid"), - Pair.of("TABLE_NAME", "foo"), - Pair.of("COLUMN_NAME", "cnt"), - Pair.of("DATA_TYPE", Types.BIGINT), - Pair.of("TYPE_NAME", "BIGINT"), - Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO") - ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), @@ -602,6 +594,14 @@ public void testDatabaseMetaDataColumns() throws Exception Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), + row( + Pair.of("TABLE_SCHEM", "druid"), + Pair.of("TABLE_NAME", "foo"), + Pair.of("COLUMN_NAME", "cnt"), + Pair.of("DATA_TYPE", Types.BIGINT), + Pair.of("TYPE_NAME", "BIGINT"), + Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO") + ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", "foo"), @@ -661,14 +661,6 @@ public void testDatabaseMetaDataColumnsWithSuperuser() throws Exception Pair.of("TYPE_NAME", "TIMESTAMP"), Pair.of("IS_NULLABLE", "NO") ), - row( - Pair.of("TABLE_SCHEM", "druid"), - Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), - Pair.of("COLUMN_NAME", "cnt"), - Pair.of("DATA_TYPE", Types.BIGINT), - Pair.of("TYPE_NAME", "BIGINT"), - Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO") - ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), @@ -685,6 +677,14 @@ public void testDatabaseMetaDataColumnsWithSuperuser() throws Exception Pair.of("TYPE_NAME", "VARCHAR"), Pair.of("IS_NULLABLE", "YES") ), + row( + Pair.of("TABLE_SCHEM", "druid"), + Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), + Pair.of("COLUMN_NAME", "cnt"), + Pair.of("DATA_TYPE", Types.BIGINT), + Pair.of("TYPE_NAME", "BIGINT"), + Pair.of("IS_NULLABLE", nullNumeric ? 
"YES" : "NO") + ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE), @@ -1180,22 +1180,22 @@ public void testEscapingForGetColumns() throws Exception row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), - Pair.of("COLUMN_NAME", "cnt") + Pair.of("COLUMN_NAME", "dim1") ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), - Pair.of("COLUMN_NAME", "dim1") + Pair.of("COLUMN_NAME", "dim2") ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), - Pair.of("COLUMN_NAME", "dim2") + Pair.of("COLUMN_NAME", "dim3") ), row( Pair.of("TABLE_SCHEM", "druid"), Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE), - Pair.of("COLUMN_NAME", "dim3") + Pair.of("COLUMN_NAME", "cnt") ), row( Pair.of("TABLE_SCHEM", "druid"), diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java index dd643aaf14a6..5315c27b8507 100644 --- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java +++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java @@ -123,10 +123,10 @@ public void testSignature() Assert.assertEquals( Lists.newArrayList( Lists.newArrayList("__time", "TIMESTAMP", "java.lang.Long"), - Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"), Lists.newArrayList("dim1", "VARCHAR", "java.lang.String"), Lists.newArrayList("dim2", "VARCHAR", "java.lang.String"), Lists.newArrayList("dim3", "VARCHAR", "java.lang.String"), + Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"), Lists.newArrayList("m1", "FLOAT", "java.lang.Float"), Lists.newArrayList("m2", "DOUBLE", "java.lang.Double"), Lists.newArrayList("unique_dim1", "OTHER", "java.lang.Object") @@ -163,9 +163,7 @@ public void testSubQueryWithOrderBy() true, Lists.newArrayList( new Object[]{""}, - new Object[]{ - "1" - }, + new Object[]{"1"}, new Object[]{"10.1"}, new Object[]{"2"}, new Object[]{"abc"}, diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java index 4602a6823155..36e575e2bdbe 100644 --- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java +++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java @@ -1162,7 +1162,7 @@ public void testSegmentMetadataFallbackType() "id", ImmutableList.of(Intervals.utc(1L, 2L)), new LinkedHashMap<>( - ImmutableMap.of( + ImmutableMap.of( "a", new ColumnAnalysis( null, diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java index 77a7c7720bc9..fe6560368571 100644 --- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java +++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java @@ -121,13 +121,13 @@ public class SqlResourceTest extends CalciteTestBase private static final String DUMMY_SQL_QUERY_ID = "dummy"; private static final List EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS = - Arrays.asList("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", "unique_dim1", "EXPR$8"); + Arrays.asList("__time", "dim1", "dim2", "dim3", "cnt", "m1", "m2", "unique_dim1", "EXPR$8"); private static final List EXPECTED_TYPES_FOR_RESULT_FORMAT_TESTS = - Arrays.asList("LONG", "LONG", "STRING", "STRING", "STRING", "FLOAT", "DOUBLE", "COMPLEX", "STRING"); + Arrays.asList("LONG", "STRING", 
"STRING", "STRING", "LONG", "FLOAT", "DOUBLE", "COMPLEX", "STRING"); private static final List EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS = - Arrays.asList("TIMESTAMP", "BIGINT", "VARCHAR", "VARCHAR", "VARCHAR", "FLOAT", "DOUBLE", "OTHER", "VARCHAR"); + Arrays.asList("TIMESTAMP", "VARCHAR", "VARCHAR", "VARCHAR", "BIGINT", "FLOAT", "DOUBLE", "OTHER", "VARCHAR"); private static QueryRunnerFactoryConglomerate conglomerate; private static Closer resourceCloser; @@ -541,10 +541,10 @@ public void testArrayResultFormat() throws Exception ImmutableList.of( Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -552,10 +552,10 @@ public void testArrayResultFormat() throws Exception ), Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -652,10 +652,10 @@ public void testArrayResultFormatWithHeader() throws Exception EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS, Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -663,10 +663,10 @@ public void testArrayResultFormatWithHeader() throws Exception ), Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -720,10 +720,10 @@ public void testArrayLinesResultFormat() throws Exception Assert.assertEquals( Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -734,10 +734,10 @@ public void testArrayLinesResultFormat() throws Exception Assert.assertEquals( Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -768,10 +768,10 @@ public void testArrayLinesResultFormatWithHeader() throws Exception Assert.assertEquals( Arrays.asList( "2000-01-01T00:00:00.000Z", - 1, "", "a", "[\"a\",\"b\"]", + 1, 1.0, 1.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -782,10 +782,10 @@ public void testArrayLinesResultFormatWithHeader() throws Exception Assert.assertEquals( Arrays.asList( "2000-01-02T00:00:00.000Z", - 1, "10.1", nullStr, "[\"b\",\"c\"]", + 1, 2.0, 2.0, "org.apache.druid.hll.VersionOneHyperLogLogCollector", @@ -1096,8 +1096,8 @@ public void testCsvResultFormat() throws Exception Assert.assertEquals( ImmutableList.of( - "2000-01-01T00:00:00.000Z,1,,a,\"[\"\"a\"\",\"\"b\"\"]\",1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", - "2000-01-02T00:00:00.000Z,1,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + "2000-01-01T00:00:00.000Z,,a,\"[\"\"a\"\",\"\"b\"\"]\",1,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + "2000-01-02T00:00:00.000Z,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",1,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", "", "" ), @@ -1121,8 +1121,8 @@ public void testCsvResultFormatWithHeaders() throws Exception String.join(",", EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS), String.join(",", EXPECTED_TYPES_FOR_RESULT_FORMAT_TESTS), String.join(",", EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS), - "2000-01-01T00:00:00.000Z,1,,a,\"[\"\"a\"\",\"\"b\"\"]\",1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", - 
"2000-01-02T00:00:00.000Z,1,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + "2000-01-01T00:00:00.000Z,,a,\"[\"\"a\"\",\"\"b\"\"]\",1,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", + "2000-01-02T00:00:00.000Z,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",1,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,", "", "" ), From c21b8d60291a5a74e9f1b0e1c9a5c1c56af35548 Mon Sep 17 00:00:00 2001 From: Gian Merlino Date: Thu, 7 Jul 2022 01:38:25 -0700 Subject: [PATCH 3/3] Adjust imports. --- .../druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java index be39b252cff7..c9b46d678e59 100644 --- a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java +++ b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java @@ -57,9 +57,7 @@ import org.openjdk.jmh.infra.Blackhole; import java.io.IOException; -import java.util.HashMap; import java.util.LinkedHashMap; -import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit;