diff --git a/processing/src/main/java/org/apache/druid/error/DruidException.java b/processing/src/main/java/org/apache/druid/error/DruidException.java
index a04f3f6512cf..f6af298d40eb 100644
--- a/processing/src/main/java/org/apache/druid/error/DruidException.java
+++ b/processing/src/main/java/org/apache/druid/error/DruidException.java
@@ -24,6 +24,7 @@
import org.apache.druid.java.util.common.StringUtils;
import javax.annotation.concurrent.NotThreadSafe;
+import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;
@@ -130,6 +131,8 @@
@NotThreadSafe
public class DruidException extends RuntimeException
{
+ public static final String CLASS_NAME_STR = DruidException.class.getName();
+
/**
* Starts building a "general" DruidException targeting the specified persona.
*
@@ -467,7 +470,7 @@ public DruidException build(String formatMe, Object... vals)
public DruidException build(Throwable cause, String formatMe, Object... vals)
{
- return new DruidException(
+ final DruidException retVal = new DruidException(
cause,
errorCode,
targetPersona,
@@ -475,6 +478,19 @@ public DruidException build(Throwable cause, String formatMe, Object... vals)
StringUtils.nonStrictFormat(formatMe, vals),
deserialized
);
+
+ StackTraceElement[] stackTrace = retVal.getStackTrace();
+ int firstNonDruidExceptionIndex = 0;
+ while (
+ firstNonDruidExceptionIndex < stackTrace.length
+ && stackTrace[firstNonDruidExceptionIndex].getClassName().startsWith(CLASS_NAME_STR)) {
+ ++firstNonDruidExceptionIndex;
+ }
+ if (firstNonDruidExceptionIndex > 0) {
+ retVal.setStackTrace(Arrays.copyOfRange(stackTrace, firstNonDruidExceptionIndex, stackTrace.length));
+ }
+
+ return retVal;
}
}
diff --git a/processing/src/main/java/org/apache/druid/error/NotYetImplemented.java b/processing/src/main/java/org/apache/druid/error/NotYetImplemented.java
new file mode 100644
index 000000000000..b283034fab33
--- /dev/null
+++ b/processing/src/main/java/org/apache/druid/error/NotYetImplemented.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.error;
+
+/**
+ * A failure class that is used to indicate that something is just not implemented yet. This is useful when a
+ * developer builds something and they intentionally do not implement a specific branch of code or type of object.
+ *
+ * The lack of implementation is not necessarily a statement that it SHOULDN'T be implemented, it's just an indication
+ * that it has not YET been implemented. When one of these exceptions is seen, it is usually an indication that it is
+ * now time to actually implement the path that was previously elided.
+ *
+ * Oftentimes, the code path wasn't implemented because the developer thought that it wasn't actually possible to
+ * see it executed. So, collecting and providing information about why the particular path got executed is often
+ * extremely helpful in understanding why it happened and accelerating the implementation of what the correct behavior
+ * should be.
+ */
+public class NotYetImplemented extends DruidException.Failure
+{
+ public static DruidException ex(Throwable t, String msg, Object... args)
+ {
+ return DruidException.fromFailure(new NotYetImplemented(t, msg, args));
+ }
+
+ private final Throwable t;
+ private final String msg;
+ private final Object[] args;
+
+ public NotYetImplemented(Throwable t, String msg, Object[] args)
+ {
+ super("notYetImplemented");
+ this.t = t;
+ this.msg = msg;
+ this.args = args;
+ }
+
+
+ @Override
+ protected DruidException makeException(DruidException.DruidExceptionBuilder bob)
+ {
+ bob = bob.forPersona(DruidException.Persona.DEVELOPER)
+ .ofCategory(DruidException.Category.DEFENSIVE);
+
+ if (t == null) {
+ return bob.build(msg, args);
+ } else {
+ return bob.build(t, msg, args);
+ }
+ }
+}
diff --git a/processing/src/main/java/org/apache/druid/frame/read/FrameReader.java b/processing/src/main/java/org/apache/druid/frame/read/FrameReader.java
index 8ddf99325d39..46a848fb6b15 100644
--- a/processing/src/main/java/org/apache/druid/frame/read/FrameReader.java
+++ b/processing/src/main/java/org/apache/druid/frame/read/FrameReader.java
@@ -20,6 +20,7 @@
package org.apache.druid.frame.read;
import com.google.common.base.Preconditions;
+import org.apache.druid.error.DruidException;
import org.apache.druid.frame.Frame;
import org.apache.druid.frame.field.FieldReader;
import org.apache.druid.frame.field.FieldReaders;
@@ -31,7 +32,6 @@
import org.apache.druid.frame.segment.row.FrameCursorFactory;
import org.apache.druid.frame.write.FrameWriterUtils;
import org.apache.druid.java.util.common.IAE;
-import org.apache.druid.java.util.common.ISE;
import org.apache.druid.segment.CursorFactory;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ColumnType;
@@ -44,7 +44,7 @@
/**
* Embeds the logic to read frames with a given {@link RowSignature}.
- *
+ *
* Stateless and immutable.
*/
public class FrameReader
@@ -146,7 +146,7 @@ public CursorFactory makeCursorFactory(final Frame frame)
case ROW_BASED:
return new FrameCursorFactory(frame, this, fieldReaders);
default:
- throw new ISE("Unrecognized frame type [%s]", frame.type());
+ throw DruidException.defensive("Unrecognized frame type [%s]", frame.type());
}
}
diff --git a/processing/src/main/java/org/apache/druid/guice/JsonConfigurator.java b/processing/src/main/java/org/apache/druid/guice/JsonConfigurator.java
index 1e4f18dc1cd7..8a53dbffabf0 100644
--- a/processing/src/main/java/org/apache/druid/guice/JsonConfigurator.java
+++ b/processing/src/main/java/org/apache/druid/guice/JsonConfigurator.java
@@ -236,7 +236,7 @@ private static void hieraricalPutValue(
// to configure ParametrizedUriEmitterConfig object. So skipping xxx=yyy key-value pair when configuring Emitter
// doesn't make any difference. That is why we just log this situation, instead of throwing an exception.
log.info(
- "Skipping %s property: one of it's prefixes is also used as a property key. Prefix: %s",
+        "Skipping property [%s]: one of its prefixes [%s] is also used as a property key.",
originalProperty,
propertyPrefix
);
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java
index 121e4863bcdc..42972f9340da 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/MapOfColumnsRowsAndColumns.java
@@ -25,6 +25,7 @@
import org.apache.druid.query.rowsandcols.column.Column;
import org.apache.druid.query.rowsandcols.column.DoubleArrayColumn;
import org.apache.druid.query.rowsandcols.column.IntArrayColumn;
+import org.apache.druid.query.rowsandcols.column.LongArrayColumn;
import org.apache.druid.query.rowsandcols.column.ObjectArrayColumn;
import org.apache.druid.query.rowsandcols.semantic.AppendableRowsAndColumns;
import org.apache.druid.segment.column.ColumnType;
@@ -170,6 +171,12 @@ public Builder add(String name, int[] vals)
return add(name, new IntArrayColumn(vals));
}
+ @SuppressWarnings("unused")
+ public Builder add(String name, long[] vals)
+ {
+ return add(name, new LongArrayColumn(vals));
+ }
+
public Builder add(String name, double[] vals)
{
return add(name, new DoubleArrayColumn(vals));
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/BinarySearchableAccessor.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/BinarySearchableAccessor.java
index 4eddcc77f1c5..f7c339b2080d 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/BinarySearchableAccessor.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/BinarySearchableAccessor.java
@@ -22,6 +22,10 @@
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.query.rowsandcols.util.FindResult;
+/**
+ * The implementations of this interface will not validate that things are sorted for the binary search, it assumes that
+ * they must be. As such, behavior is undefined if the column is not actually sorted.
+ */
public interface BinarySearchableAccessor extends ColumnAccessor
{
static BinarySearchableAccessor fromColumn(Column col)
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/ConstantObjectColumn.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/ConstantObjectColumn.java
index 28a7c3dd10db..01af9b07536b 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/ConstantObjectColumn.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/ConstantObjectColumn.java
@@ -19,7 +19,7 @@
package org.apache.druid.query.rowsandcols.column;
-import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.error.DruidException;
import org.apache.druid.query.rowsandcols.util.FindResult;
import org.apache.druid.segment.column.ColumnType;
@@ -55,7 +55,13 @@ public T as(Class extends T> clazz)
if (VectorCopier.class.equals(clazz)) {
return (T) (VectorCopier) (into, intoStart) -> {
if (Integer.MAX_VALUE - numRows < intoStart) {
- throw new ISE("too many rows!!! intoStart[%,d], numRows[%,d] combine to exceed max_int", intoStart, numRows);
+ throw DruidException.forPersona(DruidException.Persona.USER)
+ .ofCategory(DruidException.Category.CAPACITY_EXCEEDED)
+ .build(
+                            "too many rows!!! intoStart[%,d], numRows[%,d] combine to exceed max_int",
+ intoStart,
+ numRows
+ );
}
Arrays.fill(into, intoStart, intoStart + numRows, obj);
};
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/DoubleArrayColumn.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/DoubleArrayColumn.java
index 9c3b799d30e8..18cb8ad9c5a8 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/DoubleArrayColumn.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/DoubleArrayColumn.java
@@ -19,8 +19,8 @@
package org.apache.druid.query.rowsandcols.column;
-import org.apache.druid.java.util.common.ISE;
-import org.apache.druid.java.util.common.Numbers;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.error.NotYetImplemented;
import org.apache.druid.query.rowsandcols.util.FindResult;
import org.apache.druid.segment.column.ColumnType;
@@ -54,11 +54,13 @@ public T as(Class extends T> clazz)
if (VectorCopier.class.equals(clazz)) {
return (T) (VectorCopier) (into, intoStart) -> {
if (Integer.MAX_VALUE - vals.length < intoStart) {
- throw new ISE(
- "too many rows!!! intoStart[%,d], vals.length[%,d] combine to exceed max_int",
- intoStart,
- vals.length
- );
+ throw DruidException.forPersona(DruidException.Persona.USER)
+ .ofCategory(DruidException.Category.CAPACITY_EXCEEDED)
+ .build(
+ "too many rows!!! intoStart[%,d], vals.length[%,d] combine to exceed max_int",
+ intoStart,
+ vals.length
+ );
}
for (int i = 0; i < vals.length; ++i) {
into[intoStart + i] = vals[i];
@@ -183,13 +185,13 @@ public FindResult findLong(int startIndex, int endIndex, long val)
@Override
public FindResult findString(int startIndex, int endIndex, String val)
{
- return findDouble(startIndex, endIndex, Numbers.tryParseDouble(val, 0));
+ throw NotYetImplemented.ex(null, "findString is not currently supported for DoubleArrayColumns");
}
@Override
public FindResult findComplex(int startIndex, int endIndex, Object val)
{
- return findDouble(startIndex, endIndex, Numbers.tryParseDouble(val, 0));
+ throw NotYetImplemented.ex(null, "findComplex is not currently supported for DoubleArrayColumns");
}
}
}
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/IntArrayColumn.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/IntArrayColumn.java
index 673cebf0e2e4..4a9d7c2c5b92 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/IntArrayColumn.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/IntArrayColumn.java
@@ -19,8 +19,8 @@
package org.apache.druid.query.rowsandcols.column;
-import org.apache.druid.java.util.common.ISE;
-import org.apache.druid.java.util.common.Numbers;
+import org.apache.druid.error.DruidException;
+import org.apache.druid.error.NotYetImplemented;
import org.apache.druid.query.rowsandcols.util.FindResult;
import org.apache.druid.segment.column.ColumnType;
@@ -54,11 +54,13 @@ public T as(Class extends T> clazz)
if (VectorCopier.class.equals(clazz)) {
return (T) (VectorCopier) (into, intoStart) -> {
if (Integer.MAX_VALUE - vals.length < intoStart) {
- throw new ISE(
- "too many rows!!! intoStart[%,d], vals.length[%,d] combine to exceed max_int",
- intoStart,
- vals.length
- );
+ throw DruidException.forPersona(DruidException.Persona.USER)
+ .ofCategory(DruidException.Category.CAPACITY_EXCEEDED)
+ .build(
+ "too many rows!!! intoStart[%,d], vals.length[%,d] combine to exceed max_int",
+ intoStart,
+ vals.length
+ );
}
for (int i = 0; i < vals.length; ++i) {
into[intoStart + i] = vals[i];
@@ -189,13 +191,13 @@ public FindResult findInt(int startIndex, int endIndex, int val)
@Override
public FindResult findString(int startIndex, int endIndex, String val)
{
- return findInt(startIndex, endIndex, (int) Numbers.tryParseLong(val, 0));
+ throw NotYetImplemented.ex(null, "findString is not currently supported for IntArrayColumns");
}
@Override
public FindResult findComplex(int startIndex, int endIndex, Object val)
{
- return findDouble(startIndex, endIndex, (int) Numbers.tryParseLong(val, 0));
+ throw NotYetImplemented.ex(null, "findComplex is not currently supported for IntArrayColumns");
}
}
}
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/column/LongArrayColumn.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/LongArrayColumn.java
new file mode 100644
index 000000000000..bddf235eeb86
--- /dev/null
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/column/LongArrayColumn.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.query.rowsandcols.column;
+
+import org.apache.druid.error.DruidException;
+import org.apache.druid.error.NotYetImplemented;
+import org.apache.druid.query.rowsandcols.util.FindResult;
+import org.apache.druid.segment.column.ColumnType;
+
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import java.util.Arrays;
+
+public class LongArrayColumn implements Column
+{
+ private final long[] vals;
+
+ public LongArrayColumn(
+ long[] vals
+ )
+ {
+ this.vals = vals;
+ }
+
+ @Nonnull
+ @Override
+ public ColumnAccessor toAccessor()
+ {
+ return new MyColumnAccessor();
+ }
+
+ @Nullable
+ @SuppressWarnings("unchecked")
+ @Override
+ public T as(Class extends T> clazz)
+ {
+ if (VectorCopier.class.equals(clazz)) {
+ return (T) (VectorCopier) (into, intoStart) -> {
+ if (Integer.MAX_VALUE - vals.length < intoStart) {
+ throw DruidException.forPersona(DruidException.Persona.USER)
+ .ofCategory(DruidException.Category.CAPACITY_EXCEEDED)
+ .build(
+ "too many rows!!! intoStart[%,d], vals.length[%,d] combine to exceed max_int",
+ intoStart,
+ vals.length
+ );
+ }
+ for (int i = 0; i < vals.length; ++i) {
+ into[intoStart + i] = vals[i];
+ }
+ };
+ }
+ if (ColumnValueSwapper.class.equals(clazz)) {
+ return (T) (ColumnValueSwapper) (lhs, rhs) -> {
+ long tmp = vals[lhs];
+ vals[lhs] = vals[rhs];
+ vals[rhs] = tmp;
+ };
+ }
+ return null;
+ }
+
+ private class MyColumnAccessor implements BinarySearchableAccessor
+ {
+ @Override
+ public ColumnType getType()
+ {
+ return ColumnType.LONG;
+ }
+
+ @Override
+ public int numRows()
+ {
+ return vals.length;
+ }
+
+ @Override
+ public boolean isNull(int rowNum)
+ {
+ return false;
+ }
+
+ @Override
+ public Object getObject(int rowNum)
+ {
+ return vals[rowNum];
+ }
+
+ @Override
+ public double getDouble(int rowNum)
+ {
+ return vals[rowNum];
+ }
+
+ @Override
+ public float getFloat(int rowNum)
+ {
+ return vals[rowNum];
+ }
+
+ @Override
+ public long getLong(int rowNum)
+ {
+ return vals[rowNum];
+ }
+
+ @Override
+ public int getInt(int rowNum)
+ {
+ return (int) vals[rowNum];
+ }
+
+ @Override
+ public int compareRows(int lhsRowNum, int rhsRowNum)
+ {
+ return Long.compare(vals[lhsRowNum], vals[rhsRowNum]);
+ }
+
+
+ @Override
+ public FindResult findNull(int startIndex, int endIndex)
+ {
+ return FindResult.notFound(endIndex);
+ }
+
+ @Override
+ public FindResult findDouble(int startIndex, int endIndex, double val)
+ {
+ return findLong(startIndex, endIndex, (long) val);
+ }
+
+ @Override
+ public FindResult findFloat(int startIndex, int endIndex, float val)
+ {
+ return findLong(startIndex, endIndex, (long) val);
+ }
+
+ @Override
+ public FindResult findLong(int startIndex, int endIndex, long val)
+ {
+ if (vals[startIndex] == val) {
+ int end = startIndex + 1;
+
+ while (end < endIndex && vals[end] == val) {
+ ++end;
+ }
+ return FindResult.found(startIndex, end);
+ }
+
+ int i = Arrays.binarySearch(vals, startIndex, endIndex, val);
+      if (i >= 0) {
+ int foundStart = i;
+ int foundEnd = i + 1;
+
+ while (foundStart - 1 >= startIndex && vals[foundStart - 1] == val) {
+ --foundStart;
+ }
+
+ while (foundEnd < endIndex && vals[foundEnd] == val) {
+ ++foundEnd;
+ }
+
+ return FindResult.found(foundStart, foundEnd);
+ } else {
+ return FindResult.notFound(-(i + 1));
+ }
+ }
+
+ @SuppressWarnings("unused")
+ public FindResult findInt(int startIndex, int endIndex, int val)
+ {
+ return findLong(startIndex, endIndex, val);
+ }
+
+ @Override
+ public FindResult findString(int startIndex, int endIndex, String val)
+ {
+ throw NotYetImplemented.ex(null, "findString is not currently supported for LongArrayColumns");
+ }
+
+ @Override
+ public FindResult findComplex(int startIndex, int endIndex, Object val)
+ {
+ throw NotYetImplemented.ex(null, "findComplex is not currently supported for LongArrayColumns");
+ }
+ }
+}
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnBasedFrameRowsAndColumns.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnBasedFrameRowsAndColumns.java
index ada3da164ea1..71c2541b387c 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnBasedFrameRowsAndColumns.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnBasedFrameRowsAndColumns.java
@@ -28,7 +28,6 @@
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.query.rowsandcols.RowsAndColumns;
import org.apache.druid.query.rowsandcols.column.Column;
-import org.apache.druid.query.rowsandcols.semantic.WireTransferable;
import org.apache.druid.segment.CloseableShapeshifter;
import org.apache.druid.segment.StorageAdapter;
import org.apache.druid.segment.column.ColumnType;
@@ -80,7 +79,6 @@ public Column findColumn(String name)
}
}
return colCache.get(name);
-
}
@SuppressWarnings("unchecked")
@@ -91,9 +89,6 @@ public T as(Class clazz)
if (StorageAdapter.class.equals(clazz)) {
return (T) new FrameStorageAdapter(frame, FrameReader.create(signature), Intervals.ETERNITY);
}
- if (WireTransferable.class.equals(clazz)) {
- return (T) this;
- }
return null;
}
diff --git a/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnHolderRACColumn.java b/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnHolderRACColumn.java
index ed4f8ead52e8..d68f8872bf46 100644
--- a/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnHolderRACColumn.java
+++ b/processing/src/main/java/org/apache/druid/query/rowsandcols/concrete/ColumnHolderRACColumn.java
@@ -91,7 +91,7 @@ public int numRows()
public boolean isNull(int rowNum)
{
offset.set(rowNum);
- return valueSelector.isNull();
+ return valueSelector.getObject() == null;
}
@Nullable
diff --git a/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnMerger.java b/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnMerger.java
index 5d1198f5460a..801eaf112a5f 100644
--- a/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnMerger.java
+++ b/processing/src/main/java/org/apache/druid/segment/AutoTypeColumnMerger.java
@@ -22,7 +22,7 @@
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import com.google.common.collect.PeekingIterator;
-import org.apache.druid.java.util.common.ISE;
+import org.apache.druid.error.DruidException;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.column.ColumnDescriptor;
@@ -212,7 +212,7 @@ public void writeMergedValueDictionary(List adapters) throws I
);
break;
default:
- throw new ISE(
+ throw DruidException.defensive(
"How did we get here? Column [%s] with type [%s] does not have specialized serializer",
name,
logicalType
@@ -349,7 +349,7 @@ public boolean hasOnlyNulls()
@Override
public ColumnDescriptor makeColumnDescriptor()
{
- ColumnDescriptor.Builder descriptorBuilder = new ColumnDescriptor.Builder();
+ ColumnDescriptor.Builder descriptorBuilder = ColumnDescriptor.builder();
final NestedCommonFormatColumnPartSerde partSerde =
NestedCommonFormatColumnPartSerde.serializerBuilder()
diff --git a/processing/src/main/java/org/apache/druid/segment/column/StringUtf8DictionaryEncodedColumn.java b/processing/src/main/java/org/apache/druid/segment/column/StringUtf8DictionaryEncodedColumn.java
index fe8ade4a9ed6..c3ebde1854c0 100644
--- a/processing/src/main/java/org/apache/druid/segment/column/StringUtf8DictionaryEncodedColumn.java
+++ b/processing/src/main/java/org/apache/druid/segment/column/StringUtf8DictionaryEncodedColumn.java
@@ -528,11 +528,11 @@ public Indexed getStringDictionary()
/**
* Base type for a {@link SingleValueDimensionVectorSelector} for a dictionary encoded {@link ColumnType#STRING}
* built around a {@link ColumnarInts}. Dictionary not included - BYO dictionary lookup methods.
- *
+ *
* Assumes that all implementations return true for {@link #supportsLookupNameUtf8()}.
*/
public abstract static class StringSingleValueDimensionVectorSelector
- implements SingleValueDimensionVectorSelector, IdLookup
+ implements SingleValueDimensionVectorSelector, IdLookup
{
private final ColumnarInts column;
private final ReadableVectorOffset offset;
@@ -540,8 +540,8 @@ public abstract static class StringSingleValueDimensionVectorSelector
private int id = ReadableVectorInspector.NULL_ID;
public StringSingleValueDimensionVectorSelector(
- ColumnarInts column,
- ReadableVectorOffset offset
+ ColumnarInts column,
+ ReadableVectorOffset offset
)
{
this.column = column;
@@ -601,11 +601,11 @@ public int getMaxVectorSize()
/**
* Base type for a {@link MultiValueDimensionVectorSelector} for a dictionary encoded {@link ColumnType#STRING}
* built around a {@link ColumnarMultiInts}. Dictionary not included - BYO dictionary lookup methods.
- *
+ *
* Assumes that all implementations return true for {@link #supportsLookupNameUtf8()}.
*/
public abstract static class StringMultiValueDimensionVectorSelector
- implements MultiValueDimensionVectorSelector, IdLookup
+ implements MultiValueDimensionVectorSelector, IdLookup
{
private final ColumnarMultiInts multiValueColumn;
private final ReadableVectorOffset offset;
@@ -614,8 +614,8 @@ public abstract static class StringMultiValueDimensionVectorSelector
private int id = ReadableVectorInspector.NULL_ID;
public StringMultiValueDimensionVectorSelector(
- ColumnarMultiInts multiValueColumn,
- ReadableVectorOffset offset
+ ColumnarMultiInts multiValueColumn,
+ ReadableVectorOffset offset
)
{
this.multiValueColumn = multiValueColumn;
@@ -670,6 +670,7 @@ public IdLookup idLookup()
{
return this;
}
+
@Override
public int getCurrentVectorSize()
{
@@ -697,8 +698,8 @@ public abstract static class StringVectorObjectSelector implements VectorObjectS
private int id = ReadableVectorInspector.NULL_ID;
public StringVectorObjectSelector(
- ColumnarInts column,
- ReadableVectorOffset offset
+ ColumnarInts column,
+ ReadableVectorOffset offset
)
{
this.column = column;
@@ -757,8 +758,8 @@ public abstract static class MultiValueStringVectorObjectSelector implements Vec
private int id = ReadableVectorInspector.NULL_ID;
public MultiValueStringVectorObjectSelector(
- ColumnarMultiInts multiValueColumn,
- ReadableVectorOffset offset
+ ColumnarMultiInts multiValueColumn,
+ ReadableVectorOffset offset
)
{
this.multiValueColumn = multiValueColumn;
diff --git a/processing/src/main/java/org/apache/druid/segment/data/BitmapSerde.java b/processing/src/main/java/org/apache/druid/segment/data/BitmapSerde.java
index aa80186e10dd..b25862b17495 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/BitmapSerde.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/BitmapSerde.java
@@ -20,7 +20,10 @@
package org.apache.druid.segment.data;
import com.fasterxml.jackson.annotation.JsonTypeName;
+import org.apache.druid.collections.bitmap.BitmapFactory;
+import org.apache.druid.collections.bitmap.ConciseBitmapFactory;
import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
+import org.apache.druid.error.DruidException;
public class BitmapSerde
{
@@ -48,4 +51,14 @@ public static BitmapSerdeFactory createLegacyFactory()
{
return new LegacyBitmapSerdeFactory();
}
+
+ public static BitmapSerdeFactory forBitmapFactory(BitmapFactory factory)
+ {
+ if (factory instanceof RoaringBitmapFactory) {
+ return new DefaultBitmapSerdeFactory();
+ } else if (factory instanceof ConciseBitmapFactory) {
+ return new ConciseBitmapSerdeFactory();
+ }
+ throw DruidException.defensive("Unknown type of bitmapFactory [%s]", factory.getClass());
+ }
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
index dde6a440d9ea..91ec70b7f171 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/CompressionFactory.java
@@ -233,6 +233,14 @@ public interface LongEncodingWriter
void write(long value) throws IOException;
+ @SuppressWarnings("unused")
+ default void write(long[] values, int offset, int length) throws IOException
+ {
+    for (int i = offset; i < offset + length; ++i) {
+ write(values[i]);
+ }
+ }
+
/**
* Flush the unwritten content to the current output.
*/
@@ -294,6 +302,9 @@ public interface LongEncodingReader
* various duplicates.
*/
LongEncodingReader duplicate();
+
+ @SuppressWarnings("unused")
+ LongEncodingStrategy getStrategy();
}
public static Supplier getLongSupplier(
diff --git a/processing/src/main/java/org/apache/druid/segment/data/DeltaLongEncodingReader.java b/processing/src/main/java/org/apache/druid/segment/data/DeltaLongEncodingReader.java
index 435aa2ddfd1a..b7feb3b1dd34 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/DeltaLongEncodingReader.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/DeltaLongEncodingReader.java
@@ -82,4 +82,10 @@ public CompressionFactory.LongEncodingReader duplicate()
{
return new DeltaLongEncodingReader(buffer.duplicate(), base, bitsPerValue);
}
+
+ @Override
+ public CompressionFactory.LongEncodingStrategy getStrategy()
+ {
+ return CompressionFactory.LongEncodingStrategy.AUTO;
+ }
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/DictionaryWriter.java b/processing/src/main/java/org/apache/druid/segment/data/DictionaryWriter.java
index 170f6975f28e..5901e2e13205 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/DictionaryWriter.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/DictionaryWriter.java
@@ -28,12 +28,41 @@ public interface DictionaryWriter extends Serializer
{
boolean isSorted();
+ /**
+ * Prepares the writer for writing
+ *
+ * @throws IOException if there is a problem with IO
+ */
void open() throws IOException;
- void write(@Nullable T objectToWrite) throws IOException;
+ /**
+ * Writes an object to the dictionary.
+ *
+ * Returns the index of the value that was just written. This is defined as the `int` value that can be passed
+ * into {@link #get} such that it will return the same value back.
+ *
+ * @param objectToWrite object to be written to the dictionary
+ * @return index of the value that was just written
+ * @throws IOException if there is a problem with IO
+ */
+ int write(@Nullable T objectToWrite) throws IOException;
+ /**
+ * Returns an object that has already been written via the {@link #write} method.
+ *
+ * @param dictId index of the object to return
+ * @return the object identified by the given index
+ * @throws IOException if there is a problem with IO
+ */
@Nullable
T get(int dictId) throws IOException;
+ /**
+ * Returns the number of items that have been written so far in this dictionary. Any number lower than this
+ * cardinality can be passed into {@link #get} and a value will be returned. If a value greater than or equal to
+ * the cardinality is passed into {@link #get} all sorts of things could happen, but likely none of them are good.
+ *
+ * @return the number of items that have been written so far
+ */
int getCardinality();
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/EncodedStringDictionaryWriter.java b/processing/src/main/java/org/apache/druid/segment/data/EncodedStringDictionaryWriter.java
index 371a73bebd74..799ed3766f28 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/EncodedStringDictionaryWriter.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/EncodedStringDictionaryWriter.java
@@ -58,9 +58,9 @@ public void open() throws IOException
}
@Override
- public void write(@Nullable String objectToWrite) throws IOException
+ public int write(@Nullable String objectToWrite) throws IOException
{
- delegate.write(StringUtils.toUtf8Nullable(NullHandling.emptyToNullIfNeeded(objectToWrite)));
+ return delegate.write(StringUtils.toUtf8Nullable(NullHandling.emptyToNullIfNeeded(objectToWrite)));
}
@Nullable
diff --git a/processing/src/main/java/org/apache/druid/segment/data/FixedIndexedWriter.java b/processing/src/main/java/org/apache/druid/segment/data/FixedIndexedWriter.java
index b1b473b3419d..42ca16b78f42 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/FixedIndexedWriter.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/FixedIndexedWriter.java
@@ -20,8 +20,8 @@
package org.apache.druid.segment.data;
import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.error.DruidException;
import org.apache.druid.io.Channels;
-import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.io.smoosh.FileSmoosher;
import org.apache.druid.segment.column.TypeStrategy;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
@@ -46,14 +46,16 @@ public class FixedIndexedWriter implements DictionaryWriter
private final Comparator comparator;
private final ByteBuffer scratch;
private final ByteBuffer readBuffer;
- private int numWritten;
+ private final boolean isSorted;
+ private final int width;
+
+ private int cardinality = 0;
+
@Nullable
private WriteOutBytes valuesOut = null;
private boolean hasNulls = false;
- private boolean isSorted;
@Nullable
private T prevObject = null;
- private final int width;
public FixedIndexedWriter(
SegmentWriteOutMedium segmentWriteOutMedium,
@@ -87,7 +89,7 @@ public void open() throws IOException
@Override
public int getCardinality()
{
- return hasNulls ? numWritten + 1 : numWritten;
+ return cardinality;
}
@Override
@@ -97,28 +99,31 @@ public long getSerializedSize()
}
@Override
- public void write(@Nullable T objectToWrite) throws IOException
+ public int write(@Nullable T objectToWrite) throws IOException
{
if (prevObject != null && isSorted && comparator.compare(prevObject, objectToWrite) >= 0) {
- throw new ISE(
+ throw DruidException.defensive(
"Values must be sorted and unique. Element [%s] with value [%s] is before or equivalent to [%s]",
- numWritten,
+ cardinality,
objectToWrite,
prevObject
);
}
if (objectToWrite == null) {
+ if (cardinality != 0) {
+ throw DruidException.defensive("Null must come first, got it at cardinality[%,d]!=0", cardinality);
+ }
hasNulls = true;
- return;
+ return cardinality++;
}
scratch.clear();
typeStrategy.write(scratch, objectToWrite, width);
scratch.flip();
Channels.writeFully(valuesOut, scratch);
- numWritten++;
prevObject = objectToWrite;
+ return cardinality++;
}
@Override
@@ -141,7 +146,7 @@ public void writeTo(
scratch.flip();
Channels.writeFully(channel, scratch);
scratch.clear();
- scratch.putInt(numWritten);
+ scratch.putInt(hasNulls ? cardinality - 1 : cardinality); // we don't actually write the null entry, so subtract 1
scratch.flip();
Channels.writeFully(channel, scratch);
valuesOut.writeTo(channel);
@@ -166,7 +171,7 @@ public T get(int index) throws IOException
 public Iterator<T> getIterator()
{
final ByteBuffer iteratorBuffer = ByteBuffer.allocate(width * PAGE_SIZE).order(readBuffer.order());
- final int totalCount = hasNulls ? 1 + numWritten : numWritten;
+ final int totalCount = cardinality;
final int startPos = hasNulls ? 1 : 0;
 return new Iterator<T>()
@@ -197,13 +202,8 @@ private void readPage()
{
iteratorBuffer.clear();
try {
- if (numWritten - (pos - startPos) < PAGE_SIZE) {
- int size = (numWritten - (pos - startPos)) * width;
- iteratorBuffer.limit(size);
- valuesOut.readFully((long) (pos - startPos) * width, iteratorBuffer);
- } else {
- valuesOut.readFully((long) (pos - startPos) * width, iteratorBuffer);
- }
+ iteratorBuffer.limit(Math.min(PAGE_SIZE, (cardinality - pos) * width));
+ valuesOut.readFully((long) (pos - startPos) * width, iteratorBuffer);
iteratorBuffer.clear();
}
catch (IOException e) {
diff --git a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexedWriter.java b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexedWriter.java
index c24d2e55d71d..707e3894793a 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexedWriter.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIndexedWriter.java
@@ -21,6 +21,7 @@
import com.google.common.primitives.Ints;
import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.error.DruidException;
import org.apache.druid.io.Channels;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
@@ -102,7 +103,7 @@ public void open() throws IOException
}
@Override
- public void write(@Nullable byte[] value) throws IOException
+ public int write(@Nullable byte[] value) throws IOException
{
if (prevObject != null && compareNullableUtf8UsingJavaStringOrdering(prevObject, value) >= 0) {
throw new ISE(
@@ -114,8 +115,11 @@ public void write(@Nullable byte[] value) throws IOException
}
if (value == null) {
+ if (numWritten != 0) {
+ throw DruidException.defensive("Null must come first, got it at cardinality[%,d]!=0", numWritten);
+ }
hasNulls = true;
- return;
+ return 0;
}
// if the bucket buffer is full, write the bucket
@@ -143,8 +147,9 @@ public void write(@Nullable byte[] value) throws IOException
bucketBuffer[numWritten % bucketSize] = value;
- ++numWritten;
+ int retVal = numWritten++;
prevObject = value;
+ return retVal + (hasNulls ? 1 : 0);
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexedWriter.java b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexedWriter.java
index 8116882191b0..50e350f3d640 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexedWriter.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/FrontCodedIntArrayIndexedWriter.java
@@ -21,6 +21,7 @@
import com.google.common.primitives.Ints;
import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.error.DruidException;
import org.apache.druid.io.Channels;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
@@ -81,6 +82,10 @@ public class FrontCodedIntArrayIndexedWriter implements DictionaryWriter
private boolean isClosed = false;
private boolean hasNulls = false;
+ private int readCachedBucket = -1;
+ @Nullable
+ private ByteBuffer readBufferCache = null;
+
public FrontCodedIntArrayIndexedWriter(
SegmentWriteOutMedium segmentWriteOutMedium,
ByteOrder byteOrder,
@@ -107,7 +112,7 @@ public void open() throws IOException
}
@Override
- public void write(@Nullable int[] value) throws IOException
+ public int write(@Nullable int[] value) throws IOException
{
if (prevObject != null && ARRAY_COMPARATOR.compare(prevObject, value) >= 0) {
@@ -120,8 +125,11 @@ public void write(@Nullable int[] value) throws IOException
}
if (value == null) {
+ if (numWritten != 0) {
+ throw DruidException.defensive("Null must come first, got it at numWritten[%,d]!=0", numWritten);
+ }
hasNulls = true;
- return;
+ return 0;
}
// if the bucket buffer is full, write the bucket
@@ -147,8 +155,9 @@ public void write(@Nullable int[] value) throws IOException
bucketBuffer[numWritten % bucketSize] = value;
- ++numWritten;
+ int retVal = numWritten++;
prevObject = value;
+ return retVal + (hasNulls ? 1 : 0);
}
@@ -206,6 +215,11 @@ public int[] get(int index) throws IOException
return bucketBuffer[relativeIndex];
} else {
final int bucket = adjustedIndex >> div;
+ if (readCachedBucket == bucket) {
+ readBufferCache.position(0);
+ return getFromBucket(readBufferCache, relativeIndex);
+ }
+
long startOffset;
if (bucket == 0) {
startOffset = 0;
@@ -217,10 +231,17 @@ public int[] get(int index) throws IOException
if (currentBucketSize == 0) {
return null;
}
- final ByteBuffer bucketBuffer = ByteBuffer.allocate(currentBucketSize).order(byteOrder);
- valuesOut.readFully(startOffset, bucketBuffer);
- bucketBuffer.clear();
- return getFromBucket(bucketBuffer, relativeIndex);
+ if (readBufferCache == null || readBufferCache.capacity() < currentBucketSize) {
+ readBufferCache = ByteBuffer.allocate(currentBucketSize).order(byteOrder);
+ }
+ readBufferCache.clear();
+ readBufferCache.limit(currentBucketSize);
+ valuesOut.readFully(startOffset, readBufferCache);
+
+ readCachedBucket = bucket;
+
+ readBufferCache.position(0);
+ return getFromBucket(readBufferCache, relativeIndex);
}
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java b/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java
index 8b38125322ba..a87a61843fac 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/GenericIndexedWriter.java
@@ -242,7 +242,7 @@ void setIntMaxForCasting(final int intMaxForCasting)
}
@Override
- public void write(@Nullable T objectToWrite) throws IOException
+ public int write(@Nullable T objectToWrite) throws IOException
{
if (objectsSorted && prevObject != null && strategy.compare(prevObject, objectToWrite) >= 0) {
objectsSorted = false;
@@ -263,7 +263,7 @@ public void write(@Nullable T objectToWrite) throws IOException
// Increment number of values written. Important to do this after the check above, since numWritten is
// accessed during "initializeHeaderOutLong" to determine the length of the header.
- ++numWritten;
+ int retVal = numWritten++;
if (!requireMultipleFiles) {
headerOut.writeInt(checkedCastNonnegativeLongToInt(valuesOut.size()));
@@ -280,6 +280,7 @@ public void write(@Nullable T objectToWrite) throws IOException
if (objectsSorted) {
prevObject = objectToWrite;
}
+ return retVal;
}
@Nullable
diff --git a/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingReader.java b/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingReader.java
index 2ed0459121af..7df866f22c7f 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingReader.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingReader.java
@@ -71,4 +71,10 @@ public CompressionFactory.LongEncodingReader duplicate()
{
return new LongsLongEncodingReader(buffer.getByteBuffer(), buffer.getTypeByteOrder());
}
+
+ @Override
+ public CompressionFactory.LongEncodingStrategy getStrategy()
+ {
+ return CompressionFactory.LongEncodingStrategy.LONGS;
+ }
}
diff --git a/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingWriter.java b/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingWriter.java
index 2aeb194d9a8a..728a50aa2fc4 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingWriter.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/LongsLongEncodingWriter.java
@@ -22,7 +22,6 @@
import org.apache.druid.segment.writeout.WriteOutBytes;
import javax.annotation.Nullable;
-
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
diff --git a/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java b/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java
index ea6bb9bd994d..f7cf9bdbb00b 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/RoaringBitmapSerdeFactory.java
@@ -84,7 +84,7 @@ public ImmutableBitmap fromByteBuffer(ByteBuffer buffer, int numBytes)
@Override
public byte[] toBytes(@Nullable ImmutableBitmap val)
{
- if (val == null || val.size() == 0) {
+ if (val == null || val.isEmpty()) {
return new byte[]{};
}
return val.toBytes();
diff --git a/processing/src/main/java/org/apache/druid/segment/data/TableLongEncodingReader.java b/processing/src/main/java/org/apache/druid/segment/data/TableLongEncodingReader.java
index 6a5e17b1080b..7e9b1fdc927e 100644
--- a/processing/src/main/java/org/apache/druid/segment/data/TableLongEncodingReader.java
+++ b/processing/src/main/java/org/apache/druid/segment/data/TableLongEncodingReader.java
@@ -88,4 +88,10 @@ public CompressionFactory.LongEncodingReader duplicate()
{
return new TableLongEncodingReader(buffer.duplicate(), table, bitsPerValue);
}
+
+ @Override
+ public CompressionFactory.LongEncodingStrategy getStrategy()
+ {
+ return CompressionFactory.LongEncodingStrategy.AUTO;
+ }
}
diff --git a/processing/src/main/java/org/apache/druid/segment/nested/DictionaryIdLookup.java b/processing/src/main/java/org/apache/druid/segment/nested/DictionaryIdLookup.java
index f4176db220cd..44c37da01f26 100644
--- a/processing/src/main/java/org/apache/druid/segment/nested/DictionaryIdLookup.java
+++ b/processing/src/main/java/org/apache/druid/segment/nested/DictionaryIdLookup.java
@@ -51,7 +51,7 @@
/**
* Value to dictionary id lookup, backed with memory mapped dictionaries populated lazily by the supplied
- * @link DictionaryWriter}.
+ * {@link DictionaryWriter}.
*/
public final class DictionaryIdLookup implements Closeable
{
diff --git a/processing/src/main/java/org/apache/druid/segment/serde/MetaSerdeHelper.java b/processing/src/main/java/org/apache/druid/segment/serde/MetaSerdeHelper.java
index 113821cee150..08e97e3e5017 100644
--- a/processing/src/main/java/org/apache/druid/segment/serde/MetaSerdeHelper.java
+++ b/processing/src/main/java/org/apache/druid/segment/serde/MetaSerdeHelper.java
@@ -112,7 +112,11 @@ public void writeTo(WritableByteChannel channel, T x) throws IOException
public int size(T x)
{
- return fieldWriters.stream().mapToInt(w -> w.size(x)).sum();
+ int retVal = 0;
+ for (FieldWriter fieldWriter : fieldWriters) {
+ retVal += fieldWriter.size(x);
+ }
+ return retVal;
}
public interface FieldWriter
diff --git a/processing/src/main/java/org/apache/druid/segment/serde/cell/IOIterator.java b/processing/src/main/java/org/apache/druid/segment/serde/cell/IOIterator.java
index 887f1fb65ac6..3931601dd4f3 100644
--- a/processing/src/main/java/org/apache/druid/segment/serde/cell/IOIterator.java
+++ b/processing/src/main/java/org/apache/druid/segment/serde/cell/IOIterator.java
@@ -22,6 +22,12 @@
import java.io.Closeable;
import java.io.IOException;
+/**
+ * An Iterator-like interface that is intentionally not extending Iterator. This is because it is Closeable
+ * and we never want to lose track of the fact that the object needs to be closed.
+ *
+ * @param <T> the type of element returned by the iterator
+ */
 public interface IOIterator<T> extends Closeable
{
boolean hasNext() throws IOException;
diff --git a/processing/src/test/java/org/apache/druid/error/ExceptionTest.java b/processing/src/test/java/org/apache/druid/error/ExceptionTest.java
new file mode 100644
index 000000000000..bf587e4cbd9a
--- /dev/null
+++ b/processing/src/test/java/org/apache/druid/error/ExceptionTest.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.error;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.Map;
+
+public class ExceptionTest
+{
+ @Test
+ public void testNoCause()
+ {
+ DruidException exception = DruidException.defensive().build("defensive");
+ StackTraceElement[] stackTrace = exception.getStackTrace();
+ for (StackTraceElement stackTraceElement : stackTrace) {
+ Assert.assertFalse(stackTraceElement.getClassName().startsWith(DruidException.CLASS_NAME_STR));
+ }
+ }
+
+ @Test
+ public void testNoStacktrace()
+ {
+ ErrorResponse errorResponse = new ErrorResponse(Forbidden.exception());
+ final Map<String, Object> asMap = errorResponse.getAsMap();
+ DruidException exception = ErrorResponse.fromMap(asMap).getUnderlyingException();
+ Assert.assertTrue(exception.getCause() instanceof DruidException);
+ Assert.assertEquals(0, exception.getCause().getStackTrace().length);
+ }
+}
diff --git a/processing/src/test/java/org/apache/druid/query/rowsandcols/column/LongArrayColumnTest.java b/processing/src/test/java/org/apache/druid/query/rowsandcols/column/LongArrayColumnTest.java
new file mode 100644
index 000000000000..38a53a17a332
--- /dev/null
+++ b/processing/src/test/java/org/apache/druid/query/rowsandcols/column/LongArrayColumnTest.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.query.rowsandcols.column;
+
+import org.apache.druid.query.rowsandcols.util.FindResult;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class LongArrayColumnTest
+{
+ @Test
+ public void testLongArrayColumnWithLongValues()
+ {
+ Column column = new LongArrayColumn(new long[]{0, 1, 2, 3, 4, 5, 6, 7, 8, 9});
+ ColumnAccessor accessor = column.toAccessor();
+
+ for (int i = 0; i < 10; i++) {
+ Assert.assertFalse(accessor.isNull(i));
+ Assert.assertEquals(i, accessor.getLong(i));
+ Assert.assertEquals((long) i, accessor.getObject(i));
+ Assert.assertEquals(i, accessor.getDouble(i), 0);
+ Assert.assertEquals(i, accessor.getInt(i));
+ }
+ }
+
+ @Test
+ public void testFindLong()
+ {
+ Column column = new LongArrayColumn(new long[] {1, 1, 1, 3, 5, 5, 6, 7, 8, 9});
+ BinarySearchableAccessor accessor = (BinarySearchableAccessor) column.toAccessor();
+
+ FindResult findResult = accessor.findLong(0, accessor.numRows(), 1);
+ Assert.assertTrue(findResult.wasFound());
+ Assert.assertEquals(0, findResult.getStartRow());
+ Assert.assertEquals(3, findResult.getEndRow());
+
+ findResult = accessor.findLong(0, accessor.numRows(), 6);
+ Assert.assertTrue(findResult.wasFound());
+ Assert.assertEquals(6, findResult.getStartRow());
+ Assert.assertEquals(7, findResult.getEndRow());
+
+ Assert.assertFalse(accessor.findLong(0, accessor.numRows(), 2).wasFound());
+ Assert.assertFalse(accessor.findLong(0, 3, 9).wasFound());
+ }
+
+ @Test
+ public void testOtherTypeFinds()
+ {
+ Column column = new LongArrayColumn(new long[] {0, 1, 2, 3, 4, 5, Long.MAX_VALUE});
+ BinarySearchableAccessor accessor = (BinarySearchableAccessor) column.toAccessor();
+
+ FindResult findResult = accessor.findNull(0, accessor.numRows());
+ Assert.assertFalse(findResult.wasFound()); // Always false for long array columns
+
+ findResult = accessor.findDouble(0, accessor.numRows(), 3.0);
+ Assert.assertTrue(findResult.wasFound());
+ Assert.assertEquals(3, findResult.getStartRow());
+ Assert.assertEquals(4, findResult.getEndRow());
+
+ findResult = accessor.findFloat(0, accessor.numRows(), 1.0f);
+ Assert.assertTrue(findResult.wasFound());
+ Assert.assertEquals(1, findResult.getStartRow());
+ Assert.assertEquals(2, findResult.getEndRow());
+ }
+}
diff --git a/processing/src/test/java/org/apache/druid/query/rowsandcols/semantic/AppendableRowsAndColumnsTest.java b/processing/src/test/java/org/apache/druid/query/rowsandcols/semantic/AppendableRowsAndColumnsTest.java
index e5d6eb1faa79..a305e98ff9e5 100644
--- a/processing/src/test/java/org/apache/druid/query/rowsandcols/semantic/AppendableRowsAndColumnsTest.java
+++ b/processing/src/test/java/org/apache/druid/query/rowsandcols/semantic/AppendableRowsAndColumnsTest.java
@@ -24,6 +24,7 @@
import org.apache.druid.query.rowsandcols.MapOfColumnsRowsAndColumns;
import org.apache.druid.query.rowsandcols.RowsAndColumns;
import org.apache.druid.query.rowsandcols.column.IntArrayColumn;
+import org.apache.druid.query.rowsandcols.column.LongArrayColumn;
import org.apache.druid.query.rowsandcols.column.ObjectArrayColumn;
import org.apache.druid.segment.column.ColumnType;
import org.junit.Assert;
@@ -48,7 +49,7 @@ public void testAppendableRowsAndColumns()
RowsAndColumns rac = make(MapOfColumnsRowsAndColumns.fromMap(
ImmutableMap.of(
"colA", new IntArrayColumn(new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}),
- "colB", new IntArrayColumn(new int[]{4, -4, 3, -3, 4, 82, -90, 4, 0, 0})
+ "colB", new LongArrayColumn(new long[]{4, -4, 3, -3, 4, 82, -90, 4, 0, 0})
)
));
@@ -58,7 +59,7 @@ public void testAppendableRowsAndColumns()
new RowsAndColumnsHelper()
.expectColumn("colA", new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10})
- .expectColumn("colB", new int[]{4, -4, 3, -3, 4, 82, -90, 4, 0, 0})
+ .expectColumn("colB", new long[]{4, -4, 3, -3, 4, 82, -90, 4, 0, 0})
.expectColumn("newCol", new int[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10})
.allColumnsRegistered()
.validate(appender);
diff --git a/processing/src/test/java/org/apache/druid/segment/data/BitmapSerdeFactoryTest.java b/processing/src/test/java/org/apache/druid/segment/data/BitmapSerdeFactoryTest.java
index f799d35059b2..34b93f101652 100644
--- a/processing/src/test/java/org/apache/druid/segment/data/BitmapSerdeFactoryTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/data/BitmapSerdeFactoryTest.java
@@ -20,6 +20,8 @@
package org.apache.druid.segment.data;
import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.druid.collections.bitmap.ConciseBitmapFactory;
+import org.apache.druid.collections.bitmap.RoaringBitmapFactory;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.junit.Assert;
import org.junit.Test;
@@ -46,4 +48,11 @@ public void testDeserialization() throws Exception
Assert.assertTrue(mapper.readValue("{\"type\":\"concise\"}", BitmapSerdeFactory.class) instanceof ConciseBitmapSerdeFactory);
Assert.assertTrue(mapper.readValue("{\"type\":\"BitmapSerde$SomeRandomClass\"}", BitmapSerdeFactory.class) instanceof RoaringBitmapSerdeFactory);
}
+
+ @Test
+ public void testForBitmapFactory()
+ {
+ Assert.assertTrue(BitmapSerde.forBitmapFactory(new RoaringBitmapFactory()) instanceof BitmapSerde.DefaultBitmapSerdeFactory);
+ Assert.assertTrue(BitmapSerde.forBitmapFactory(new ConciseBitmapFactory()) instanceof ConciseBitmapSerdeFactory);
+ }
}
diff --git a/processing/src/test/java/org/apache/druid/segment/data/FrontCodedIndexedTest.java b/processing/src/test/java/org/apache/druid/segment/data/FrontCodedIndexedTest.java
index 8b055188e634..c1312731b913 100644
--- a/processing/src/test/java/org/apache/druid/segment/data/FrontCodedIndexedTest.java
+++ b/processing/src/test/java/org/apache/druid/segment/data/FrontCodedIndexedTest.java
@@ -436,7 +436,7 @@ private static long persistToBuffer(
while (sortedStrings.hasNext()) {
final String next = sortedStrings.next();
final byte[] nextBytes = StringUtils.toUtf8Nullable(next);
- writer.write(nextBytes);
+ Assert.assertEquals(index, writer.write(nextBytes));
if (nextBytes == null) {
Assert.assertNull(writer.get(index));
} else {