diff --git a/docs/source/developers/java/development.rst b/docs/source/developers/java/development.rst
index 261cd5702ae..c993d024003 100644
--- a/docs/source/developers/java/development.rst
+++ b/docs/source/developers/java/development.rst
@@ -110,15 +110,67 @@ integration tests, you would do:
Code Style
==========
-Code style is enforced with Checkstyle. The configuration is located at `checkstyle`_.
-You can also just check the style without building the project.
-This checks the code style of all source code under the current directory or from within an individual module.
+The Java code style is currently configured as follows:
+
+- Indentation: 2 spaces (a tab is treated as 2 spaces)
+- Google Java Format: reformats Java source code to comply with `Google Java Style`_
+- License headers: enforced for Java and XML files
+
+Java code style is checked by `Spotless`_ during the build, and the continuous integration build will verify
+that changes adhere to the style guide.
+
+The check is driven by the ``spotless-maven-plugin`` configured in the project ``pom.xml``.
+The snippet below is an illustrative sketch of that configuration (element contents are
+elided; the authoritative versions and options live in the repository's ``pom.xml``):
+
+.. code-block:: xml
+
+   <plugin>
+     <groupId>com.diffplug.spotless</groupId>
+     <artifactId>spotless-maven-plugin</artifactId>
+     ...
+     <configuration>
+       <java>
+         <googleJavaFormat>
+           ...
+         </googleJavaFormat>
+         <licenseHeader>
+           ...
+         </licenseHeader>
+       </java>
+     </configuration>
+   </plugin>
+
+Automatically fixing code style issues
+--------------------------------------
+
+- To check the style without building the project, run ``mvn spotless:check``.
+- To fix code style issues from the command line, run ``mvn spotless:apply``.
+
+.. code-block:: bash
+
+ user@machine repo % mvn spotless:check
+ [ERROR] > The following files had format violations:
+ [ERROR] src\main\java\com\diffplug\gradle\spotless\FormatExtension.java
+ [ERROR] -\t\t····if·(targets.length·==·0)·{
+ [ERROR] +\t\tif·(targets.length·==·0)·{
+ [ERROR] Run 'mvn spotless:apply' to fix these violations.
+ user@machine repo % mvn spotless:apply
+ [INFO] BUILD SUCCESS
+ user@machine repo % mvn spotless:check
+ [INFO] BUILD SUCCESS
+
+Code Formatter for IntelliJ IDEA and Eclipse
+--------------------------------------------
+
+Follow the instructions for:
+
+- `Eclipse`_
+- `IntelliJ`_
-.. code-block::
-
- $ mvn checkstyle:check
.. _benchmark: https://github.com/ursacomputing/benchmarks
.. _archery: https://github.com/apache/arrow/blob/main/dev/conbench_envs/README.md#L188
.. _conbench: https://github.com/conbench/conbench
-.. _checkstyle: https://github.com/apache/arrow/blob/main/java/dev/checkstyle/checkstyle.xml
+.. _Spotless: https://github.com/diffplug/spotless
+.. _Google Java Style: https://google.github.io/styleguide/javaguide.html
+.. _Eclipse: https://github.com/google/google-java-format?tab=readme-ov-file#eclipse
+.. _IntelliJ: https://github.com/google/google-java-format?tab=readme-ov-file#intellij-android-studio-and-other-jetbrains-ides
\ No newline at end of file
diff --git a/java/.mvn/wrapper/maven-wrapper.jar b/java/.mvn/wrapper/maven-wrapper.jar
new file mode 100644
index 00000000000..cb28b0e37c7
Binary files /dev/null and b/java/.mvn/wrapper/maven-wrapper.jar differ
diff --git a/java/.mvn/wrapper/maven-wrapper.properties b/java/.mvn/wrapper/maven-wrapper.properties
new file mode 100644
index 00000000000..ac184013fc0
--- /dev/null
+++ b/java/.mvn/wrapper/maven-wrapper.properties
@@ -0,0 +1,18 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.9.4/apache-maven-3.9.4-bin.zip
+wrapperUrl=https://repo.maven.apache.org/maven2/org/apache/maven/wrapper/maven-wrapper/3.2.0/maven-wrapper-3.2.0.jar
diff --git a/java/adapter/avro/pom.xml b/java/adapter/avro/pom.xml
index 6644748b5e5..05d6d6b0eb4 100644
--- a/java/adapter/avro/pom.xml
+++ b/java/adapter/avro/pom.xml
@@ -1,15 +1,14 @@
-
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
+  license agreements. See the NOTICE file distributed with this work for additional
+ information regarding copyright ownership. The ASF licenses this file to
+ You under the Apache License, Version 2.0 (the "License"); you may not use
+ this file except in compliance with the License. You may obtain a copy of
+ the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
+ by applicable law or agreed to in writing, software distributed under the
+ License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+ OF ANY KIND, either express or implied. See the License for the specific
+ language governing permissions and limitations under the License. -->
  <modelVersion>4.0.0</modelVersion>
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrow.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrow.java
index 8baa60a72dd..3d4380614a0 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrow.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrow.java
@@ -18,20 +18,17 @@
package org.apache.arrow.adapter.avro;
import java.io.IOException;
-
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
-/**
- * Utility class to convert Avro objects to columnar Arrow format objects.
- */
+/** Utility class to convert Avro objects to columnar Arrow format objects. */
public class AvroToArrow {
/**
- * Fetch the data from {@link Decoder} and convert it to Arrow objects.
- * Only for testing purpose.
+ * Fetch the data from {@link Decoder} and convert it to Arrow objects. Only for testing purpose.
+ *
* @param schema avro schema.
* @param decoder avro decoder
* @param config configuration of the conversion.
@@ -48,15 +45,14 @@ static VectorSchemaRoot avroToArrow(Schema schema, Decoder decoder, AvroToArrowC
/**
* Fetch the data from {@link Decoder} and iteratively convert it to Arrow objects.
+ *
* @param schema avro schema
* @param decoder avro decoder
* @param config configuration of the conversion.
* @throws IOException on error
*/
public static AvroToArrowVectorIterator avroToArrowIterator(
- Schema schema,
- Decoder decoder,
- AvroToArrowConfig config) throws IOException {
+ Schema schema, Decoder decoder, AvroToArrowConfig config) throws IOException {
Preconditions.checkNotNull(schema, "Avro schema object cannot be null");
Preconditions.checkNotNull(decoder, "Avro decoder object cannot be null");
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfig.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfig.java
index f9210fb0125..c5419c7e263 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfig.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfig.java
@@ -18,36 +18,33 @@
package org.apache.arrow.adapter.avro;
import java.util.Set;
-
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.dictionary.DictionaryProvider;
-/**
- * This class configures the Avro-to-Arrow conversion process.
- */
+/** This class configures the Avro-to-Arrow conversion process. */
public class AvroToArrowConfig {
private final BufferAllocator allocator;
+
/**
- * The maximum rowCount to read each time when partially convert data.
- * Default value is 1024 and -1 means read all data into one vector.
+ * The maximum rowCount to read each time when partially convert data. Default value is 1024 and
+ * -1 means read all data into one vector.
*/
private final int targetBatchSize;
/**
- * The dictionary provider used for enum type.
- * If avro schema has enum type, will create dictionary and update this provider.
+ * The dictionary provider used for enum type. If avro schema has enum type, will create
+ * dictionary and update this provider.
*/
private final DictionaryProvider.MapDictionaryProvider provider;
- /**
- * The field names which to skip when reading decoder values.
- */
+ /** The field names which to skip when reading decoder values. */
  private final Set<String> skipFieldNames;
/**
* Instantiate an instance.
+ *
* @param allocator The memory allocator to construct the Arrow vectors with.
* @param targetBatchSize The maximum rowCount to read each time when partially convert data.
* @param provider The dictionary provider used for enum type, adapter will update this provider.
@@ -59,8 +56,10 @@ public class AvroToArrowConfig {
DictionaryProvider.MapDictionaryProvider provider,
      Set<String> skipFieldNames) {
- Preconditions.checkArgument(targetBatchSize == AvroToArrowVectorIterator.NO_LIMIT_BATCH_SIZE ||
- targetBatchSize > 0, "invalid targetBatchSize: %s", targetBatchSize);
+ Preconditions.checkArgument(
+ targetBatchSize == AvroToArrowVectorIterator.NO_LIMIT_BATCH_SIZE || targetBatchSize > 0,
+ "invalid targetBatchSize: %s",
+ targetBatchSize);
this.allocator = allocator;
this.targetBatchSize = targetBatchSize;
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfigBuilder.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfigBuilder.java
index 41e486d0a1c..f1db4e58909 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfigBuilder.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowConfigBuilder.java
@@ -19,13 +19,10 @@
import java.util.HashSet;
import java.util.Set;
-
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.vector.dictionary.DictionaryProvider;
-/**
- * This class builds {@link AvroToArrowConfig}s.
- */
+/** This class builds {@link AvroToArrowConfig}s. */
public class AvroToArrowConfigBuilder {
private BufferAllocator allocator;
@@ -36,9 +33,7 @@ public class AvroToArrowConfigBuilder {
  private Set<String> skipFieldNames;
- /**
- * Default constructor for the {@link AvroToArrowConfigBuilder}.
- */
+ /** Default constructor for the {@link AvroToArrowConfigBuilder}. */
public AvroToArrowConfigBuilder(BufferAllocator allocator) {
this.allocator = allocator;
this.targetBatchSize = AvroToArrowVectorIterator.DEFAULT_BATCH_SIZE;
@@ -61,14 +56,8 @@ public AvroToArrowConfigBuilder setSkipFieldNames(Set<String> skipFieldNames) {
return this;
}
- /**
- * This builds the {@link AvroToArrowConfig} from the provided params.
- */
+ /** This builds the {@link AvroToArrowConfig} from the provided params. */
public AvroToArrowConfig build() {
- return new AvroToArrowConfig(
- allocator,
- targetBatchSize,
- provider,
- skipFieldNames);
+ return new AvroToArrowConfig(allocator, targetBatchSize, provider, skipFieldNames);
}
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowUtils.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowUtils.java
index 1f5ad9e7689..a8fe7f8a4e3 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowUtils.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowUtils.java
@@ -30,7 +30,6 @@
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
-
import org.apache.arrow.adapter.avro.consumers.AvroArraysConsumer;
import org.apache.arrow.adapter.avro.consumers.AvroBooleanConsumer;
import org.apache.arrow.adapter.avro.consumers.AvroBytesConsumer;
@@ -106,36 +105,37 @@ public class AvroToArrowUtils {
/**
* Creates a {@link Consumer} from the {@link Schema}
*
-   * <p>This method currently performs following type mapping for Avro data types to corresponding Arrow data types.
+   * <p>This method currently performs following type mapping for Avro data types to corresponding
+   * Arrow data types.
*
*
*/
-
private static Consumer createConsumer(Schema schema, String name, AvroToArrowConfig config) {
return createConsumer(schema, name, false, config, null);
}
- private static Consumer createConsumer(Schema schema, String name, AvroToArrowConfig config, FieldVector vector) {
+ private static Consumer createConsumer(
+ Schema schema, String name, AvroToArrowConfig config, FieldVector vector) {
return createConsumer(schema, name, false, config, vector);
}
@@ -144,7 +144,8 @@ private static Consumer createConsumer(Schema schema, String name, AvroToArrowCo
*
* @param schema avro schema
* @param name arrow field name
- * @param consumerVector vector to keep in consumer, if v == null, will create a new vector via field.
+ * @param consumerVector vector to keep in consumer, if v == null, will create a new vector via
+ * field.
* @return consumer
*/
private static Consumer createConsumer(
@@ -185,7 +186,7 @@ private static Consumer createConsumer(
break;
case STRING:
arrowType = new ArrowType.Utf8();
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType = new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroStringConsumer((VarCharVector) vector);
break;
@@ -193,12 +194,18 @@ private static Consumer createConsumer(
        Map<String, String> extProps = createExternalProps(schema);
if (logicalType instanceof LogicalTypes.Decimal) {
arrowType = createDecimalArrowType((LogicalTypes.Decimal) logicalType);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema, extProps));
+ fieldType =
+ new FieldType(
+ nullable, arrowType, /* dictionary= */ null, getMetaData(schema, extProps));
vector = createVector(consumerVector, fieldType, name, allocator);
- consumer = new AvroDecimalConsumer.FixedDecimalConsumer((DecimalVector) vector, schema.getFixedSize());
+ consumer =
+ new AvroDecimalConsumer.FixedDecimalConsumer(
+ (DecimalVector) vector, schema.getFixedSize());
} else {
arrowType = new ArrowType.FixedSizeBinary(schema.getFixedSize());
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema, extProps));
+ fieldType =
+ new FieldType(
+ nullable, arrowType, /* dictionary= */ null, getMetaData(schema, extProps));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroFixedConsumer((FixedSizeBinaryVector) vector, schema.getFixedSize());
}
@@ -206,84 +213,94 @@ private static Consumer createConsumer(
case INT:
if (logicalType instanceof LogicalTypes.Date) {
arrowType = new ArrowType.Date(DateUnit.DAY);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroDateConsumer((DateDayVector) vector);
} else if (logicalType instanceof LogicalTypes.TimeMillis) {
arrowType = new ArrowType.Time(TimeUnit.MILLISECOND, 32);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroTimeMillisConsumer((TimeMilliVector) vector);
} else {
- arrowType = new ArrowType.Int(32, /*signed=*/true);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ arrowType = new ArrowType.Int(32, /* signed= */ true);
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroIntConsumer((IntVector) vector);
}
break;
case BOOLEAN:
arrowType = new ArrowType.Bool();
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType = new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroBooleanConsumer((BitVector) vector);
break;
case LONG:
if (logicalType instanceof LogicalTypes.TimeMicros) {
arrowType = new ArrowType.Time(TimeUnit.MICROSECOND, 64);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroTimeMicroConsumer((TimeMicroVector) vector);
} else if (logicalType instanceof LogicalTypes.TimestampMillis) {
arrowType = new ArrowType.Timestamp(TimeUnit.MILLISECOND, null);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroTimestampMillisConsumer((TimeStampMilliVector) vector);
} else if (logicalType instanceof LogicalTypes.TimestampMicros) {
arrowType = new ArrowType.Timestamp(TimeUnit.MICROSECOND, null);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroTimestampMicrosConsumer((TimeStampMicroVector) vector);
} else {
- arrowType = new ArrowType.Int(64, /*signed=*/true);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ arrowType = new ArrowType.Int(64, /* signed= */ true);
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroLongConsumer((BigIntVector) vector);
}
break;
case FLOAT:
arrowType = new ArrowType.FloatingPoint(SINGLE);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType = new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroFloatConsumer((Float4Vector) vector);
break;
case DOUBLE:
arrowType = new ArrowType.FloatingPoint(DOUBLE);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType = new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroDoubleConsumer((Float8Vector) vector);
break;
case BYTES:
if (logicalType instanceof LogicalTypes.Decimal) {
arrowType = createDecimalArrowType((LogicalTypes.Decimal) logicalType);
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroDecimalConsumer.BytesDecimalConsumer((DecimalVector) vector);
} else {
arrowType = new ArrowType.Binary();
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
+ fieldType =
+ new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
vector = createVector(consumerVector, fieldType, name, allocator);
consumer = new AvroBytesConsumer((VarBinaryVector) vector);
}
break;
case NULL:
arrowType = new ArrowType.Null();
- fieldType = new FieldType(nullable, arrowType, /*dictionary=*/null, getMetaData(schema));
- vector = fieldType.createNewSingleVector(name, allocator, /*schemaCallback=*/null);
+ fieldType = new FieldType(nullable, arrowType, /* dictionary= */ null, getMetaData(schema));
+ vector = fieldType.createNewSingleVector(name, allocator, /* schemaCallback= */ null);
consumer = new AvroNullConsumer((NullVector) vector);
break;
default:
// no-op, shouldn't get here
- throw new UnsupportedOperationException("Can't convert avro type %s to arrow type." + type.getName());
+ throw new UnsupportedOperationException(
+ "Can't convert avro type %s to arrow type." + type.getName());
}
return consumer;
}
@@ -291,15 +308,16 @@ private static Consumer createConsumer(
private static ArrowType createDecimalArrowType(LogicalTypes.Decimal logicalType) {
final int scale = logicalType.getScale();
final int precision = logicalType.getPrecision();
- Preconditions.checkArgument(precision > 0 && precision <= 38,
- "Precision must be in range of 1 to 38");
- Preconditions.checkArgument(scale >= 0 && scale <= 38,
- "Scale must be in range of 0 to 38.");
- Preconditions.checkArgument(scale <= precision,
- "Invalid decimal scale: %s (greater than precision: %s)", scale, precision);
+ Preconditions.checkArgument(
+ precision > 0 && precision <= 38, "Precision must be in range of 1 to 38");
+ Preconditions.checkArgument(scale >= 0 && scale <= 38, "Scale must be in range of 0 to 38.");
+ Preconditions.checkArgument(
+ scale <= precision,
+ "Invalid decimal scale: %s (greater than precision: %s)",
+ scale,
+ precision);
return new ArrowType.Decimal(precision, scale, 128);
-
}
private static Consumer createSkipConsumer(Schema schema) {
@@ -309,41 +327,46 @@ private static Consumer createSkipConsumer(Schema schema) {
switch (type) {
case UNION:
-      List<Consumer> unionDelegates = schema.getTypes().stream().map(s ->
-          createSkipConsumer(s)).collect(Collectors.toList());
+      List<Consumer> unionDelegates =
+          schema.getTypes().stream().map(s -> createSkipConsumer(s)).collect(Collectors.toList());
skipFunction = decoder -> unionDelegates.get(decoder.readInt()).consume(decoder);
break;
case ARRAY:
Consumer elementDelegate = createSkipConsumer(schema.getElementType());
- skipFunction = decoder -> {
- for (long i = decoder.skipArray(); i != 0; i = decoder.skipArray()) {
- for (long j = 0; j < i; j++) {
- elementDelegate.consume(decoder);
- }
- }
- };
+ skipFunction =
+ decoder -> {
+ for (long i = decoder.skipArray(); i != 0; i = decoder.skipArray()) {
+ for (long j = 0; j < i; j++) {
+ elementDelegate.consume(decoder);
+ }
+ }
+ };
break;
case MAP:
Consumer valueDelegate = createSkipConsumer(schema.getValueType());
- skipFunction = decoder -> {
- for (long i = decoder.skipMap(); i != 0; i = decoder.skipMap()) {
- for (long j = 0; j < i; j++) {
- decoder.skipString(); // Discard key
- valueDelegate.consume(decoder);
- }
- }
- };
+ skipFunction =
+ decoder -> {
+ for (long i = decoder.skipMap(); i != 0; i = decoder.skipMap()) {
+ for (long j = 0; j < i; j++) {
+ decoder.skipString(); // Discard key
+ valueDelegate.consume(decoder);
+ }
+ }
+ };
break;
case RECORD:
-      List<Consumer> delegates = schema.getFields().stream().map(field ->
-          createSkipConsumer(field.schema())).collect(Collectors.toList());
+      List<Consumer> delegates =
+ schema.getFields().stream()
+ .map(field -> createSkipConsumer(field.schema()))
+ .collect(Collectors.toList());
- skipFunction = decoder -> {
- for (Consumer consumer : delegates) {
- consumer.consume(decoder);
- }
- };
+ skipFunction =
+ decoder -> {
+ for (Consumer consumer : delegates) {
+ consumer.consume(decoder);
+ }
+ };
break;
case ENUM:
@@ -374,7 +397,7 @@ private static Consumer createSkipConsumer(Schema schema) {
skipFunction = decoder -> decoder.skipBytes();
break;
case NULL:
- skipFunction = decoder -> { };
+ skipFunction = decoder -> {};
break;
default:
// no-op, shouldn't get here
@@ -384,8 +407,7 @@ private static Consumer createSkipConsumer(Schema schema) {
return new SkipConsumer(skipFunction);
}
- static CompositeAvroConsumer createCompositeConsumer(
- Schema schema, AvroToArrowConfig config) {
+ static CompositeAvroConsumer createCompositeConsumer(Schema schema, AvroToArrowConfig config) {
    List<Consumer> consumers = new ArrayList<>();
final Set skipFieldNames = config.getSkipFieldNames();
@@ -399,7 +421,6 @@ static CompositeAvroConsumer createCompositeConsumer(
Consumer consumer = createConsumer(field.schema(), field.name(), config);
consumers.add(consumer);
}
-
}
} else {
Consumer consumer = createConsumer(schema, "", config);
@@ -409,9 +430,11 @@ static CompositeAvroConsumer createCompositeConsumer(
return new CompositeAvroConsumer(consumers);
}
- private static FieldVector createVector(FieldVector consumerVector, FieldType fieldType,
- String name, BufferAllocator allocator) {
- return consumerVector != null ? consumerVector : fieldType.createNewSingleVector(name, allocator, null);
+ private static FieldVector createVector(
+ FieldVector consumerVector, FieldType fieldType, String name, BufferAllocator allocator) {
+ return consumerVector != null
+ ? consumerVector
+ : fieldType.createNewSingleVector(name, allocator, null);
}
private static String getDefaultFieldName(ArrowType type) {
@@ -424,10 +447,7 @@ private static Field avroSchemaToField(Schema schema, String name, AvroToArrowCo
}
private static Field avroSchemaToField(
- Schema schema,
- String name,
- AvroToArrowConfig config,
-      Map<String, String> externalProps) {
+      Schema schema, String name, AvroToArrowConfig config, Map<String, String> externalProps) {
final Type type = schema.getType();
final LogicalType logicalType = schema.getLogicalType();
@@ -441,7 +461,8 @@ private static Field avroSchemaToField(
// Union child vector should use default name
children.add(avroSchemaToField(childSchema, null, config));
}
- fieldType = createFieldType(new ArrowType.Union(UnionMode.Sparse, null), schema, externalProps);
+ fieldType =
+ createFieldType(new ArrowType.Union(UnionMode.Sparse, null), schema, externalProps);
break;
case ARRAY:
Schema elementSchema = schema.getElementType();
@@ -450,14 +471,18 @@ private static Field avroSchemaToField(
break;
case MAP:
// MapVector internal struct field and key field should be non-nullable
- FieldType keyFieldType = new FieldType(/*nullable=*/false, new ArrowType.Utf8(), /*dictionary=*/null);
- Field keyField = new Field("key", keyFieldType, /*children=*/null);
+ FieldType keyFieldType =
+ new FieldType(/* nullable= */ false, new ArrowType.Utf8(), /* dictionary= */ null);
+ Field keyField = new Field("key", keyFieldType, /* children= */ null);
Field valueField = avroSchemaToField(schema.getValueType(), "value", config);
- FieldType structFieldType = new FieldType(false, new ArrowType.Struct(), /*dictionary=*/null);
- Field structField = new Field("internal", structFieldType, Arrays.asList(keyField, valueField));
+ FieldType structFieldType =
+ new FieldType(false, new ArrowType.Struct(), /* dictionary= */ null);
+ Field structField =
+ new Field("internal", structFieldType, Arrays.asList(keyField, valueField));
children.add(structField);
- fieldType = createFieldType(new ArrowType.Map(/*keySorted=*/false), schema, externalProps);
+ fieldType =
+ createFieldType(new ArrowType.Map(/* keySorted= */ false), schema, externalProps);
break;
case RECORD:
      final Set<String> skipFieldNames = config.getSkipFieldNames();
@@ -486,8 +511,12 @@ private static Field avroSchemaToField(
int enumCount = schema.getEnumSymbols().size();
ArrowType.Int indexType = DictionaryEncoder.getIndexType(enumCount);
- fieldType = createFieldType(indexType, schema, externalProps,
- new DictionaryEncoding(current, /*ordered=*/false, /*indexType=*/indexType));
+ fieldType =
+ createFieldType(
+ indexType,
+ schema,
+ externalProps,
+ new DictionaryEncoding(current, /* ordered= */ false, /* indexType= */ indexType));
break;
case STRING:
@@ -509,7 +538,7 @@ private static Field avroSchemaToField(
} else if (logicalType instanceof LogicalTypes.TimeMillis) {
intArrowType = new ArrowType.Time(TimeUnit.MILLISECOND, 32);
} else {
- intArrowType = new ArrowType.Int(32, /*signed=*/true);
+ intArrowType = new ArrowType.Int(32, /* signed= */ true);
}
fieldType = createFieldType(intArrowType, schema, externalProps);
break;
@@ -525,7 +554,7 @@ private static Field avroSchemaToField(
} else if (logicalType instanceof LogicalTypes.TimestampMicros) {
longArrowType = new ArrowType.Timestamp(TimeUnit.MICROSECOND, null);
} else {
- longArrowType = new ArrowType.Int(64, /*signed=*/true);
+ longArrowType = new ArrowType.Int(64, /* signed= */ true);
}
fieldType = createFieldType(longArrowType, schema, externalProps);
break;
@@ -558,8 +587,8 @@ private static Field avroSchemaToField(
return new Field(name, fieldType, children.size() == 0 ? null : children);
}
- private static Consumer createArrayConsumer(Schema schema, String name, AvroToArrowConfig config,
- FieldVector consumerVector) {
+ private static Consumer createArrayConsumer(
+ Schema schema, String name, AvroToArrowConfig config, FieldVector consumerVector) {
ListVector listVector;
if (consumerVector == null) {
@@ -578,8 +607,8 @@ private static Consumer createArrayConsumer(Schema schema, String name, AvroToAr
return new AvroArraysConsumer(listVector, delegate);
}
- private static Consumer createStructConsumer(Schema schema, String name, AvroToArrowConfig config,
- FieldVector consumerVector) {
+ private static Consumer createStructConsumer(
+ Schema schema, String name, AvroToArrowConfig config, FieldVector consumerVector) {
    final Set<String> skipFieldNames = config.getSkipFieldNames();
@@ -601,19 +630,22 @@ private static Consumer createStructConsumer(Schema schema, String name, AvroToA
if (skipFieldNames.contains(fullChildName)) {
delegate = createSkipConsumer(childField.schema());
} else {
- delegate = createConsumer(childField.schema(), fullChildName, config,
- structVector.getChildrenFromFields().get(vectorIndex++));
+ delegate =
+ createConsumer(
+ childField.schema(),
+ fullChildName,
+ config,
+ structVector.getChildrenFromFields().get(vectorIndex++));
}
delegates[i] = delegate;
}
return new AvroStructConsumer(structVector, delegates);
-
}
- private static Consumer createEnumConsumer(Schema schema, String name, AvroToArrowConfig config,
- FieldVector consumerVector) {
+ private static Consumer createEnumConsumer(
+ Schema schema, String name, AvroToArrowConfig config, FieldVector consumerVector) {
BaseIntVector indexVector;
if (consumerVector == null) {
@@ -630,16 +662,14 @@ private static Consumer createEnumConsumer(Schema schema, String name, AvroToArr
for (int i = 0; i < valueCount; i++) {
dictVector.set(i, schema.getEnumSymbols().get(i).getBytes(StandardCharsets.UTF_8));
}
- Dictionary dictionary =
- new Dictionary(dictVector, indexVector.getField().getDictionary());
+ Dictionary dictionary = new Dictionary(dictVector, indexVector.getField().getDictionary());
config.getProvider().put(dictionary);
return new AvroEnumConsumer(indexVector);
-
}
- private static Consumer createMapConsumer(Schema schema, String name, AvroToArrowConfig config,
- FieldVector consumerVector) {
+ private static Consumer createMapConsumer(
+ Schema schema, String name, AvroToArrowConfig config, FieldVector consumerVector) {
MapVector mapVector;
if (consumerVector == null) {
@@ -653,10 +683,14 @@ private static Consumer createMapConsumer(Schema schema, String name, AvroToArro
StructVector structVector = (StructVector) mapVector.getDataVector();
// keys in avro map are always assumed to be strings.
- Consumer keyConsumer = new AvroStringConsumer(
- (VarCharVector) structVector.getChildrenFromFields().get(0));
- Consumer valueConsumer = createConsumer(schema.getValueType(), schema.getValueType().getName(),
- config, structVector.getChildrenFromFields().get(1));
+ Consumer keyConsumer =
+ new AvroStringConsumer((VarCharVector) structVector.getChildrenFromFields().get(0));
+ Consumer valueConsumer =
+ createConsumer(
+ schema.getValueType(),
+ schema.getValueType().getName(),
+ config,
+ structVector.getChildrenFromFields().get(1));
AvroStructConsumer internalConsumer =
new AvroStructConsumer(structVector, new Consumer[] {keyConsumer, valueConsumer});
@@ -664,8 +698,8 @@ private static Consumer createMapConsumer(Schema schema, String name, AvroToArro
return new AvroMapConsumer(mapVector, internalConsumer);
}
- private static Consumer createUnionConsumer(Schema schema, String name, AvroToArrowConfig config,
- FieldVector consumerVector) {
+ private static Consumer createUnionConsumer(
+ Schema schema, String name, AvroToArrowConfig config, FieldVector consumerVector) {
final int size = schema.getTypes().size();
final boolean nullable = schema.getTypes().stream().anyMatch(t -> t.getType() == Type.NULL);
@@ -695,14 +729,12 @@ private static Consumer createUnionConsumer(Schema schema, String name, AvroToAr
/**
* Read data from {@link Decoder} and generate a {@link VectorSchemaRoot}.
+ *
* @param schema avro schema
* @param decoder avro decoder to read data from
*/
static VectorSchemaRoot avroToArrowVectors(
- Schema schema,
- Decoder decoder,
- AvroToArrowConfig config)
- throws IOException {
+ Schema schema, Decoder decoder, AvroToArrowConfig config) throws IOException {
    List<FieldVector> vectors = new ArrayList<>();
    List<Consumer> consumers = new ArrayList<>();
@@ -726,8 +758,8 @@ static VectorSchemaRoot avroToArrowVectors(
}
long validConsumerCount = consumers.stream().filter(c -> !c.skippable()).count();
- Preconditions.checkArgument(vectors.size() == validConsumerCount,
- "vectors size not equals consumers size.");
+ Preconditions.checkArgument(
+ vectors.size() == validConsumerCount, "vectors size not equals consumers size.");
    List<Field> fields = vectors.stream().map(t -> t.getField()).collect(Collectors.toList());
@@ -767,9 +799,7 @@ private static Map<String, String> getMetaData(Schema schema, Map<String, Strin
    final Map<String, String> extProps = new HashMap<>();
String doc = schema.getDoc();
@@ -783,8 +813,9 @@ private static Map<String, String> createExternalProps(Schema schema) {
return extProps;
}
-  private static FieldType createFieldType(ArrowType arrowType, Schema schema, Map<String, String> externalProps) {
-    return createFieldType(arrowType, schema, externalProps, /*dictionary=*/null);
+  private static FieldType createFieldType(
+      ArrowType arrowType, Schema schema, Map<String, String> externalProps) {
+ return createFieldType(arrowType, schema, externalProps, /* dictionary= */ null);
}
private static FieldType createFieldType(
@@ -793,8 +824,8 @@ private static FieldType createFieldType(
      Map<String, String> externalProps,
DictionaryEncoding dictionary) {
- return new FieldType(/*nullable=*/false, arrowType, dictionary,
- getMetaData(schema, externalProps));
+ return new FieldType(
+ /* nullable= */ false, arrowType, dictionary, getMetaData(schema, externalProps));
}
  private static String convertAliases(Set<String> aliases) {
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowVectorIterator.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowVectorIterator.java
index 4a439ade811..bdf554d7371 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowVectorIterator.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/AvroToArrowVectorIterator.java
@@ -22,7 +22,6 @@
import java.util.Iterator;
import java.util.List;
import java.util.stream.Collectors;
-
import org.apache.arrow.adapter.avro.consumers.CompositeAvroConsumer;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.FieldVector;
@@ -32,9 +31,7 @@
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
-/**
- * VectorSchemaRoot iterator for partially converting avro data.
- */
+/** VectorSchemaRoot iterator for partially converting avro data. */
public class AvroToArrowVectorIterator implements Iterator<VectorSchemaRoot>, AutoCloseable {
public static final int NO_LIMIT_BATCH_SIZE = -1;
@@ -53,28 +50,18 @@ public class AvroToArrowVectorIterator implements Iterator<VectorSchemaRoot>, Au
private final int targetBatchSize;
- /**
- * Construct an instance.
- */
- private AvroToArrowVectorIterator(
- Decoder decoder,
- Schema schema,
- AvroToArrowConfig config) {
+ /** Construct an instance. */
+ private AvroToArrowVectorIterator(Decoder decoder, Schema schema, AvroToArrowConfig config) {
this.decoder = decoder;
this.schema = schema;
this.config = config;
this.targetBatchSize = config.getTargetBatchSize();
-
}
- /**
- * Create a ArrowVectorIterator to partially convert data.
- */
+ /** Create a ArrowVectorIterator to partially convert data. */
public static AvroToArrowVectorIterator create(
- Decoder decoder,
- Schema schema,
- AvroToArrowConfig config) {
+ Decoder decoder, Schema schema, AvroToArrowConfig config) {
AvroToArrowVectorIterator iterator = new AvroToArrowVectorIterator(decoder, schema, config);
try {
@@ -136,9 +123,10 @@ private void load(VectorSchemaRoot root) {
ValueVectorUtility.preAllocate(root, targetBatchSize);
}
- long validConsumerCount = compositeConsumer.getConsumers().stream().filter(c ->
- !c.skippable()).count();
- Preconditions.checkArgument(root.getFieldVectors().size() == validConsumerCount,
+ long validConsumerCount =
+ compositeConsumer.getConsumers().stream().filter(c -> !c.skippable()).count();
+ Preconditions.checkArgument(
+ root.getFieldVectors().size() == validConsumerCount,
"Schema root vectors size not equals to consumers size.");
compositeConsumer.resetConsumerVectors(root);
@@ -159,9 +147,7 @@ public boolean hasNext() {
return nextBatch != null;
}
- /**
- * Gets the next vector. The user is responsible for freeing its resources.
- */
+ /** Gets the next vector. The user is responsible for freeing its resources. */
public VectorSchemaRoot next() {
Preconditions.checkArgument(hasNext());
VectorSchemaRoot returned = nextBatch;
@@ -174,9 +160,7 @@ public VectorSchemaRoot next() {
return returned;
}
- /**
- * Clean up resources.
- */
+ /** Clean up resources. */
public void close() {
if (nextBatch != null) {
nextBatch.close();
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroArraysConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroArraysConsumer.java
index fd25986c32b..ae84a4d9af5 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroArraysConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroArraysConsumer.java
@@ -18,21 +18,17 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.complex.ListVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume array type values from avro decoder.
- * Write the data to {@link ListVector}.
+ * Consumer which consume array type values from avro decoder. Write the data to {@link ListVector}.
*/
public class AvroArraysConsumer extends BaseAvroConsumer<ListVector> {
private final Consumer delegate;
- /**
- * Instantiate a ArrayConsumer.
- */
+ /** Instantiate a ArrayConsumer. */
public AvroArraysConsumer(ListVector vector, Consumer delegate) {
super(vector);
this.delegate = delegate;
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBooleanConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBooleanConsumer.java
index bf41828d19f..88c07173328 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBooleanConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBooleanConsumer.java
@@ -18,19 +18,16 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.BitVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume boolean type values from avro decoder.
- * Write the data to {@link BitVector}.
+ * Consumer which consume boolean type values from avro decoder. Write the data to {@link
+ * BitVector}.
*/
public class AvroBooleanConsumer extends BaseAvroConsumer<BitVector> {
- /**
- * Instantiate a AvroBooleanConsumer.
- */
+ /** Instantiate a AvroBooleanConsumer. */
public AvroBooleanConsumer(BitVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBytesConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBytesConsumer.java
index c8370e48060..aa063c31c67 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBytesConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroBytesConsumer.java
@@ -19,21 +19,18 @@
import java.io.IOException;
import java.nio.ByteBuffer;
-
import org.apache.arrow.vector.VarBinaryVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume bytes type values from avro decoder.
- * Write the data to {@link VarBinaryVector}.
+ * Consumer which consume bytes type values from avro decoder. Write the data to {@link
+ * VarBinaryVector}.
*/
public class AvroBytesConsumer extends BaseAvroConsumer<VarBinaryVector> {
private ByteBuffer cacheBuffer;
- /**
- * Instantiate a AvroBytesConsumer.
- */
+ /** Instantiate a AvroBytesConsumer. */
public AvroBytesConsumer(VarBinaryVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroDoubleConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroDoubleConsumer.java
index 7cc7dd33b15..c387896ffc0 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroDoubleConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroDoubleConsumer.java
@@ -18,19 +18,16 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.Float8Vector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume double type values from avro decoder.
- * Write the data to {@link Float8Vector}.
+ * Consumer which consume double type values from avro decoder. Write the data to {@link
+ * Float8Vector}.
*/
public class AvroDoubleConsumer extends BaseAvroConsumer<Float8Vector> {
- /**
- * Instantiate a AvroDoubleConsumer.
- */
+ /** Instantiate a AvroDoubleConsumer. */
public AvroDoubleConsumer(Float8Vector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroEnumConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroEnumConsumer.java
index 32a2c85f6fc..6d06f3fbce2 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroEnumConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroEnumConsumer.java
@@ -18,20 +18,16 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.BaseIntVector;
import org.apache.arrow.vector.IntVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume enum type values from avro decoder.
- * Write the data to {@link IntVector}.
+ * Consumer which consume enum type values from avro decoder. Write the data to {@link IntVector}.
*/
public class AvroEnumConsumer extends BaseAvroConsumer<BaseIntVector> {
- /**
- * Instantiate a AvroEnumConsumer.
- */
+ /** Instantiate a AvroEnumConsumer. */
public AvroEnumConsumer(BaseIntVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFixedConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFixedConsumer.java
index 16b70898fd3..151fd3e653c 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFixedConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFixedConsumer.java
@@ -18,21 +18,18 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.FixedSizeBinaryVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume fixed type values from avro decoder.
- * Write the data to {@link org.apache.arrow.vector.FixedSizeBinaryVector}.
+ * Consumer which consume fixed type values from avro decoder. Write the data to {@link
+ * org.apache.arrow.vector.FixedSizeBinaryVector}.
*/
public class AvroFixedConsumer extends BaseAvroConsumer<FixedSizeBinaryVector> {
private final byte[] reuseBytes;
- /**
- * Instantiate a AvroFixedConsumer.
- */
+ /** Instantiate a AvroFixedConsumer. */
public AvroFixedConsumer(FixedSizeBinaryVector vector, int size) {
super(vector);
reuseBytes = new byte[size];
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFloatConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFloatConsumer.java
index b09d2881875..ec37fe0d1f6 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFloatConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroFloatConsumer.java
@@ -18,19 +18,16 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.Float4Vector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume float type values from avro decoder.
- * Write the data to {@link Float4Vector}.
+ * Consumer which consume float type values from avro decoder. Write the data to {@link
+ * Float4Vector}.
*/
public class AvroFloatConsumer extends BaseAvroConsumer<Float4Vector> {
- /**
- * Instantiate a AvroFloatConsumer.
- */
+ /** Instantiate a AvroFloatConsumer. */
public AvroFloatConsumer(Float4Vector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroIntConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroIntConsumer.java
index ae5a2719c56..68c93cf8e9e 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroIntConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroIntConsumer.java
@@ -18,19 +18,15 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.IntVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume int type values from avro decoder.
- * Write the data to {@link IntVector}.
+ * Consumer which consume int type values from avro decoder. Write the data to {@link IntVector}.
*/
public class AvroIntConsumer extends BaseAvroConsumer<IntVector> {
- /**
- * Instantiate a AvroIntConsumer.
- */
+ /** Instantiate a AvroIntConsumer. */
public AvroIntConsumer(IntVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroLongConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroLongConsumer.java
index 4db836acc45..525a6303d9e 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroLongConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroLongConsumer.java
@@ -18,19 +18,16 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.BigIntVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume long type values from avro decoder.
- * Write the data to {@link BigIntVector}.
+ * Consumer which consume long type values from avro decoder. Write the data to {@link
+ * BigIntVector}.
*/
public class AvroLongConsumer extends BaseAvroConsumer<BigIntVector> {
- /**
- * Instantiate a AvroLongConsumer.
- */
+ /** Instantiate a AvroLongConsumer. */
public AvroLongConsumer(BigIntVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroMapConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroMapConsumer.java
index 1ea97e63b61..87e8a0666df 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroMapConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroMapConsumer.java
@@ -18,23 +18,19 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.complex.MapVector;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume map type values from avro decoder.
- * Write the data to {@link MapVector}.
+ * Consumer which consume map type values from avro decoder. Write the data to {@link MapVector}.
*/
public class AvroMapConsumer extends BaseAvroConsumer<MapVector> {
private final Consumer delegate;
- /**
- * Instantiate a AvroMapConsumer.
- */
+ /** Instantiate a AvroMapConsumer. */
public AvroMapConsumer(MapVector vector, Consumer delegate) {
super(vector);
this.delegate = delegate;
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroNullConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroNullConsumer.java
index 4c7bb8c03ba..5e43264a8b9 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroNullConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroNullConsumer.java
@@ -18,13 +18,12 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.NullVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume null type values from avro decoder.
- * Corresponding to {@link org.apache.arrow.vector.NullVector}.
+ * Consumer which consume null type values from avro decoder. Corresponding to {@link
+ * org.apache.arrow.vector.NullVector}.
*/
public class AvroNullConsumer extends BaseAvroConsumer<NullVector> {
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStringConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStringConsumer.java
index 072270aa6c0..fe33af4d422 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStringConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStringConsumer.java
@@ -19,21 +19,18 @@
import java.io.IOException;
import java.nio.ByteBuffer;
-
import org.apache.arrow.vector.VarCharVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume string type values from avro decoder.
- * Write the data to {@link VarCharVector}.
+ * Consumer which consume string type values from avro decoder. Write the data to {@link
+ * VarCharVector}.
*/
public class AvroStringConsumer extends BaseAvroConsumer<VarCharVector> {
private ByteBuffer cacheBuffer;
- /**
- * Instantiate a AvroStringConsumer.
- */
+ /** Instantiate a AvroStringConsumer. */
public AvroStringConsumer(VarCharVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStructConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStructConsumer.java
index a02b1577f9f..b5c143e0c9a 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStructConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroStructConsumer.java
@@ -18,23 +18,20 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.util.AutoCloseables;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume nested record type values from avro decoder.
- * Write the data to {@link org.apache.arrow.vector.complex.StructVector}.
+ * Consumer which consume nested record type values from avro decoder. Write the data to {@link
+ * org.apache.arrow.vector.complex.StructVector}.
*/
public class AvroStructConsumer extends BaseAvroConsumer<StructVector> {
private final Consumer[] delegates;
- /**
- * Instantiate a AvroStructConsumer.
- */
+ /** Instantiate a AvroStructConsumer. */
public AvroStructConsumer(StructVector vector, Consumer[] delegates) {
super(vector);
this.delegates = delegates;
@@ -49,7 +46,6 @@ public void consume(Decoder decoder) throws IOException {
}
vector.setIndexDefined(currentIndex);
currentIndex++;
-
}
@Override
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroUnionsConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroUnionsConsumer.java
index 76287543b06..b25b0ef91b8 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroUnionsConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/AvroUnionsConsumer.java
@@ -18,7 +18,6 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.util.AutoCloseables;
import org.apache.arrow.vector.ValueVector;
import org.apache.arrow.vector.complex.UnionVector;
@@ -26,17 +25,15 @@
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume unions type values from avro decoder.
- * Write the data to {@link org.apache.arrow.vector.complex.UnionVector}.
+ * Consumer which consume unions type values from avro decoder. Write the data to {@link
+ * org.apache.arrow.vector.complex.UnionVector}.
*/
public class AvroUnionsConsumer extends BaseAvroConsumer<UnionVector> {
private Consumer[] delegates;
private Types.MinorType[] types;
- /**
- * Instantiate an AvroUnionConsumer.
- */
+ /** Instantiate an AvroUnionConsumer. */
public AvroUnionsConsumer(UnionVector vector, Consumer[] delegates, Types.MinorType[] types) {
super(vector);
@@ -53,7 +50,8 @@ public void consume(Decoder decoder) throws IOException {
vector.setType(currentIndex, types[fieldIndex]);
// In UnionVector we need to set sub vector writer position before consume a value
- // because in the previous iterations we might not have written to the specific union sub vector.
+ // because in the previous iterations we might not have written to the specific union sub
+ // vector.
delegate.setPosition(currentIndex);
delegate.consume(decoder);
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/BaseAvroConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/BaseAvroConsumer.java
index 66a6cda6840..3bf8704e45b 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/BaseAvroConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/BaseAvroConsumer.java
@@ -21,6 +21,7 @@
/**
* Base class for non-skippable avro consumers.
+ *
 * @param <T> vector type.
*/
public abstract class BaseAvroConsumer<T extends FieldVector> implements Consumer<T> {
@@ -30,6 +31,7 @@ public abstract class BaseAvroConsumer<T extends FieldVector> implements Consume
/**
* Constructs a base avro consumer.
+ *
* @param vector the vector to consume.
*/
public BaseAvroConsumer(T vector) {
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/CompositeAvroConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/CompositeAvroConsumer.java
index 97812226180..cd01af28209 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/CompositeAvroConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/CompositeAvroConsumer.java
@@ -19,15 +19,11 @@
import java.io.IOException;
import java.util.List;
-
import org.apache.arrow.util.AutoCloseables;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.avro.io.Decoder;
-/**
- * Composite consumer which hold all consumers.
- * It manages the consume and cleanup process.
- */
+/** Composite consumer which hold all consumers. It manages the consume and cleanup process. */
public class CompositeAvroConsumer implements AutoCloseable {
  private final List<Consumer> consumers;
@@ -40,18 +36,14 @@ public CompositeAvroConsumer(List<Consumer> consumers) {
this.consumers = consumers;
}
- /**
- * Consume decoder data.
- */
+ /** Consume decoder data. */
public void consume(Decoder decoder) throws IOException {
for (Consumer consumer : consumers) {
consumer.consume(decoder);
}
}
- /**
- * Reset vector of consumers with the given {@link VectorSchemaRoot}.
- */
+ /** Reset vector of consumers with the given {@link VectorSchemaRoot}. */
public void resetConsumerVectors(VectorSchemaRoot root) {
int index = 0;
for (Consumer consumer : consumers) {
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/Consumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/Consumer.java
index c2ae1ce77b2..65925630b74 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/Consumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/Consumer.java
@@ -18,54 +18,45 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.FieldVector;
import org.apache.avro.io.Decoder;
/**
* Interface that is used to consume values from avro decoder.
+ *
* @param <T> The vector within consumer or its delegate, used for partially consume purpose.
*/
public interface Consumer<T extends FieldVector> extends AutoCloseable {
/**
* Consume a specific type value from avro decoder and write it to vector.
+ *
* @param decoder avro decoder to read data
* @throws IOException on error
*/
void consume(Decoder decoder) throws IOException;
- /**
- * Add null value to vector by making writer position + 1.
- */
+ /** Add null value to vector by making writer position + 1. */
void addNull();
- /**
- * Set the position to write value into vector.
- */
+ /** Set the position to write value into vector. */
void setPosition(int index);
- /**
- * Get the vector within the consumer.
- */
+ /** Get the vector within the consumer. */
FieldVector getVector();
- /**
- * Close this consumer when occurs exception to avoid potential leak.
- */
+ /** Close this consumer when an exception occurs, to avoid potential leaks. */
void close() throws Exception;
/**
* Reset the vector within consumer for partial read purpose.
+ *
* @return true if reset is successful, false if reset is not needed.
*/
boolean resetValueVector(T vector);
- /**
- * Indicates whether the consumer is type of {@link SkipConsumer}.
- */
+ /** Indicates whether the consumer is a {@link SkipConsumer}. */
default boolean skippable() {
return false;
}
-
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipConsumer.java
index 1ac0a6d7155..05a843afd1a 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipConsumer.java
@@ -18,13 +18,10 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.arrow.vector.FieldVector;
import org.apache.avro.io.Decoder;
-/**
- * Consumer which skip (throw away) data from the decoder.
- */
+/** Consumer which skips (throws away) data from the decoder. */
public class SkipConsumer implements Consumer {
private final SkipFunction skipFunction;
@@ -39,12 +36,10 @@ public void consume(Decoder decoder) throws IOException {
}
@Override
- public void addNull() {
- }
+ public void addNull() {}
@Override
- public void setPosition(int index) {
- }
+ public void setPosition(int index) {}
@Override
public FieldVector getVector() {
@@ -52,8 +47,7 @@ public FieldVector getVector() {
}
@Override
- public void close() throws Exception {
- }
+ public void close() throws Exception {}
@Override
public boolean resetValueVector(FieldVector vector) {
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipFunction.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipFunction.java
index 93fc4a7fede..61530cb4e4e 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipFunction.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/SkipFunction.java
@@ -18,12 +18,9 @@
package org.apache.arrow.adapter.avro.consumers;
import java.io.IOException;
-
import org.apache.avro.io.Decoder;
-/**
- * Adapter function to skip (throw away) data from the decoder.
- */
+/** Adapter function to skip (throw away) data from the decoder. */
@FunctionalInterface
public interface SkipFunction {
void apply(Decoder decoder) throws IOException;
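
Taken together, a `SkipConsumer` wraps a `SkipFunction` that reads past an unwanted value; a small sketch (assumes the `SkipConsumer(SkipFunction)` constructor implied by the `skipFunction` field above):

.. code-block:: java

   import java.io.IOException;
   import org.apache.arrow.adapter.avro.consumers.SkipConsumer;
   import org.apache.avro.io.Decoder;

   class SkipSketch {
     // Reads past one string field without writing to any vector.
     static void skipStringField(Decoder decoder) throws IOException {
       SkipConsumer skipString = new SkipConsumer(d -> d.skipString());
       skipString.consume(decoder);
     }
   }
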
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDateConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDateConsumer.java
index a5c36d88fb7..ee4de339429 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDateConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDateConsumer.java
@@ -18,20 +18,17 @@
package org.apache.arrow.adapter.avro.consumers.logical;
import java.io.IOException;
-
import org.apache.arrow.adapter.avro.consumers.BaseAvroConsumer;
import org.apache.arrow.vector.DateDayVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume date type values from avro decoder.
- * Write the data to {@link DateDayVector}.
+ * Consumer which consumes date type values from avro decoder. Writes the data to {@link
+ * DateDayVector}.
*/
public class AvroDateConsumer extends BaseAvroConsumer<DateDayVector> {
- /**
- * Instantiate a AvroDateConsumer.
- */
+ /** Instantiate an AvroDateConsumer. */
public AvroDateConsumer(DateDayVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDecimalConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDecimalConsumer.java
index ebe5ca3884e..2defa736b19 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDecimalConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroDecimalConsumer.java
@@ -19,35 +19,28 @@
import java.io.IOException;
import java.nio.ByteBuffer;
-
import org.apache.arrow.adapter.avro.consumers.BaseAvroConsumer;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.DecimalVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume decimal type values from avro decoder.
- * Write the data to {@link DecimalVector}.
+ * Consumer which consumes decimal type values from avro decoder. Writes the data to {@link
+ * DecimalVector}.
*/
public abstract class AvroDecimalConsumer extends BaseAvroConsumer<DecimalVector> {
- /**
- * Instantiate a AvroDecimalConsumer.
- */
+ /** Instantiate an AvroDecimalConsumer. */
public AvroDecimalConsumer(DecimalVector vector) {
super(vector);
}
- /**
- * Consumer for decimal logical type with original bytes type.
- */
+ /** Consumer for decimal logical type with original bytes type. */
public static class BytesDecimalConsumer extends AvroDecimalConsumer {
private ByteBuffer cacheBuffer;
- /**
- * Instantiate a BytesDecimalConsumer.
- */
+ /** Instantiate a BytesDecimalConsumer. */
public BytesDecimalConsumer(DecimalVector vector) {
super(vector);
}
@@ -60,19 +53,14 @@ public void consume(Decoder decoder) throws IOException {
cacheBuffer.get(bytes);
vector.setBigEndian(currentIndex++, bytes);
}
-
}
- /**
- * Consumer for decimal logical type with original fixed type.
- */
+ /** Consumer for decimal logical type with original fixed type. */
public static class FixedDecimalConsumer extends AvroDecimalConsumer {
private byte[] reuseBytes;
- /**
- * Instantiate a FixedDecimalConsumer.
- */
+ /** Instantiate a FixedDecimalConsumer. */
public FixedDecimalConsumer(DecimalVector vector, int size) {
super(vector);
Preconditions.checkArgument(size <= 16, "Decimal bytes length should <= 16.");
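
The `setBigEndian` call above matches the Avro decimal encoding: the value's unscaled integer in big-endian two's-complement form. A one-method sketch of producing such bytes:

.. code-block:: java

   import java.math.BigDecimal;

   class DecimalBytesSketch {
     // BigInteger#toByteArray yields big-endian two's-complement bytes,
     // the same layout DecimalVector#setBigEndian consumes.
     static byte[] toAvroDecimalBytes(BigDecimal value) {
       return value.unscaledValue().toByteArray();
     }
   }
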
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMicroConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMicroConsumer.java
index 89216d4ad14..e838a269765 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMicroConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMicroConsumer.java
@@ -18,20 +18,17 @@
package org.apache.arrow.adapter.avro.consumers.logical;
import java.io.IOException;
-
import org.apache.arrow.adapter.avro.consumers.BaseAvroConsumer;
import org.apache.arrow.vector.TimeMicroVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume date time-micro values from avro decoder.
- * Write the data to {@link TimeMicroVector}.
+ * Consumer which consumes time-micro values from avro decoder. Writes the data to {@link
+ * TimeMicroVector}.
*/
public class AvroTimeMicroConsumer extends BaseAvroConsumer<TimeMicroVector> {
- /**
- * Instantiate a AvroTimeMicroConsumer.
- */
+ /** Instantiate an AvroTimeMicroConsumer. */
public AvroTimeMicroConsumer(TimeMicroVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMillisConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMillisConsumer.java
index ab5df8d4bc8..3780e4bb4a0 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMillisConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimeMillisConsumer.java
@@ -18,20 +18,17 @@
package org.apache.arrow.adapter.avro.consumers.logical;
import java.io.IOException;
-
import org.apache.arrow.adapter.avro.consumers.BaseAvroConsumer;
import org.apache.arrow.vector.TimeMilliVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume date time-millis values from avro decoder.
- * Write the data to {@link TimeMilliVector}.
+ * Consumer which consumes time-millis values from avro decoder. Writes the data to {@link
+ * TimeMilliVector}.
*/
public class AvroTimeMillisConsumer extends BaseAvroConsumer<TimeMilliVector> {
- /**
- * Instantiate a AvroTimeMilliConsumer.
- */
+ /** Instantiate an AvroTimeMillisConsumer. */
public AvroTimeMillisConsumer(TimeMilliVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMicrosConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMicrosConsumer.java
index 93b39d479ff..9eb01ac76db 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMicrosConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMicrosConsumer.java
@@ -18,20 +18,17 @@
package org.apache.arrow.adapter.avro.consumers.logical;
import java.io.IOException;
-
import org.apache.arrow.adapter.avro.consumers.BaseAvroConsumer;
import org.apache.arrow.vector.TimeStampMicroVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume date timestamp-micro values from avro decoder.
- * Write the data to {@link TimeStampMicroVector}.
+ * Consumer which consumes timestamp-micro values from avro decoder. Writes the data to {@link
+ * TimeStampMicroVector}.
*/
public class AvroTimestampMicrosConsumer extends BaseAvroConsumer<TimeStampMicroVector> {
- /**
- * Instantiate a AvroTimestampMicroConsumer.
- */
+ /** Instantiate an AvroTimestampMicrosConsumer. */
public AvroTimestampMicrosConsumer(TimeStampMicroVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMillisConsumer.java b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMillisConsumer.java
index 9e651c3959f..e665082d143 100644
--- a/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMillisConsumer.java
+++ b/java/adapter/avro/src/main/java/org/apache/arrow/adapter/avro/consumers/logical/AvroTimestampMillisConsumer.java
@@ -18,20 +18,17 @@
package org.apache.arrow.adapter.avro.consumers.logical;
import java.io.IOException;
-
import org.apache.arrow.adapter.avro.consumers.BaseAvroConsumer;
import org.apache.arrow.vector.TimeStampMilliVector;
import org.apache.avro.io.Decoder;
/**
- * Consumer which consume date timestamp-millis values from avro decoder.
- * Write the data to {@link TimeStampMilliVector}.
+ * Consumer which consumes timestamp-millis values from avro decoder. Writes the data to {@link
+ * TimeStampMilliVector}.
*/
public class AvroTimestampMillisConsumer extends BaseAvroConsumer<TimeStampMilliVector> {
- /**
- * Instantiate a AvroTimestampMillisConsumer.
- */
+ /** Instantiate an AvroTimestampMillisConsumer. */
public AvroTimestampMillisConsumer(TimeStampMilliVector vector) {
super(vector);
}
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroLogicalTypesTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroLogicalTypesTest.java
index 6ee04e33a5c..956760fa34b 100644
--- a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroLogicalTypesTest.java
+++ b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroLogicalTypesTest.java
@@ -27,7 +27,6 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.util.DateUtility;
@@ -43,13 +42,13 @@ public void testTimestampMicros() throws Exception {
Schema schema = getSchema("logical/test_timestamp_micros.avsc");
List<Long> data = Arrays.asList(10000L, 20000L, 30000L, 40000L, 50000L);
- List<LocalDateTime> expected = Arrays.asList(
- DateUtility.getLocalDateTimeFromEpochMicro(10000),
- DateUtility.getLocalDateTimeFromEpochMicro(20000),
- DateUtility.getLocalDateTimeFromEpochMicro(30000),
- DateUtility.getLocalDateTimeFromEpochMicro(40000),
- DateUtility.getLocalDateTimeFromEpochMicro(50000)
- );
+ List<LocalDateTime> expected =
+ Arrays.asList(
+ DateUtility.getLocalDateTimeFromEpochMicro(10000),
+ DateUtility.getLocalDateTimeFromEpochMicro(20000),
+ DateUtility.getLocalDateTimeFromEpochMicro(30000),
+ DateUtility.getLocalDateTimeFromEpochMicro(40000),
+ DateUtility.getLocalDateTimeFromEpochMicro(50000));
VectorSchemaRoot root = writeAndRead(schema, data);
FieldVector vector = root.getFieldVectors().get(0);
@@ -62,13 +61,13 @@ public void testTimestampMillis() throws Exception {
Schema schema = getSchema("logical/test_timestamp_millis.avsc");
List<Long> data = Arrays.asList(10000L, 20000L, 30000L, 40000L, 50000L);
- List<LocalDateTime> expected = Arrays.asList(
- DateUtility.getLocalDateTimeFromEpochMilli(10000),
- DateUtility.getLocalDateTimeFromEpochMilli(20000),
- DateUtility.getLocalDateTimeFromEpochMilli(30000),
- DateUtility.getLocalDateTimeFromEpochMilli(40000),
- DateUtility.getLocalDateTimeFromEpochMilli(50000)
- );
+ List<LocalDateTime> expected =
+ Arrays.asList(
+ DateUtility.getLocalDateTimeFromEpochMilli(10000),
+ DateUtility.getLocalDateTimeFromEpochMilli(20000),
+ DateUtility.getLocalDateTimeFromEpochMilli(30000),
+ DateUtility.getLocalDateTimeFromEpochMilli(40000),
+ DateUtility.getLocalDateTimeFromEpochMilli(50000));
VectorSchemaRoot root = writeAndRead(schema, data);
FieldVector vector = root.getFieldVectors().get(0);
@@ -93,13 +92,13 @@ public void testTimeMillis() throws Exception {
Schema schema = getSchema("logical/test_time_millis.avsc");
List<Integer> data = Arrays.asList(100, 200, 300, 400, 500);
- List<LocalDateTime> expected = Arrays.asList(
- DateUtility.getLocalDateTimeFromEpochMilli(100),
- DateUtility.getLocalDateTimeFromEpochMilli(200),
- DateUtility.getLocalDateTimeFromEpochMilli(300),
- DateUtility.getLocalDateTimeFromEpochMilli(400),
- DateUtility.getLocalDateTimeFromEpochMilli(500)
- );
+ List<LocalDateTime> expected =
+ Arrays.asList(
+ DateUtility.getLocalDateTimeFromEpochMilli(100),
+ DateUtility.getLocalDateTimeFromEpochMilli(200),
+ DateUtility.getLocalDateTimeFromEpochMilli(300),
+ DateUtility.getLocalDateTimeFromEpochMilli(400),
+ DateUtility.getLocalDateTimeFromEpochMilli(500));
VectorSchemaRoot root = writeAndRead(schema, data);
FieldVector vector = root.getFieldVectors().get(0);
@@ -137,7 +136,6 @@ public void testDecimalWithOriginalBytes() throws Exception {
VectorSchemaRoot root = writeAndRead(schema, data);
FieldVector vector = root.getFieldVectors().get(0);
checkPrimitiveResult(expected, vector);
-
}
@Test
@@ -174,10 +172,9 @@ public void testInvalidDecimalPrecision() throws Exception {
data.add(buffer);
}
- IllegalArgumentException e = assertThrows(IllegalArgumentException.class,
- () -> writeAndRead(schema, data));
+ IllegalArgumentException e =
+ assertThrows(IllegalArgumentException.class, () -> writeAndRead(schema, data));
assertTrue(e.getMessage().contains("Precision must be in range of 1 to 38"));
-
}
@Test
@@ -197,5 +194,4 @@ public void testFailedToCreateDecimalLogicalType() throws Exception {
Schema schema3 = getSchema("logical/test_decimal_invalid4.avsc");
assertNull(schema3.getLogicalType());
}
-
}
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroSkipFieldTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroSkipFieldTest.java
index a37eca6514e..7c102610812 100644
--- a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroSkipFieldTest.java
+++ b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroSkipFieldTest.java
@@ -25,7 +25,6 @@
import java.util.HashMap;
import java.util.HashSet;
import java.util.Set;
-
import org.apache.arrow.vector.VectorSchemaRoot;
import org.apache.arrow.vector.complex.StructVector;
import org.apache.arrow.vector.types.Types;
@@ -40,7 +39,10 @@ public class AvroSkipFieldTest extends AvroTestBase {
public void testSkipUnionWithOneField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f0");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_union_before.avsc");
Schema expectedSchema = getSchema("skip/test_skip_union_one_field_expected.avsc");
@@ -69,7 +71,10 @@ public void testSkipUnionWithOneField() throws Exception {
public void testSkipUnionWithNullableOneField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f1");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_union_before.avsc");
Schema expectedSchema = getSchema("skip/test_skip_union_nullable_field_expected.avsc");
@@ -98,7 +103,10 @@ public void testSkipUnionWithNullableOneField() throws Exception {
public void testSkipUnionWithMultiFields() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f2");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_union_before.avsc");
Schema expectedSchema = getSchema("skip/test_skip_union_multi_fields_expected.avsc");
@@ -127,7 +135,10 @@ public void testSkipUnionWithMultiFields() throws Exception {
public void testSkipMapField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f1");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_map_before.avsc");
Schema expectedSchema = getSchema("skip/test_skip_map_expected.avsc");
@@ -159,7 +170,10 @@ public void testSkipMapField() throws Exception {
public void testSkipArrayField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f1");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_array_before.avsc");
Schema expectedSchema = getSchema("skip/test_skip_array_expected.avsc");
@@ -188,7 +202,10 @@ public void testSkipMultiFields() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f1");
skipFieldNames.add("f2");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("test_record.avsc");
Schema expectedSchema = getSchema("skip/test_skip_multi_fields_expected.avsc");
@@ -215,7 +232,10 @@ public void testSkipMultiFields() throws Exception {
public void testSkipStringField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f2");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base1.avsc");
Schema expectedSchema = getSchema("skip/test_skip_string_expected.avsc");
@@ -228,7 +248,8 @@ public void testSkipStringField() throws Exception {
GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
fixed.bytes(testBytes);
record.put(0, fixed);
- GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
+ GenericData.EnumSymbol symbol =
+ new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
record.put(1, symbol);
record.put(2, "testtest" + i);
record.put(3, ByteBuffer.wrap(testBytes));
@@ -249,7 +270,10 @@ public void testSkipStringField() throws Exception {
public void testSkipBytesField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f3");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base1.avsc");
Schema expectedSchema = getSchema("skip/test_skip_bytes_expected.avsc");
@@ -262,7 +286,8 @@ public void testSkipBytesField() throws Exception {
GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
fixed.bytes(testBytes);
record.put(0, fixed);
- GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
+ GenericData.EnumSymbol symbol =
+ new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
record.put(1, symbol);
record.put(2, "testtest" + i);
record.put(3, ByteBuffer.wrap(testBytes));
@@ -283,7 +308,10 @@ public void testSkipBytesField() throws Exception {
public void testSkipFixedField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f0");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base1.avsc");
Schema expectedSchema = getSchema("skip/test_skip_fixed_expected.avsc");
@@ -296,7 +324,8 @@ public void testSkipFixedField() throws Exception {
GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
fixed.bytes(testBytes);
record.put(0, fixed);
- GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
+ GenericData.EnumSymbol symbol =
+ new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
record.put(1, symbol);
record.put(2, "testtest" + i);
record.put(3, ByteBuffer.wrap(testBytes));
@@ -317,7 +346,10 @@ public void testSkipFixedField() throws Exception {
public void testSkipEnumField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f1");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base1.avsc");
Schema expectedSchema = getSchema("skip/test_skip_fixed_expected.avsc");
@@ -330,7 +362,8 @@ public void testSkipEnumField() throws Exception {
GenericData.Fixed fixed = new GenericData.Fixed(schema.getField("f0").schema());
fixed.bytes(testBytes);
record.put(0, fixed);
- GenericData.EnumSymbol symbol = new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
+ GenericData.EnumSymbol symbol =
+ new GenericData.EnumSymbol(schema.getField("f1").schema(), "TEST" + i % 2);
record.put(1, symbol);
record.put(2, "testtest" + i);
record.put(3, ByteBuffer.wrap(testBytes));
@@ -351,7 +384,10 @@ public void testSkipEnumField() throws Exception {
public void testSkipBooleanField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f0");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base2.avsc");
Schema expectedSchema = getSchema("skip/test_skip_boolean_expected.avsc");
@@ -384,7 +420,10 @@ public void testSkipBooleanField() throws Exception {
public void testSkipIntField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f1");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base2.avsc");
Schema expectedSchema = getSchema("skip/test_skip_int_expected.avsc");
@@ -417,7 +456,10 @@ public void testSkipIntField() throws Exception {
public void testSkipLongField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f2");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base2.avsc");
Schema expectedSchema = getSchema("skip/test_skip_long_expected.avsc");
@@ -450,7 +492,10 @@ public void testSkipLongField() throws Exception {
public void testSkipFloatField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f3");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base2.avsc");
Schema expectedSchema = getSchema("skip/test_skip_float_expected.avsc");
@@ -483,7 +528,10 @@ public void testSkipFloatField() throws Exception {
public void testSkipDoubleField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f4");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_base2.avsc");
Schema expectedSchema = getSchema("skip/test_skip_double_expected.avsc");
@@ -516,7 +564,10 @@ public void testSkipDoubleField() throws Exception {
public void testSkipRecordField() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f0");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("skip/test_skip_record_before.avsc");
Schema nestedSchema = schema.getFields().get(0).schema();
ArrayList<GenericRecord> data = new ArrayList<>();
@@ -546,7 +597,10 @@ public void testSkipRecordField() throws Exception {
public void testSkipNestedFields() throws Exception {
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f0.f0");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
Schema schema = getSchema("test_nested_record.avsc");
Schema nestedSchema = schema.getFields().get(0).schema();
ArrayList<GenericRecord> data = new ArrayList<>();
@@ -602,21 +656,26 @@ public void testSkipThirdLevelField() throws Exception {
assertEquals(Types.MinorType.STRUCT, root1.getFieldVectors().get(0).getMinorType());
StructVector secondLevelVector = (StructVector) root1.getFieldVectors().get(0);
assertEquals(1, secondLevelVector.getChildrenFromFields().size());
- assertEquals(Types.MinorType.STRUCT, secondLevelVector.getChildrenFromFields().get(0).getMinorType());
+ assertEquals(
+ Types.MinorType.STRUCT, secondLevelVector.getChildrenFromFields().get(0).getMinorType());
StructVector thirdLevelVector = (StructVector) secondLevelVector.getChildrenFromFields().get(0);
assertEquals(3, thirdLevelVector.getChildrenFromFields().size());
// skip third level field and validate
Set<String> skipFieldNames = new HashSet<>();
skipFieldNames.add("f0.f0.f0");
- config = new AvroToArrowConfigBuilder(config.getAllocator()).setSkipFieldNames(skipFieldNames).build();
+ config =
+ new AvroToArrowConfigBuilder(config.getAllocator())
+ .setSkipFieldNames(skipFieldNames)
+ .build();
VectorSchemaRoot root2 = writeAndRead(firstLevelSchema, data);
assertEquals(1, root2.getFieldVectors().size());
assertEquals(Types.MinorType.STRUCT, root2.getFieldVectors().get(0).getMinorType());
StructVector secondStruct = (StructVector) root2.getFieldVectors().get(0);
assertEquals(1, secondStruct.getChildrenFromFields().size());
- assertEquals(Types.MinorType.STRUCT, secondStruct.getChildrenFromFields().get(0).getMinorType());
+ assertEquals(
+ Types.MinorType.STRUCT, secondStruct.getChildrenFromFields().get(0).getMinorType());
StructVector thirdStruct = (StructVector) secondStruct.getChildrenFromFields().get(0);
assertEquals(2, thirdStruct.getChildrenFromFields().size());
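
Every test above builds its configuration the same way; extracted as a standalone sketch (hypothetical helper, same builder calls as the tests):

.. code-block:: java

   import java.util.Arrays;
   import java.util.HashSet;
   import java.util.Set;
   import org.apache.arrow.adapter.avro.AvroToArrowConfig;
   import org.apache.arrow.adapter.avro.AvroToArrowConfigBuilder;
   import org.apache.arrow.memory.BufferAllocator;

   class SkipConfigSketch {
     // Builds a config that drops the named (possibly nested, dot-separated)
     // fields during Avro-to-Arrow conversion.
     static AvroToArrowConfig skipFields(BufferAllocator allocator, String... names) {
       Set<String> skipFieldNames = new HashSet<>(Arrays.asList(names));
       return new AvroToArrowConfigBuilder(allocator).setSkipFieldNames(skipFieldNames).build();
     }
   }
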
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroTestBase.java b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroTestBase.java
index 60a3a285db3..1c2e551c631 100644
--- a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroTestBase.java
+++ b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroTestBase.java
@@ -29,7 +29,6 @@
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
-
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.vector.FieldVector;
@@ -51,8 +50,7 @@
public class AvroTestBase {
- @ClassRule
- public static final TemporaryFolder TMP = new TemporaryFolder();
+ @ClassRule public static final TemporaryFolder TMP = new TemporaryFolder();
protected AvroToArrowConfig config;
@@ -64,18 +62,21 @@ public void init() {
public static Schema getSchema(String schemaName) throws Exception {
try {
- // Attempt to use JDK 9 behavior of getting the module then the resource stream from the module.
+ // Attempt to use JDK 9 behavior of getting the module then the resource stream from the
+ // module.
// Note that this code is caller-sensitive.
Method getModuleMethod = Class.class.getMethod("getModule");
Object module = getModuleMethod.invoke(TestWriteReadAvroRecord.class);
- Method getResourceAsStreamFromModule = module.getClass().getMethod("getResourceAsStream", String.class);
- try (InputStream is = (InputStream) getResourceAsStreamFromModule.invoke(module, "/schema/" + schemaName)) {
- return new Schema.Parser()
- .parse(is);
+ Method getResourceAsStreamFromModule =
+ module.getClass().getMethod("getResourceAsStream", String.class);
+ try (InputStream is =
+ (InputStream) getResourceAsStreamFromModule.invoke(module, "/schema/" + schemaName)) {
+ return new Schema.Parser().parse(is);
}
} catch (NoSuchMethodException ex) {
// Use JDK8 behavior.
- try (InputStream is = TestWriteReadAvroRecord.class.getResourceAsStream("/schema/" + schemaName)) {
+ try (InputStream is =
+ TestWriteReadAvroRecord.class.getResourceAsStream("/schema/" + schemaName)) {
return new Schema.Parser().parse(is);
}
}
@@ -84,11 +85,11 @@ public static Schema getSchema(String schemaName) throws Exception {
protected VectorSchemaRoot writeAndRead(Schema schema, List data) throws Exception {
File dataFile = TMP.newFile();
- BinaryEncoder
- encoder = new EncoderFactory().directBinaryEncoder(new FileOutputStream(dataFile), null);
+ BinaryEncoder encoder =
+ new EncoderFactory().directBinaryEncoder(new FileOutputStream(dataFile), null);
DatumWriter writer = new GenericDatumWriter(schema);
- BinaryDecoder
- decoder = new DecoderFactory().directBinaryDecoder(new FileInputStream(dataFile), null);
+ BinaryDecoder decoder =
+ new DecoderFactory().directBinaryDecoder(new FileInputStream(dataFile), null);
for (Object value : data) {
writer.write(value, encoder);
@@ -145,7 +146,8 @@ protected void checkPrimitiveResult(List data, FieldVector vector) {
}
}
- protected void checkRecordResult(Schema schema, ArrayList<GenericRecord> data, VectorSchemaRoot root) {
+ protected void checkRecordResult(
+ Schema schema, ArrayList<GenericRecord> data, VectorSchemaRoot root) {
assertEquals(data.size(), root.getRowCount());
assertEquals(schema.getFields().size(), root.getFieldVectors().size());
@@ -157,10 +159,10 @@ protected void checkRecordResult(Schema schema, ArrayList<GenericRecord> data, VectorSchemaRoot root) {
checkPrimitiveResult(fieldData, root.getFieldVectors().get(i));
}
-
}
- protected void checkNestedRecordResult(Schema schema, List<GenericRecord> data, VectorSchemaRoot root) {
+ protected void checkNestedRecordResult(
+ Schema schema, List<GenericRecord> data, VectorSchemaRoot root) {
assertEquals(data.size(), root.getRowCount());
assertTrue(schema.getFields().size() == 1);
@@ -176,10 +178,8 @@ protected void checkNestedRecordResult(Schema schema, List<GenericRecord> data, VectorSchemaRoot root) {
checkPrimitiveResult(fieldData, structVector.getChildrenFromFields().get(i));
}
-
}
-
// belows are for iterator api
protected void checkArrayResult(List<List<?>> expected, List<ListVector> vectors) {
@@ -194,10 +194,12 @@ protected void checkArrayResult(List<List<?>> expected, List<ListVector> vectors
}
}
- protected void checkRecordResult(Schema schema, ArrayList<GenericRecord> data, List<VectorSchemaRoot> roots) {
- roots.forEach(root -> {
- assertEquals(schema.getFields().size(), root.getFieldVectors().size());
- });
+ protected void checkRecordResult(
+ Schema schema, ArrayList<GenericRecord> data, List<VectorSchemaRoot> roots) {
+ roots.forEach(
+ root -> {
+ assertEquals(schema.getFields().size(), root.getFieldVectors().size());
+ });
for (int i = 0; i < schema.getFields().size(); i++) {
List fieldData = new ArrayList();
@@ -210,7 +212,6 @@ protected void checkRecordResult(Schema schema, ArrayList<GenericRecord> data, List<VectorSchemaRoot> roots) {
checkPrimitiveResult(fieldData, vectors);
}
-
}
protected void checkPrimitiveResult(List data, List<FieldVector> vectors) {
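
The reflective lookup reformatted in `getSchema` is a JDK 9 / JDK 8 compatibility pattern; isolated as a sketch:

.. code-block:: java

   import java.io.InputStream;
   import java.lang.reflect.Method;

   class ModuleResourceSketch {
     // JDK 9+: resolve a resource through the caller's module via reflection,
     // falling back to Class#getResourceAsStream on JDK 8, where getModule
     // does not exist and NoSuchMethodException is thrown.
     static InputStream open(Class<?> caller, String path) throws Exception {
       try {
         Method getModule = Class.class.getMethod("getModule");
         Object module = getModule.invoke(caller);
         Method getResourceAsStream =
             module.getClass().getMethod("getResourceAsStream", String.class);
         return (InputStream) getResourceAsStream.invoke(module, path);
       } catch (NoSuchMethodException ex) {
         return caller.getResourceAsStream(path);
       }
     }
   }
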
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowIteratorTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowIteratorTest.java
index 02f7a373373..0479e3693e3 100644
--- a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowIteratorTest.java
+++ b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowIteratorTest.java
@@ -28,7 +28,6 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.memory.RootAllocator;
import org.apache.arrow.util.AutoCloseables;
@@ -59,11 +58,11 @@ public void init() {
private AvroToArrowVectorIterator convert(Schema schema, List data) throws Exception {
File dataFile = TMP.newFile();
- BinaryEncoder
- encoder = new EncoderFactory().directBinaryEncoder(new FileOutputStream(dataFile), null);
+ BinaryEncoder encoder =
+ new EncoderFactory().directBinaryEncoder(new FileOutputStream(dataFile), null);
DatumWriter writer = new GenericDatumWriter(schema);
- BinaryDecoder
- decoder = new DecoderFactory().directBinaryDecoder(new FileInputStream(dataFile), null);
+ BinaryDecoder decoder =
+ new DecoderFactory().directBinaryDecoder(new FileInputStream(dataFile), null);
for (Object value : data) {
writer.write(value, encoder);
@@ -107,7 +106,7 @@ public void testNullableStringType() throws Exception {
List<VectorSchemaRoot> roots = new ArrayList<>();
List<FieldVector> vectors = new ArrayList<>();
- try (AvroToArrowVectorIterator iterator = convert(schema, data);) {
+ try (AvroToArrowVectorIterator iterator = convert(schema, data); ) {
while (iterator.hasNext()) {
VectorSchemaRoot root = iterator.next();
FieldVector vector = root.getFieldVectors().get(0);
@@ -117,7 +116,6 @@ public void testNullableStringType() throws Exception {
}
checkPrimitiveResult(expected, vectors);
AutoCloseables.close(roots);
-
}
@Test
@@ -140,18 +138,18 @@ public void testRecordType() throws Exception {
}
checkRecordResult(schema, data, roots);
AutoCloseables.close(roots);
-
}
@Test
public void testArrayType() throws Exception {
Schema schema = getSchema("test_array.avsc");
- List<List<?>> data = Arrays.asList(
- Arrays.asList("11", "222", "999"),
- Arrays.asList("12222", "2333", "1000"),
- Arrays.asList("1rrr", "2ggg"),
- Arrays.asList("1vvv", "2bbb"),
- Arrays.asList("1fff", "2"));
+ List<List<?>> data =
+ Arrays.asList(
+ Arrays.asList("11", "222", "999"),
+ Arrays.asList("12222", "2333", "1000"),
+ Arrays.asList("1rrr", "2ggg"),
+ Arrays.asList("1vvv", "2bbb"),
+ Arrays.asList("1fff", "2"));
List<VectorSchemaRoot> roots = new ArrayList<>();
List<ListVector> vectors = new ArrayList<>();
@@ -172,8 +170,9 @@ public void runLargeNumberOfRows() throws Exception {
int x = 0;
final int targetRows = 600000;
Decoder fakeDecoder = new FakeDecoder(targetRows);
- try (AvroToArrowVectorIterator iter = AvroToArrow.avroToArrowIterator(schema, fakeDecoder,
- new AvroToArrowConfigBuilder(config.getAllocator()).build())) {
+ try (AvroToArrowVectorIterator iter =
+ AvroToArrow.avroToArrowIterator(
+ schema, fakeDecoder, new AvroToArrowConfigBuilder(config.getAllocator()).build())) {
while (iter.hasNext()) {
VectorSchemaRoot root = iter.next();
x += root.getRowCount();
@@ -184,9 +183,7 @@ public void runLargeNumberOfRows() throws Exception {
assertEquals(x, targetRows);
}
- /**
- * Fake avro decoder to test large data.
- */
+ /** Fake avro decoder to test large data. */
private class FakeDecoder extends Decoder {
private int numRows;
@@ -204,8 +201,7 @@ private void validate() throws EOFException {
}
@Override
- public void readNull() throws IOException {
- }
+ public void readNull() throws IOException {}
@Override
public boolean readBoolean() throws IOException {
@@ -243,9 +239,7 @@ public String readString() throws IOException {
}
@Override
- public void skipString() throws IOException {
-
- }
+ public void skipString() throws IOException {}
@Override
public ByteBuffer readBytes(ByteBuffer old) throws IOException {
@@ -253,9 +247,7 @@ public ByteBuffer readBytes(ByteBuffer old) throws IOException {
}
@Override
- public void skipBytes() throws IOException {
-
- }
+ public void skipBytes() throws IOException {}
@Override
public void readFixed(byte[] bytes, int start, int length) throws IOException {
@@ -264,9 +256,7 @@ public void readFixed(byte[] bytes, int start, int length) throws IOException {
}
@Override
- public void skipFixed(int length) throws IOException {
-
- }
+ public void skipFixed(int length) throws IOException {}
@Override
public int readEnum() throws IOException {
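
For context, the iterator API this test drives, as a minimal usage sketch (per the `AvroToArrow.avroToArrowIterator` call above; each batch root is closed as it is consumed):

.. code-block:: java

   import org.apache.arrow.adapter.avro.AvroToArrow;
   import org.apache.arrow.adapter.avro.AvroToArrowConfig;
   import org.apache.arrow.adapter.avro.AvroToArrowVectorIterator;
   import org.apache.arrow.vector.VectorSchemaRoot;
   import org.apache.avro.Schema;
   import org.apache.avro.io.Decoder;

   class AvroIterateSketch {
     // Streams Avro data into Arrow batches and counts the rows.
     static long countRows(Schema schema, Decoder decoder, AvroToArrowConfig config)
         throws Exception {
       long rows = 0;
       try (AvroToArrowVectorIterator iterator =
           AvroToArrow.avroToArrowIterator(schema, decoder, config)) {
         while (iterator.hasNext()) {
           try (VectorSchemaRoot root = iterator.next()) {
             rows += root.getRowCount();
           }
         }
       }
       return rows;
     }
   }
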
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowTest.java b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowTest.java
index 1c642041917..12ec191d953 100644
--- a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowTest.java
+++ b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/AvroToArrowTest.java
@@ -26,7 +26,6 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.VarCharVector;
import org.apache.arrow.vector.VectorSchemaRoot;
@@ -107,12 +106,13 @@ public void testFixedAttributes() throws Exception {
@Test
public void testEnumAttributes() throws Exception {
Schema schema = getSchema("attrs/test_enum_attrs.avsc");
- List<GenericData.EnumSymbol> data = Arrays.asList(
- new GenericData.EnumSymbol(schema, "SPADES"),
- new GenericData.EnumSymbol(schema, "HEARTS"),
- new GenericData.EnumSymbol(schema, "DIAMONDS"),
- new GenericData.EnumSymbol(schema, "CLUBS"),
- new GenericData.EnumSymbol(schema, "SPADES"));
+ List<GenericData.EnumSymbol> data =
+ Arrays.asList(
+ new GenericData.EnumSymbol(schema, "SPADES"),
+ new GenericData.EnumSymbol(schema, "HEARTS"),
+ new GenericData.EnumSymbol(schema, "DIAMONDS"),
+ new GenericData.EnumSymbol(schema, "CLUBS"),
+ new GenericData.EnumSymbol(schema, "SPADES"));
VectorSchemaRoot root = writeAndRead(schema, data);
FieldVector vector = root.getFieldVectors().get(0);
@@ -174,12 +174,13 @@ public void testNestedRecordType() throws Exception {
@Test
public void testEnumType() throws Exception {
Schema schema = getSchema("test_primitive_enum.avsc");
- List<GenericData.EnumSymbol> data = Arrays.asList(
- new GenericData.EnumSymbol(schema, "SPADES"),
- new GenericData.EnumSymbol(schema, "HEARTS"),
- new GenericData.EnumSymbol(schema, "DIAMONDS"),
- new GenericData.EnumSymbol(schema, "CLUBS"),
- new GenericData.EnumSymbol(schema, "SPADES"));
+ List<GenericData.EnumSymbol> data =
+ Arrays.asList(
+ new GenericData.EnumSymbol(schema, "SPADES"),
+ new GenericData.EnumSymbol(schema, "HEARTS"),
+ new GenericData.EnumSymbol(schema, "DIAMONDS"),
+ new GenericData.EnumSymbol(schema, "CLUBS"),
+ new GenericData.EnumSymbol(schema, "SPADES"));
List<Integer> expectedIndices = Arrays.asList(0, 1, 2, 3, 0);
@@ -304,12 +305,13 @@ public void testNullableDoubleType() throws Exception {
@Test
public void testBytesType() throws Exception {
Schema schema = getSchema("test_primitive_bytes.avsc");
- List<ByteBuffer> data = Arrays.asList(
- ByteBuffer.wrap("value1".getBytes(StandardCharsets.UTF_8)),
- ByteBuffer.wrap("value2".getBytes(StandardCharsets.UTF_8)),
- ByteBuffer.wrap("value3".getBytes(StandardCharsets.UTF_8)),
- ByteBuffer.wrap("value4".getBytes(StandardCharsets.UTF_8)),
- ByteBuffer.wrap("value5".getBytes(StandardCharsets.UTF_8)));
+ List<ByteBuffer> data =
+ Arrays.asList(
+ ByteBuffer.wrap("value1".getBytes(StandardCharsets.UTF_8)),
+ ByteBuffer.wrap("value2".getBytes(StandardCharsets.UTF_8)),
+ ByteBuffer.wrap("value3".getBytes(StandardCharsets.UTF_8)),
+ ByteBuffer.wrap("value4".getBytes(StandardCharsets.UTF_8)),
+ ByteBuffer.wrap("value5".getBytes(StandardCharsets.UTF_8)));
VectorSchemaRoot root = writeAndRead(schema, data);
FieldVector vector = root.getFieldVectors().get(0);
@@ -324,7 +326,8 @@ public void testNullableBytesType() throws Exception {
ArrayList<GenericRecord> data = new ArrayList<>();
for (int i = 0; i < 5; i++) {
GenericRecord record = new GenericData.Record(schema);
- record.put(0, i % 2 == 0 ? ByteBuffer.wrap(("test" + i).getBytes(StandardCharsets.UTF_8)) : null);
+ record.put(
+ 0, i % 2 == 0 ? ByteBuffer.wrap(("test" + i).getBytes(StandardCharsets.UTF_8)) : null);
data.add(record);
}
@@ -361,12 +364,13 @@ public void testNullableBooleanType() throws Exception {
@Test
public void testArrayType() throws Exception {
Schema schema = getSchema("test_array.avsc");
- List<List<?>> data = Arrays.asList(
- Arrays.asList("11", "222", "999"),
- Arrays.asList("12222", "2333", "1000"),
- Arrays.asList("1rrr", "2ggg"),
- Arrays.asList("1vvv", "2bbb"),
- Arrays.asList("1fff", "2"));
+ List<List<?>> data =
+ Arrays.asList(
+ Arrays.asList("11", "222", "999"),
+ Arrays.asList("12222", "2333", "1000"),
+ Arrays.asList("1rrr", "2ggg"),
+ Arrays.asList("1vvv", "2bbb"),
+ Arrays.asList("1fff", "2"));
VectorSchemaRoot root = writeAndRead(schema, data);
FieldVector vector = root.getFieldVectors().get(0);
@@ -473,5 +477,4 @@ public void testNullableUnionType() throws Exception {
checkPrimitiveResult(expected, vector);
}
-
}
diff --git a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/TestWriteReadAvroRecord.java b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/TestWriteReadAvroRecord.java
index afbddaa6ed8..1924a58a94d 100644
--- a/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/TestWriteReadAvroRecord.java
+++ b/java/adapter/avro/src/test/java/org/apache/arrow/adapter/avro/TestWriteReadAvroRecord.java
@@ -22,7 +22,6 @@
import java.io.File;
import java.util.ArrayList;
import java.util.List;
-
import org.apache.avro.Schema;
import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.DataFileWriter;
@@ -36,11 +35,9 @@
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
-
public class TestWriteReadAvroRecord {
- @ClassRule
- public static final TemporaryFolder TMP = new TemporaryFolder();
+ @ClassRule public static final TemporaryFolder TMP = new TemporaryFolder();
@Test
public void testWriteAndRead() throws Exception {
@@ -48,7 +45,7 @@ public void testWriteAndRead() throws Exception {
File dataFile = TMP.newFile();
Schema schema = AvroTestBase.getSchema("test.avsc");
- //write data to disk
+ // write data to disk
GenericRecord user1 = new GenericData.Record(schema);
user1.put("name", "Alyssa");
user1.put("favorite_number", 256);
@@ -65,10 +62,10 @@ public void testWriteAndRead() throws Exception {
dataFileWriter.append(user2);
dataFileWriter.close();
- //read data from disk
+ // read data from disk
DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
- DataFileReader<GenericRecord>
- dataFileReader = new DataFileReader<GenericRecord>(dataFile, datumReader);
+ DataFileReader<GenericRecord> dataFileReader =
+ new DataFileReader<GenericRecord>(dataFile, datumReader);
List<GenericRecord> result = new ArrayList<>();
while (dataFileReader.hasNext()) {
GenericRecord user = dataFileReader.next();
@@ -86,5 +83,4 @@ public void testWriteAndRead() throws Exception {
assertEquals(7, deUser2.get("favorite_number"));
assertEquals("red", deUser2.get("favorite_color").toString());
}
-
}
diff --git a/java/adapter/avro/src/test/resources/logback.xml b/java/adapter/avro/src/test/resources/logback.xml
index 4c54d18a210..9fa3f1e3b43 100644
--- a/java/adapter/avro/src/test/resources/logback.xml
+++ b/java/adapter/avro/src/test/resources/logback.xml
@@ -1,4 +1,4 @@
-
+
-
diff --git a/java/adapter/jdbc/pom.xml b/java/adapter/jdbc/pom.xml
index dfcd4cfe8f6..e3ce10c02e3 100644
--- a/java/adapter/jdbc/pom.xml
+++ b/java/adapter/jdbc/pom.xml
@@ -1,116 +1,115 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more contributor
-  license agreements.  See the NOTICE file distributed with this work for additional
-  information regarding copyright ownership.  The ASF licenses this file to
-  You under the Apache License, Version 2.0 (the "License"); you may not use
-  this file except in compliance with the License.  You may obtain a copy of
-  the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software distributed under the
-  License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
-  OF ANY KIND, either express or implied.  See the License for the specific
-  language governing permissions and limitations under the License.
--->
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor
+ license agreements. See the NOTICE file distributed with this work for additional
+ information regarding copyright ownership. The ASF licenses this file to
+ You under the Apache License, Version 2.0 (the "License"); you may not use
+ this file except in compliance with the License. You may obtain a copy of
+ the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required
+ by applicable law or agreed to in writing, software distributed under the
+ License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
+ OF ANY KIND, either express or implied. See the License for the specific
+ language governing permissions and limitations under the License. -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.arrow</groupId>
-    <artifactId>arrow-java-root</artifactId>
-    <version>16.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>arrow-jdbc</artifactId>
-  <name>Arrow JDBC Adapter</name>
-  <description>(Contrib/Experimental)A library for converting JDBC data to Arrow data.</description>
-  <url>http://maven.apache.org</url>
-
-  <dependencies>
-
-    <dependency>
-      <groupId>org.apache.arrow</groupId>
-      <artifactId>arrow-memory-core</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.arrow</groupId>
-      <artifactId>arrow-memory-netty</artifactId>
-      <scope>runtime</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.apache.arrow</groupId>
-      <artifactId>arrow-vector</artifactId>
-      <classifier>${arrow.vector.classifier}</classifier>
-    </dependency>
-
-    <dependency>
-      <groupId>org.immutables</groupId>
-      <artifactId>value</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.h2database</groupId>
-      <artifactId>h2</artifactId>
-      <version>2.2.224</version>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>com.fasterxml.jackson.dataformat</groupId>
-      <artifactId>jackson-dataformat-yaml</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-databind</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-core</artifactId>
-    </dependency>
-
-    <dependency>
-      <groupId>com.fasterxml.jackson.core</groupId>
-      <artifactId>jackson-annotations</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-    <dependency>
-      <groupId>org.assertj</groupId>
-      <artifactId>assertj-core</artifactId>
-      <scope>test</scope>
-    </dependency>
-
-  </dependencies>
-
-  <profiles>
-    <profile>
-      <id>jdk11+</id>
-      <activation>
-        <jdk>[11,]</jdk>
-        <property>
-          <name>!m2e.version</name>
-        </property>
-      </activation>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-surefire-plugin</artifactId>
-            <configuration>
-              <argLine>--add-reads=org.apache.arrow.adapter.jdbc=com.fasterxml.jackson.dataformat.yaml --add-opens=java.base/java.nio=org.apache.arrow.memory.core,ALL-UNNAMED</argLine>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
-</project>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.arrow</groupId>
+    <artifactId>arrow-java-root</artifactId>
+    <version>16.0.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>arrow-jdbc</artifactId>
+  <name>Arrow JDBC Adapter</name>
+  <description>(Contrib/Experimental)A library for converting JDBC data to Arrow data.</description>
+  <url>http://maven.apache.org</url>
+
+  <dependencies>
+
+    <dependency>
+      <groupId>org.apache.arrow</groupId>
+      <artifactId>arrow-memory-core</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.arrow</groupId>
+      <artifactId>arrow-memory-netty</artifactId>
+      <scope>runtime</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.arrow</groupId>
+      <artifactId>arrow-vector</artifactId>
+      <classifier>${arrow.vector.classifier}</classifier>
+    </dependency>
+
+    <dependency>
+      <groupId>org.immutables</groupId>
+      <artifactId>value</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.h2database</groupId>
+      <artifactId>h2</artifactId>
+      <version>2.2.224</version>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.dataformat</groupId>
+      <artifactId>jackson-dataformat-yaml</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-databind</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-core</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.fasterxml.jackson.core</groupId>
+      <artifactId>jackson-annotations</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <scope>test</scope>
+    </dependency>
+
+  </dependencies>
+
+  <profiles>
+    <profile>
+      <id>jdk11+</id>
+      <activation>
+        <jdk>[11,]</jdk>
+        <property>
+          <name>!m2e.version</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.maven.plugins</groupId>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <configuration>
+              <argLine>--add-reads=org.apache.arrow.adapter.jdbc=com.fasterxml.jackson.dataformat.yaml --add-opens=java.base/java.nio=org.apache.arrow.memory.core,ALL-UNNAMED</argLine>
+            </configuration>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java
index 427c766982f..56aea04db9d 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/ArrowVectorIterator.java
@@ -23,7 +23,6 @@
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Iterator;
-
import org.apache.arrow.adapter.jdbc.consumer.CompositeJdbcConsumer;
import org.apache.arrow.adapter.jdbc.consumer.JdbcConsumer;
import org.apache.arrow.adapter.jdbc.consumer.exceptions.JdbcConsumerException;
@@ -35,9 +34,7 @@
import org.apache.arrow.vector.types.pojo.Schema;
import org.apache.arrow.vector.util.ValueVectorUtility;
-/**
- * VectorSchemaRoot iterator for partially converting JDBC data.
- */
+/** VectorSchemaRoot iterator for partially converting JDBC data. */
public class ArrowVectorIterator implements Iterator<VectorSchemaRoot>, AutoCloseable {
private final ResultSet resultSet;
@@ -54,13 +51,12 @@ public class ArrowVectorIterator implements Iterator<VectorSchemaRoot>, AutoCloseable {
private final int targetBatchSize;
- // This is used to track whether the ResultSet has been fully read, and is needed specifically for cases where there
+ // This is used to track whether the ResultSet has been fully read, and is needed specifically for
+ // cases where there
// is a ResultSet having zero rows (empty):
private boolean readComplete = false;
- /**
- * Construct an instance.
- */
+ /** Construct an instance. */
private ArrowVectorIterator(ResultSet resultSet, JdbcToArrowConfig config) throws SQLException {
this.resultSet = resultSet;
this.config = config;
@@ -73,12 +69,8 @@ private ArrowVectorIterator(ResultSet resultSet, JdbcToArrowConfig config) throw
this.nextBatch = config.isReuseVectorSchemaRoot() ? createVectorSchemaRoot() : null;
}
- /**
- * Create a ArrowVectorIterator to partially convert data.
- */
- public static ArrowVectorIterator create(
- ResultSet resultSet,
- JdbcToArrowConfig config)
+ /** Create an ArrowVectorIterator to partially convert data. */
+ public static ArrowVectorIterator create(ResultSet resultSet, JdbcToArrowConfig config)
throws SQLException {
ArrowVectorIterator iterator = null;
try {
@@ -142,10 +134,18 @@ private VectorSchemaRoot createVectorSchemaRoot() throws SQLException {
private void initialize(VectorSchemaRoot root) throws SQLException {
for (int i = 1; i <= consumers.length; i++) {
- final JdbcFieldInfo columnFieldInfo = JdbcToArrowUtils.getJdbcFieldInfoForColumn(rsmd, i, config);
+ final JdbcFieldInfo columnFieldInfo =
+ JdbcToArrowUtils.getJdbcFieldInfoForColumn(rsmd, i, config);
ArrowType arrowType = config.getJdbcToArrowTypeConverter().apply(columnFieldInfo);
- consumers[i - 1] = config.getJdbcConsumerGetter().apply(
- arrowType, i, isColumnNullable(resultSet.getMetaData(), i, columnFieldInfo), root.getVector(i - 1), config);
+ consumers[i - 1] =
+ config
+ .getJdbcConsumerGetter()
+ .apply(
+ arrowType,
+ i,
+ isColumnNullable(resultSet.getMetaData(), i, columnFieldInfo),
+ root.getVector(i - 1),
+ config);
}
}
@@ -170,16 +170,17 @@ public boolean hasNext() {
}
/**
- * Gets the next vector.
- * If {@link JdbcToArrowConfig#isReuseVectorSchemaRoot()} is false,
- * the client is responsible for freeing its resources.
+ * Gets the next vector. If {@link JdbcToArrowConfig#isReuseVectorSchemaRoot()} is false, the
+ * client is responsible for freeing its resources.
+ *
* @throws JdbcConsumerException on error from VectorConsumer
*/
@Override
public VectorSchemaRoot next() {
Preconditions.checkArgument(hasNext());
try {
- VectorSchemaRoot ret = config.isReuseVectorSchemaRoot() ? nextBatch : createVectorSchemaRoot();
+ VectorSchemaRoot ret =
+ config.isReuseVectorSchemaRoot() ? nextBatch : createVectorSchemaRoot();
load(ret);
return ret;
} catch (Exception e) {
@@ -193,8 +194,9 @@ public VectorSchemaRoot next() {
}
/**
- * Clean up resources ONLY WHEN THE {@link VectorSchemaRoot} HOLDING EACH BATCH IS REUSED. If a new VectorSchemaRoot
- * is created for each batch, each root must be closed manually by the client code.
+ * Clean up resources ONLY WHEN THE {@link VectorSchemaRoot} HOLDING EACH BATCH IS REUSED. If a
+ * new VectorSchemaRoot is created for each batch, each root must be closed manually by the client
+ * code.
*/
@Override
public void close() {
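
A matching usage sketch for the javadoc above (assumes `isReuseVectorSchemaRoot()` is false, so the caller closes each root):

.. code-block:: java

   import java.sql.ResultSet;
   import org.apache.arrow.adapter.jdbc.ArrowVectorIterator;
   import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
   import org.apache.arrow.vector.VectorSchemaRoot;

   class JdbcIterateSketch {
     // Converts a ResultSet batch by batch, closing each root after use.
     static long countRows(ResultSet resultSet, JdbcToArrowConfig config) throws Exception {
       long rows = 0;
       try (ArrowVectorIterator iterator = ArrowVectorIterator.create(resultSet, config)) {
         while (iterator.hasNext()) {
           try (VectorSchemaRoot root = iterator.next()) {
             rows += root.getRowCount();
           }
         }
       }
       return rows;
     }
   }
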
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java
index 5b01077b179..ce5f28680d8 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/Constants.java
@@ -17,9 +17,7 @@
package org.apache.arrow.adapter.jdbc;
-/**
- * String constants used for metadata returned on Vectors.
- */
+/** String constants used for metadata returned on Vectors. */
public class Constants {
private Constants() {}
@@ -28,5 +26,4 @@ private Constants() {}
public static final String SQL_TABLE_NAME_KEY = "SQL_TABLE_NAME";
public static final String SQL_COLUMN_NAME_KEY = "SQL_COLUMN_NAME";
public static final String SQL_TYPE_KEY = "SQL_TYPE";
-
}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java
index d16964ea144..536b837d5bd 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcFieldInfo.java
@@ -21,18 +21,19 @@
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.util.Preconditions;
/**
- * This class represents the information about a JDBC ResultSet Field that is
- * needed to construct an {@link org.apache.arrow.vector.types.pojo.ArrowType}.
- * Currently, this is:
+ * This class represents the information about a JDBC ResultSet Field that is needed to construct an
+ * {@link org.apache.arrow.vector.types.pojo.ArrowType}. Currently, this is:
+ *
* <ul>
- *   <li>The JDBC {@link java.sql.Types} type.
- *   <li>The nullability.
- *   <li>The field's precision (used for {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC} types).
- *   <li>The field's scale (used for {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC} types).
+ *   <li>The JDBC {@link java.sql.Types} type.
+ *   <li>The nullability.
+ *   <li>The field's precision (used for {@link java.sql.Types#DECIMAL} and {@link
+ *       java.sql.Types#NUMERIC} types).
+ *   <li>The field's scale (used for {@link java.sql.Types#DECIMAL} and {@link
+ *       java.sql.Types#NUMERIC} types).
* </ul>
*/
public class JdbcFieldInfo {
@@ -45,12 +46,13 @@ public class JdbcFieldInfo {
private final int displaySize;
/**
- * Builds a <code>JdbcFieldInfo</code> using only the {@link java.sql.Types} type. Do not use this constructor
- * if the field type is {@link java.sql.Types#DECIMAL} or {@link java.sql.Types#NUMERIC}; the precision and
- * scale will be set to 0.
+ * Builds a <code>JdbcFieldInfo</code> using only the {@link java.sql.Types} type. Do not use this
+ * constructor if the field type is {@link java.sql.Types#DECIMAL} or {@link
+ * java.sql.Types#NUMERIC}; the precision and scale will be set to 0.
*
* @param jdbcType The {@link java.sql.Types} type.
- * @throws IllegalArgumentException if jdbcType is {@link java.sql.Types#DECIMAL} or {@link java.sql.Types#NUMERIC}.
+ * @throws IllegalArgumentException if jdbcType is {@link java.sql.Types#DECIMAL} or {@link
+ * java.sql.Types#NUMERIC}.
*/
public JdbcFieldInfo(int jdbcType) {
Preconditions.checkArgument(
@@ -67,7 +69,8 @@ public JdbcFieldInfo(int jdbcType) {
/**
* Builds a JdbcFieldInfo from the {@link java.sql.Types} type, precision, and scale.
- * Use this constructor for {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC} types.
+ * Use this constructor for {@link java.sql.Types#DECIMAL} and {@link java.sql.Types#NUMERIC}
+ * types.
*
* @param jdbcType The {@link java.sql.Types} type.
* @param precision The field's numeric precision.
@@ -84,11 +87,13 @@ public JdbcFieldInfo(int jdbcType, int precision, int scale) {
}
/**
- * Builds a JdbcFieldInfo from the {@link java.sql.Types} type, nullability, precision, and scale.
+ * Builds a JdbcFieldInfo from the {@link java.sql.Types} type, nullability,
+ * precision, and scale.
*
* @param jdbcType The {@link java.sql.Types} type.
* @param nullability The nullability. Must be one of {@link ResultSetMetaData#columnNoNulls},
- * {@link ResultSetMetaData#columnNullable}, or {@link ResultSetMetaData#columnNullableUnknown}.
+ * {@link ResultSetMetaData#columnNullable}, or {@link
+ * ResultSetMetaData#columnNullableUnknown}.
* @param precision The field's numeric precision.
* @param scale The field's numeric scale.
*/
@@ -103,7 +108,8 @@ public JdbcFieldInfo(int jdbcType, int nullability, int precision, int scale) {
}
/**
- * Builds a JdbcFieldInfo from the corresponding {@link java.sql.ResultSetMetaData} column.
+ * Builds a JdbcFieldInfo from the corresponding {@link java.sql.ResultSetMetaData}
+ * column.
*
* @param rsmd The {@link java.sql.ResultSetMetaData} to get the field information from.
* @param column The column to get the field information for (on a 1-based index).
@@ -113,10 +119,12 @@ public JdbcFieldInfo(int jdbcType, int nullability, int precision, int scale) {
*/
public JdbcFieldInfo(ResultSetMetaData rsmd, int column) throws SQLException {
Preconditions.checkNotNull(rsmd, "ResultSetMetaData cannot be null.");
- Preconditions.checkArgument(column > 0, "ResultSetMetaData columns have indices starting at 1.");
+ Preconditions.checkArgument(
+ column > 0, "ResultSetMetaData columns have indices starting at 1.");
Preconditions.checkArgument(
column <= rsmd.getColumnCount(),
- "The index must be within the number of columns (1 to %s, inclusive)", rsmd.getColumnCount());
+ "The index must be within the number of columns (1 to %s, inclusive)",
+ rsmd.getColumnCount());
this.column = column;
this.jdbcType = rsmd.getColumnType(column);
@@ -128,8 +136,8 @@ public JdbcFieldInfo(ResultSetMetaData rsmd, int column) throws SQLException {
}
/**
- * Builds a JdbcFieldInfo from the corresponding row from a {@link java.sql.DatabaseMetaData#getColumns}
- * ResultSet.
+ * Builds a JdbcFieldInfo from the corresponding row from a {@link
+ * java.sql.DatabaseMetaData#getColumns} ResultSet.
*
* @param rs The {@link java.sql.ResultSet} to get the field information from.
* @throws SQLException If the column information cannot be retrieved.
@@ -144,51 +152,42 @@ public JdbcFieldInfo(ResultSet rs) throws SQLException {
this.displaySize = rs.getInt("CHAR_OCTET_LENGTH");
}
- /**
- * The {@link java.sql.Types} type.
- */
+ /** The {@link java.sql.Types} type. */
public int getJdbcType() {
return jdbcType;
}
- /**
- * The nullability.
- */
+ /** The nullability. */
public int isNullable() {
return nullability;
}
/**
- * The numeric precision, for {@link java.sql.Types#NUMERIC} and {@link java.sql.Types#DECIMAL} types.
+ * The numeric precision, for {@link java.sql.Types#NUMERIC} and {@link java.sql.Types#DECIMAL}
+ * types.
*/
public int getPrecision() {
return precision;
}
/**
- * The numeric scale, for {@link java.sql.Types#NUMERIC} and {@link java.sql.Types#DECIMAL} types.
+ * The numeric scale, for {@link java.sql.Types#NUMERIC} and {@link java.sql.Types#DECIMAL} types.
*/
public int getScale() {
return scale;
}
- /**
- * The column index for query column.
- */
+ /** The column index for query column. */
public int getColumn() {
return column;
}
- /**
- * The type name as reported by the database.
- */
+ /** The type name as reported by the database. */
public String getTypeName() {
return typeName;
}
- /**
- * The max number of characters for the column.
- */
+ /** The max number of characters for the column. */
public int getDisplaySize() {
return displaySize;
}
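
The hunks above only rewrap JdbcFieldInfo's Javadoc, but the constructor contract they describe is easy to misuse. A minimal sketch of the two constructors involved (variable names are illustrative, not from this patch):

.. code-block:: java

    import java.sql.Types;
    import org.apache.arrow.adapter.jdbc.JdbcFieldInfo;

    public class JdbcFieldInfoSketch {
      public static void main(String[] args) {
        // Non-decimal column: the single-argument constructor is enough.
        JdbcFieldInfo intInfo = new JdbcFieldInfo(Types.INTEGER);

        // DECIMAL/NUMERIC columns must carry precision and scale; the
        // single-argument constructor rejects them with IllegalArgumentException.
        JdbcFieldInfo decimalInfo = new JdbcFieldInfo(Types.DECIMAL, 18, 2);

        System.out.println(decimalInfo.getPrecision() + "/" + decimalInfo.getScale());
      }
    }
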
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcParameterBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcParameterBinder.java
index 2dfc0658cb8..05159d04c86 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcParameterBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcParameterBinder.java
@@ -21,7 +21,6 @@
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
-
import org.apache.arrow.adapter.jdbc.binder.ColumnBinder;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.VectorSchemaRoot;
@@ -29,8 +28,8 @@
/**
* A binder binds JDBC prepared statement parameters to rows of Arrow data from a VectorSchemaRoot.
*
- * Each row of the VectorSchemaRoot will be bound to the configured parameters of the PreparedStatement.
- * One row of data is bound at a time.
+ * <p>Each row of the VectorSchemaRoot will be bound to the configured parameters of the
+ * PreparedStatement. One row of data is bound at a time.
*/
public class JdbcParameterBinder {
private final PreparedStatement statement;
@@ -44,8 +43,10 @@ public class JdbcParameterBinder {
*
* @param statement The statement to bind parameters to.
* @param root The VectorSchemaRoot to pull data from.
- * @param binders Column binders to translate from Arrow data to JDBC parameters, one per parameter.
- * @param parameterIndices For each binder in binders, the index of the parameter to bind to.
+ * @param binders Column binders to translate from Arrow data to JDBC parameters, one per
+ * parameter.
+ * @param parameterIndices For each binder in binders, the index of the parameter to bind
+ * to.
*/
private JdbcParameterBinder(
final PreparedStatement statement,
@@ -55,7 +56,8 @@ private JdbcParameterBinder(
Preconditions.checkArgument(
binders.length == parameterIndices.length,
"Number of column binders (%s) must equal number of parameter indices (%s)",
- binders.length, parameterIndices.length);
+ binders.length,
+ parameterIndices.length);
this.statement = statement;
this.root = root;
this.binders = binders;
@@ -66,9 +68,10 @@ private JdbcParameterBinder(
/**
* Initialize a binder with a builder.
*
- * @param statement The statement to bind to. The binder does not maintain ownership of the statement.
- * @param root The {@link VectorSchemaRoot} to pull data from. The binder does not maintain ownership
- * of the vector schema root.
+ * @param statement The statement to bind to. The binder does not maintain ownership of the
+ * statement.
+ * @param root The {@link VectorSchemaRoot} to pull data from. The binder does not maintain
+ * ownership of the vector schema root.
*/
public static Builder builder(final PreparedStatement statement, final VectorSchemaRoot root) {
return new Builder(statement, root);
@@ -82,8 +85,8 @@ public void reset() {
/**
* Bind the next row of data to the parameters of the statement.
*
- * After this, the application should call the desired method on the prepared statement,
- * such as {@link PreparedStatement#executeUpdate()}, or {@link PreparedStatement#addBatch()}.
+ * <p>After this, the application should call the desired method on the prepared statement, such
+ * as {@link PreparedStatement#executeUpdate()}, or {@link PreparedStatement#addBatch()}.
*
* @return true if a row was bound, false if rows were exhausted
*/
@@ -99,9 +102,7 @@ public boolean next() throws SQLException {
return true;
}
- /**
- * A builder for a {@link JdbcParameterBinder}.
- */
+ /** A builder for a {@link JdbcParameterBinder}. */
public static class Builder {
private final PreparedStatement statement;
private final VectorSchemaRoot root;
@@ -116,16 +117,14 @@ public static class Builder {
/** Bind each column to the corresponding parameter in order. */
public Builder bindAll() {
for (int i = 0; i < root.getFieldVectors().size(); i++) {
- bind(/*parameterIndex=*/ i + 1, /*columnIndex=*/ i);
+ bind(/* parameterIndex= */ i + 1, /* columnIndex= */ i);
}
return this;
}
/** Bind the given parameter to the given column using the default binder. */
public Builder bind(int parameterIndex, int columnIndex) {
- return bind(
- parameterIndex,
- ColumnBinder.forVector(root.getVector(columnIndex)));
+ return bind(parameterIndex, ColumnBinder.forVector(root.getVector(columnIndex)));
}
/** Bind the given parameter using the given binder. */
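
For orientation while reviewing the binder hunks: the documented flow is to build a binder over a statement and a VectorSchemaRoot, then call next() once per row before executing or batching. A sketch, assuming the Builder exposes the usual terminal build() method (not shown in this hunk); the table and column names are invented:

.. code-block:: java

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.SQLException;
    import org.apache.arrow.adapter.jdbc.JdbcParameterBinder;
    import org.apache.arrow.vector.VectorSchemaRoot;

    public class BinderSketch {
      // Bind every row of the root to the statement's parameters, one batch entry per row.
      static void insertAll(Connection conn, VectorSchemaRoot root) throws SQLException {
        try (PreparedStatement stmt =
            conn.prepareStatement("INSERT INTO t (a, b) VALUES (?, ?)")) {
          JdbcParameterBinder binder =
              JdbcParameterBinder.builder(stmt, root).bindAll().build();
          while (binder.next()) { // binds one row; returns false when rows are exhausted
            stmt.addBatch();
          }
          stmt.executeBatch();
        }
      }
    }
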
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java
index 246451b5b22..89ebd420abc 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrow.java
@@ -20,7 +20,6 @@
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
-
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.util.Preconditions;
@@ -29,44 +28,32 @@
*
 * <p>This utility uses following data mapping to map JDBC/SQL datatype to Arrow data types.
*
- * <p>CHAR --> ArrowType.Utf8
+ * <p>CHAR --> ArrowType.Utf8 NCHAR --> ArrowType.Utf8 VARCHAR --> ArrowType.Utf8 NVARCHAR -->
+ * ArrowType.Utf8 LONGVARCHAR --> ArrowType.Utf8 LONGNVARCHAR --> ArrowType.Utf8 NUMERIC -->
+ * ArrowType.Decimal(precision, scale) DECIMAL --> ArrowType.Decimal(precision, scale) BIT -->
+ * ArrowType.Bool TINYINT --> ArrowType.Int(8, signed) SMALLINT --> ArrowType.Int(16, signed)
+ * INTEGER --> ArrowType.Int(32, signed) BIGINT --> ArrowType.Int(64, signed) REAL -->
+ * ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE) FLOAT -->
+ * ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE) DOUBLE -->
+ * ArrowType.FloatingPoint(FloatingPointPrecision.DOUBLE) BINARY --> ArrowType.Binary VARBINARY -->
+ * ArrowType.Binary LONGVARBINARY --> ArrowType.Binary DATE --> ArrowType.Date(DateUnit.MILLISECOND)
+ * TIME --> ArrowType.Time(TimeUnit.MILLISECOND, 32) TIMESTAMP -->
+ * ArrowType.Timestamp(TimeUnit.MILLISECOND, timezone=null) CLOB --> ArrowType.Utf8 BLOB -->
+ * ArrowType.Binary
*
* @since 0.10.0
*/
public class JdbcToArrow {
/*----------------------------------------------------------------*
- | |
- | Partial Convert API |
- | |
+ | |
+ | Partial Convert API |
+ | |
*----------------------------------------------------------------*/
/**
- * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow objects.
- * Note here uses the default targetBatchSize = 1024.
+ * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow
+ * objects. Note here uses the default targetBatchSize = 1024.
*
* @param resultSet ResultSet to use to fetch the data from underlying database
* @param allocator Memory allocator
@@ -74,28 +61,25 @@ public class JdbcToArrow {
* @throws SQLException on error
*/
public static ArrowVectorIterator sqlToArrowVectorIterator(
- ResultSet resultSet,
- BufferAllocator allocator)
- throws SQLException, IOException {
+ ResultSet resultSet, BufferAllocator allocator) throws SQLException, IOException {
Preconditions.checkNotNull(allocator, "Memory Allocator object cannot be null");
- JdbcToArrowConfig config =
- new JdbcToArrowConfig(allocator, JdbcToArrowUtils.getUtcCalendar());
+ JdbcToArrowConfig config = new JdbcToArrowConfig(allocator, JdbcToArrowUtils.getUtcCalendar());
return sqlToArrowVectorIterator(resultSet, config);
}
/**
- * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow objects.
- * Note if not specify {@link JdbcToArrowConfig#targetBatchSize}, will use default value 1024.
+ * For the given JDBC {@link ResultSet}, fetch the data from Relational DB and convert it to Arrow
+ * objects. Note if not specify {@link JdbcToArrowConfig#targetBatchSize}, will use default value
+ * 1024.
+ *
* @param resultSet ResultSet to use to fetch the data from underlying database
- * @param config Configuration of the conversion from JDBC to Arrow.
+ * @param config Configuration of the conversion from JDBC to Arrow.
* @return Arrow Data Objects {@link ArrowVectorIterator}
* @throws SQLException on error
*/
public static ArrowVectorIterator sqlToArrowVectorIterator(
- ResultSet resultSet,
- JdbcToArrowConfig config)
- throws SQLException, IOException {
+ ResultSet resultSet, JdbcToArrowConfig config) throws SQLException, IOException {
Preconditions.checkNotNull(resultSet, "JDBC ResultSet object cannot be null");
Preconditions.checkNotNull(config, "The configuration cannot be null");
return ArrowVectorIterator.create(resultSet, config);
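
A usage sketch for the iterator API touched above, assuming ArrowVectorIterator's Iterator/AutoCloseable surface (hasNext/next/close). Because reuseVectorSchemaRoot is not enabled here, each returned root is closed individually, matching the close() contract quoted at the top of this diff:

.. code-block:: java

    import java.sql.ResultSet;
    import org.apache.arrow.adapter.jdbc.ArrowVectorIterator;
    import org.apache.arrow.adapter.jdbc.JdbcToArrow;
    import org.apache.arrow.memory.BufferAllocator;
    import org.apache.arrow.memory.RootAllocator;
    import org.apache.arrow.vector.VectorSchemaRoot;

    public class IteratorSketch {
      static void consume(ResultSet resultSet) throws Exception {
        try (BufferAllocator allocator = new RootAllocator();
            ArrowVectorIterator it =
                JdbcToArrow.sqlToArrowVectorIterator(resultSet, allocator)) {
          while (it.hasNext()) {
            // Each batch holds at most the default targetBatchSize (1024) rows.
            try (VectorSchemaRoot root = it.next()) {
              System.out.println(root.getRowCount());
            }
          }
        }
      }
    }
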
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java
index 68851f4a98b..6470dc58a6f 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfig.java
@@ -21,7 +21,6 @@
import java.util.Calendar;
import java.util.Map;
import java.util.function.Function;
-
import org.apache.arrow.adapter.jdbc.consumer.JdbcConsumer;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.util.Preconditions;
@@ -30,25 +29,23 @@
/**
* This class configures the JDBC-to-Arrow conversion process.
- *
- * The allocator is used to construct the {@link org.apache.arrow.vector.VectorSchemaRoot},
- * and the calendar is used to define the time zone of any
- * {@link org.apache.arrow.vector.types.pojo.ArrowType.Timestamp}
- * fields that are created during the conversion. Neither field may be null.
- *
- *
- * If the includeMetadata flag is set, the Arrow field metadata will contain information
- * from the corresponding {@link java.sql.ResultSetMetaData} that was used to create the
- * {@link org.apache.arrow.vector.types.pojo.FieldType} of the corresponding
- * {@link org.apache.arrow.vector.FieldVector}.
- *
- *
- * If there are any {@link java.sql.Types#ARRAY} fields in the {@link java.sql.ResultSet}, the corresponding
- * {@link JdbcFieldInfo} for the array's contents must be defined here. Unfortunately, the sub-type
- * information cannot be retrieved from all JDBC implementations (H2 for example, returns
- * {@link java.sql.Types#NULL} for the array sub-type), so it must be configured here. The column index
- * or name can be used to map to a {@link JdbcFieldInfo}, and that will be used for the conversion.
- *
+ *
+ * <p>The allocator is used to construct the {@link org.apache.arrow.vector.VectorSchemaRoot}, and
+ * the calendar is used to define the time zone of any {@link
+ * org.apache.arrow.vector.types.pojo.ArrowType.Timestamp} fields that are created during the
+ * conversion. Neither field may be null.
+ *
+ * <p>If the <code>includeMetadata</code> flag is set, the Arrow field metadata will contain
+ * information from the corresponding {@link java.sql.ResultSetMetaData} that was used to create the
+ * {@link org.apache.arrow.vector.types.pojo.FieldType} of the corresponding {@link
+ * org.apache.arrow.vector.FieldVector}.
+ *
+ * <p>If there are any {@link java.sql.Types#ARRAY} fields in the {@link java.sql.ResultSet}, the
+ * corresponding {@link JdbcFieldInfo} for the array's contents must be defined here. Unfortunately,
+ * the sub-type information cannot be retrieved from all JDBC implementations (H2 for example,
+ * returns {@link java.sql.Types#NULL} for the array sub-type), so it must be configured here. The
+ * column index or name can be used to map to a {@link JdbcFieldInfo}, and that will be used for the
+ * conversion.
*/
public final class JdbcToArrowConfig {
@@ -65,15 +62,14 @@ public final class JdbcToArrowConfig {
  private final Map<String, String> schemaMetadata;
  private final Map<Integer, Map<String, String>> columnMetadataByColumnIndex;
private final RoundingMode bigDecimalRoundingMode;
+
/**
- * The maximum rowCount to read each time when partially convert data.
- * Default value is 1024 and -1 means disable partial read.
- * default is -1 which means disable partial read.
- * Note that this flag only useful for {@link JdbcToArrow#sqlToArrowVectorIterator}
- * 1) if targetBatchSize != -1, it will convert full data into multiple vectors
- * with valueCount no more than targetBatchSize.
- * 2) if targetBatchSize == -1, it will convert full data into a single vector in {@link ArrowVectorIterator}
- *
+ * The maximum rowCount to read each time when partially convert data. Default value is 1024 and
+ * -1 means disable partial read. default is -1 which means disable partial read. Note that this
+ * flag only useful for {@link JdbcToArrow#sqlToArrowVectorIterator} 1) if targetBatchSize != -1,
+ * it will convert full data into multiple vectors with valueCount no more than targetBatchSize.
+ * 2) if targetBatchSize == -1, it will convert full data into a single vector in {@link
+ * ArrowVectorIterator}
*/
private final int targetBatchSize;
@@ -81,81 +77,100 @@ public final class JdbcToArrowConfig {
private final JdbcConsumerFactory jdbcConsumerGetter;
/**
- * Constructs a new configuration from the provided allocator and calendar. The allocator
- * is used when constructing the Arrow vectors from the ResultSet, and the calendar is used to define
- * Arrow Timestamp fields, and to read time-based fields from the JDBC ResultSet.
+ * Constructs a new configuration from the provided allocator and calendar. The allocator
+ * is used when constructing the Arrow vectors from the ResultSet, and the calendar is
+ * used to define Arrow Timestamp fields, and to read time-based fields from the JDBC
+ * ResultSet.
*
- * @param allocator The memory allocator to construct the Arrow vectors with.
- * @param calendar The calendar to use when constructing Timestamp fields and reading time-based results.
+ * @param allocator The memory allocator to construct the Arrow vectors with.
+ * @param calendar The calendar to use when constructing Timestamp fields and reading time-based
+ * results.
*/
JdbcToArrowConfig(BufferAllocator allocator, Calendar calendar) {
- this(allocator, calendar,
+ this(
+ allocator,
+ calendar,
/* include metadata */ false,
/* reuse vector schema root */ false,
/* array sub-types by column index */ null,
/* array sub-types by column name */ null,
- DEFAULT_TARGET_BATCH_SIZE, null, null);
+ DEFAULT_TARGET_BATCH_SIZE,
+ null,
+ null);
}
JdbcToArrowConfig(
- BufferAllocator allocator,
- Calendar calendar,
- boolean includeMetadata,
- boolean reuseVectorSchemaRoot,
-      Map<Integer, JdbcFieldInfo> arraySubTypesByColumnIndex,
-      Map<String, JdbcFieldInfo> arraySubTypesByColumnName,
-      int targetBatchSize,
-      Function<JdbcFieldInfo, ArrowType> jdbcToArrowTypeConverter) {
- this(allocator, calendar, includeMetadata, reuseVectorSchemaRoot, arraySubTypesByColumnIndex,
- arraySubTypesByColumnName, targetBatchSize, jdbcToArrowTypeConverter, null);
+ BufferAllocator allocator,
+ Calendar calendar,
+ boolean includeMetadata,
+ boolean reuseVectorSchemaRoot,
+      Map<Integer, JdbcFieldInfo> arraySubTypesByColumnIndex,
+      Map<String, JdbcFieldInfo> arraySubTypesByColumnName,
+      int targetBatchSize,
+      Function<JdbcFieldInfo, ArrowType> jdbcToArrowTypeConverter) {
+ this(
+ allocator,
+ calendar,
+ includeMetadata,
+ reuseVectorSchemaRoot,
+ arraySubTypesByColumnIndex,
+ arraySubTypesByColumnName,
+ targetBatchSize,
+ jdbcToArrowTypeConverter,
+ null);
}
/**
- * Constructs a new configuration from the provided allocator and calendar. The allocator
- * is used when constructing the Arrow vectors from the ResultSet, and the calendar is used to define
- * Arrow Timestamp fields, and to read time-based fields from the JDBC ResultSet.
+ * Constructs a new configuration from the provided allocator and calendar. The allocator
+ * is used when constructing the Arrow vectors from the ResultSet, and the calendar is
+ * used to define Arrow Timestamp fields, and to read time-based fields from the JDBC
+ * ResultSet.
*
- * @param allocator The memory allocator to construct the Arrow vectors with.
- * @param calendar The calendar to use when constructing Timestamp fields and reading time-based results.
- * @param includeMetadata Whether to include JDBC field metadata in the Arrow Schema Field metadata.
+ * @param allocator The memory allocator to construct the Arrow vectors with.
+ * @param calendar The calendar to use when constructing Timestamp fields and reading time-based
+ * results.
+ * @param includeMetadata Whether to include JDBC field metadata in the Arrow Schema Field
+ * metadata.
* @param reuseVectorSchemaRoot Whether to reuse the vector schema root for each data load.
* @param arraySubTypesByColumnIndex The type of the JDBC array at the column index (1-based).
- * @param arraySubTypesByColumnName The type of the JDBC array at the column name.
- * @param targetBatchSize The target batch size to be used in preallocation of the resulting vectors.
- * @param jdbcToArrowTypeConverter The function that maps JDBC field type information to arrow type. If set to null,
- * the default mapping will be used, which is defined as:
- *     <ul>
- *       <li>CHAR --> ArrowType.Utf8</li>
- *       <li>NCHAR --> ArrowType.Utf8</li>
- *       <li>VARCHAR --> ArrowType.Utf8</li>
- *       <li>NVARCHAR --> ArrowType.Utf8</li>
- *       <li>LONGVARCHAR --> ArrowType.Utf8</li>
- *       <li>LONGNVARCHAR --> ArrowType.Utf8</li>
- *       <li>NUMERIC --> ArrowType.Decimal(precision, scale)</li>
- *       <li>DECIMAL --> ArrowType.Decimal(precision, scale)</li>
- *       <li>BIT --> ArrowType.Bool</li>
- *       <li>TINYINT --> ArrowType.Int(8, signed)</li>
- *       <li>SMALLINT --> ArrowType.Int(16, signed)</li>
- *       <li>INTEGER --> ArrowType.Int(32, signed)</li>
- *       <li>BIGINT --> ArrowType.Int(64, signed)</li>
- *       <li>REAL --> ArrowType.FloatingPoint(FloatingPointPrecision.SINGLE)</li>
- * @param bigDecimalRoundingMode The java.math.RoundingMode to be used in coercion of a BigDecimal from a
- * ResultSet having a scale which does not match that of the target vector. Use null
- * (default value) to require strict scale matching.
+ * @param arraySubTypesByColumnName The type of the JDBC array at the column name.
+ * @param targetBatchSize The target batch size to be used in preallocation of the resulting
+ * vectors.
+ * @param jdbcToArrowTypeConverter The function that maps JDBC field type information to arrow
+ * type. If set to null, the default mapping will be used, which is defined as:
+ *
+ *
+ * @param bigDecimalRoundingMode The java.math.RoundingMode to be used in coercion of a BigDecimal
+ * from a ResultSet having a scale which does not match that of the target vector. Use null
+ * (default value) to require strict scale matching.
*/
JdbcToArrowConfig(
BufferAllocator allocator,
@@ -245,16 +260,19 @@ public final class JdbcToArrowConfig {
this.bigDecimalRoundingMode = bigDecimalRoundingMode;
// set up type converter
- this.jdbcToArrowTypeConverter = jdbcToArrowTypeConverter != null ? jdbcToArrowTypeConverter :
- (jdbcFieldInfo) -> JdbcToArrowUtils.getArrowTypeFromJdbcType(jdbcFieldInfo, calendar);
+ this.jdbcToArrowTypeConverter =
+ jdbcToArrowTypeConverter != null
+ ? jdbcToArrowTypeConverter
+ : (jdbcFieldInfo) -> JdbcToArrowUtils.getArrowTypeFromJdbcType(jdbcFieldInfo, calendar);
- this.jdbcConsumerGetter = jdbcConsumerGetter != null ? jdbcConsumerGetter : JdbcToArrowUtils::getConsumer;
+ this.jdbcConsumerGetter =
+ jdbcConsumerGetter != null ? jdbcConsumerGetter : JdbcToArrowUtils::getConsumer;
}
/**
- * The calendar to use when defining Arrow Timestamp fields
- * and retrieving {@link java.sql.Date}, {@link java.sql.Time}, or {@link java.sql.Timestamp}
- * data types from the {@link java.sql.ResultSet}, or null if not converting.
+ * The calendar to use when defining Arrow Timestamp fields and retrieving {@link java.sql.Date},
+ * {@link java.sql.Time}, or {@link java.sql.Timestamp} data types from the {@link
+ * java.sql.ResultSet}, or null if not converting.
*
* @return the calendar.
*/
@@ -280,30 +298,22 @@ public boolean shouldIncludeMetadata() {
return includeMetadata;
}
- /**
- * Get the target batch size for partial read.
- */
+ /** Get the target batch size for partial read. */
public int getTargetBatchSize() {
return targetBatchSize;
}
- /**
- * Get whether it is allowed to reuse the vector schema root.
- */
+ /** Get whether it is allowed to reuse the vector schema root. */
public boolean isReuseVectorSchemaRoot() {
return reuseVectorSchemaRoot;
}
- /**
- * Gets the mapping between JDBC type information to Arrow type.
- */
+ /** Gets the mapping between JDBC type information to Arrow type. */
  public Function<JdbcFieldInfo, ArrowType> getJdbcToArrowTypeConverter() {
return jdbcToArrowTypeConverter;
}
- /**
- * Gets the JDBC consumer getter.
- */
+ /** Gets the JDBC consumer getter. */
public JdbcConsumerFactory getJdbcConsumerGetter() {
return jdbcConsumerGetter;
}
@@ -311,8 +321,10 @@ public JdbcConsumerFactory getJdbcConsumerGetter() {
/**
* Returns the array sub-type {@link JdbcFieldInfo} defined for the provided column index.
*
- * @param index The {@link java.sql.ResultSetMetaData} column index of an {@link java.sql.Types#ARRAY} type.
- * @return The {@link JdbcFieldInfo} for that array's sub-type, or null if not defined.
+ * @param index The {@link java.sql.ResultSetMetaData} column index of an {@link
+ * java.sql.Types#ARRAY} type.
+ * @return The {@link JdbcFieldInfo} for that array's sub-type, or <code>null</code> if not
+ *     defined.
*/
public JdbcFieldInfo getArraySubTypeByColumnIndex(int index) {
if (arraySubTypesByColumnIndex == null) {
@@ -325,8 +337,10 @@ public JdbcFieldInfo getArraySubTypeByColumnIndex(int index) {
/**
* Returns the array sub-type {@link JdbcFieldInfo} defined for the provided column name.
*
- * @param name The {@link java.sql.ResultSetMetaData} column name of an {@link java.sql.Types#ARRAY} type.
- * @return The {@link JdbcFieldInfo} for that array's sub-type, or null if not defined.
+ * @param name The {@link java.sql.ResultSetMetaData} column name of an {@link
+ * java.sql.Types#ARRAY} type.
+ * @return The {@link JdbcFieldInfo} for that array's sub-type, or <code>null</code> if not
+ *     defined.
*/
public JdbcFieldInfo getArraySubTypeByColumnName(String name) {
if (arraySubTypesByColumnName == null) {
@@ -339,7 +353,8 @@ public JdbcFieldInfo getArraySubTypeByColumnName(String name) {
/**
* Returns the type {@link JdbcFieldInfo} explicitly defined for the provided column index.
*
- * @param index The {@link java.sql.ResultSetMetaData} column index to evaluate for explicit type mapping.
+ * @param index The {@link java.sql.ResultSetMetaData} column index to evaluate for explicit type
+ * mapping.
* @return The {@link JdbcFieldInfo} defined for the column, or null if not defined.
*/
public JdbcFieldInfo getExplicitTypeByColumnIndex(int index) {
@@ -353,7 +368,8 @@ public JdbcFieldInfo getExplicitTypeByColumnIndex(int index) {
/**
* Returns the type {@link JdbcFieldInfo} explicitly defined for the provided column name.
*
- * @param name The {@link java.sql.ResultSetMetaData} column name to evaluate for explicit type mapping.
+ * @param name The {@link java.sql.ResultSetMetaData} column name to evaluate for explicit type
+ * mapping.
* @return The {@link JdbcFieldInfo} defined for the column, or null if not defined.
*/
public JdbcFieldInfo getExplicitTypeByColumnName(String name) {
@@ -364,17 +380,12 @@ public JdbcFieldInfo getExplicitTypeByColumnName(String name) {
}
}
- /**
- * Return schema level metadata or null if not provided.
- */
+ /** Return schema level metadata or null if not provided. */
  public Map<String, String> getSchemaMetadata() {
return schemaMetadata;
}
- /**
- * Return metadata from columnIndex->meta map on per field basis
- * or null if not provided.
- */
+ /** Return metadata from columnIndex->meta map on per field basis or null if not provided. */
  public Map<Integer, Map<String, String>> getColumnMetadataByColumnIndex() {
return columnMetadataByColumnIndex;
}
@@ -383,12 +394,14 @@ public RoundingMode getBigDecimalRoundingMode() {
return bigDecimalRoundingMode;
}
- /**
- * Interface for a function that gets a JDBC consumer for the given values.
- */
+ /** Interface for a function that gets a JDBC consumer for the given values. */
@FunctionalInterface
public interface JdbcConsumerFactory {
- JdbcConsumer apply(ArrowType arrowType, int columnIndex, boolean nullable, FieldVector vector,
- JdbcToArrowConfig config);
+ JdbcConsumer apply(
+ ArrowType arrowType,
+ int columnIndex,
+ boolean nullable,
+ FieldVector vector,
+ JdbcToArrowConfig config);
}
}
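
The class Javadoc above explains that ARRAY columns need their element type configured up front, because some drivers (H2) report Types.NULL for the sub-type. A sketch of such a configuration using only methods that appear in this patch; the column index and element type are invented for illustration:

.. code-block:: java

    import java.sql.Types;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.arrow.adapter.jdbc.JdbcFieldInfo;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfigBuilder;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
    import org.apache.arrow.memory.BufferAllocator;

    public class ConfigSketch {
      static JdbcToArrowConfig arrayAwareConfig(BufferAllocator allocator) {
        // Column 3 (1-based) is an ARRAY whose elements are INTEGERs.
        Map<Integer, JdbcFieldInfo> arrayElementTypes = new HashMap<>();
        arrayElementTypes.put(3, new JdbcFieldInfo(Types.INTEGER));

        return new JdbcToArrowConfigBuilder(allocator, JdbcToArrowUtils.getUtcCalendar())
            .setArraySubTypeByColumnIndexMap(arrayElementTypes)
            .setTargetBatchSize(4096)        // batches of up to 4096 rows
            .setReuseVectorSchemaRoot(true)  // one root reused across batches
            .build();
      }
    }
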
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java
index 7d88c238320..cef26b27b7a 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowConfigBuilder.java
@@ -23,15 +23,12 @@
import java.util.Calendar;
import java.util.Map;
import java.util.function.Function;
-
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.util.Preconditions;
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.types.pojo.ArrowType;
-/**
- * This class builds {@link JdbcToArrowConfig}s.
- */
+/** This class builds {@link JdbcToArrowConfig}s. */
public class JdbcToArrowConfigBuilder {
private Calendar calendar;
private BufferAllocator allocator;
@@ -49,9 +46,9 @@ public class JdbcToArrowConfigBuilder {
private RoundingMode bigDecimalRoundingMode;
/**
- * Default constructor for the JdbcToArrowConfigBuilder}.
- * Use the setter methods for the allocator and calendar; the allocator must be
- * set. Otherwise, {@link #build()} will throw a {@link NullPointerException}.
+ * Default constructor for the JdbcToArrowConfigBuilder}. Use the setter methods for
+ * the allocator and calendar; the allocator must be set. Otherwise, {@link #build()} will throw a
+ * {@link NullPointerException}.
*/
public JdbcToArrowConfigBuilder() {
this.allocator = null;
@@ -68,16 +65,13 @@ public JdbcToArrowConfigBuilder() {
}
/**
- * Constructor for the JdbcToArrowConfigBuilder. The
- * allocator is required, and a {@link NullPointerException}
- * will be thrown if it is null.
- *
- * The allocator is used to construct Arrow vectors from the JDBC ResultSet.
- * The calendar is used to determine the time zone of {@link java.sql.Timestamp}
- * fields and convert {@link java.sql.Date}, {@link java.sql.Time}, and
- * {@link java.sql.Timestamp} fields to a single, common time zone when reading
- * from the result set.
- *
+ * Constructor for the JdbcToArrowConfigBuilder. The allocator is required, and a
+ * {@link NullPointerException} will be thrown if it is null.
+ *
+ * <p>The allocator is used to construct Arrow vectors from the JDBC ResultSet. The calendar is
+ * used to determine the time zone of {@link java.sql.Timestamp} fields and convert {@link
+ * java.sql.Date}, {@link java.sql.Time}, and {@link java.sql.Timestamp} fields to a single,
+ * common time zone when reading from the result set.
*
* @param allocator The Arrow Vector memory allocator.
* @param calendar The calendar to use when constructing timestamp fields.
@@ -95,26 +89,23 @@ public JdbcToArrowConfigBuilder(BufferAllocator allocator, Calendar calendar) {
}
/**
- * Constructor for the JdbcToArrowConfigBuilder. Both the
- * allocator and calendar are required. A {@link NullPointerException}
- * will be thrown if either of those arguments is null.
- *
- * The allocator is used to construct Arrow vectors from the JDBC ResultSet.
- * The calendar is used to determine the time zone of {@link java.sql.Timestamp}
- * fields and convert {@link java.sql.Date}, {@link java.sql.Time}, and
- * {@link java.sql.Timestamp} fields to a single, common time zone when reading
- * from the result set.
- *
- *
- * The includeMetadata argument, if true will cause
- * various information about each database field to be added to the Vector
- * Schema's field metadata.
- *
+ * Constructor for the JdbcToArrowConfigBuilder. Both the allocator and calendar are
+ * required. A {@link NullPointerException} will be thrown if either of those arguments is
+ * null.
+ *
+ * <p>The allocator is used to construct Arrow vectors from the JDBC ResultSet. The calendar is
+ * used to determine the time zone of {@link java.sql.Timestamp} fields and convert {@link
+ * java.sql.Date}, {@link java.sql.Time}, and {@link java.sql.Timestamp} fields to a single,
+ * common time zone when reading from the result set.
+ *
+ * <p>The <code>includeMetadata</code> argument, if <code>true</code> will cause various
+ * information about each database field to be added to the Vector Schema's field metadata.
*
* @param allocator The Arrow Vector memory allocator.
* @param calendar The calendar to use when constructing timestamp fields.
*/
- public JdbcToArrowConfigBuilder(BufferAllocator allocator, Calendar calendar, boolean includeMetadata) {
+ public JdbcToArrowConfigBuilder(
+ BufferAllocator allocator, Calendar calendar, boolean includeMetadata) {
this(allocator, calendar);
this.includeMetadata = includeMetadata;
}
@@ -132,8 +123,8 @@ public JdbcToArrowConfigBuilder setAllocator(BufferAllocator allocator) {
}
/**
- * Sets the {@link Calendar} to use when constructing timestamp fields in the
- * Arrow schema, and reading time-based fields from the JDBC ResultSet.
+ * Sets the {@link Calendar} to use when constructing timestamp fields in the Arrow schema, and
+ * reading time-based fields from the JDBC ResultSet.
*
* @param calendar the calendar to set.
*/
@@ -145,7 +136,8 @@ public JdbcToArrowConfigBuilder setCalendar(Calendar calendar) {
/**
* Sets whether to include JDBC ResultSet field metadata in the Arrow Schema field metadata.
*
- * @param includeMetadata Whether to include or exclude JDBC metadata in the Arrow Schema field metadata.
+ * @param includeMetadata Whether to include or exclude JDBC metadata in the Arrow Schema field
+ * metadata.
* @return This instance of the JdbcToArrowConfig, for chaining.
*/
public JdbcToArrowConfigBuilder setIncludeMetadata(boolean includeMetadata) {
@@ -154,8 +146,8 @@ public JdbcToArrowConfigBuilder setIncludeMetadata(boolean includeMetadata) {
}
/**
- * Sets the mapping of column-index-to-{@link JdbcFieldInfo} used for columns of type {@link java.sql.Types#ARRAY}.
- * The column index is 1-based, to match the JDBC column index.
+ * Sets the mapping of column-index-to-{@link JdbcFieldInfo} used for columns of type {@link
+ * java.sql.Types#ARRAY}. The column index is 1-based, to match the JDBC column index.
*
* @param map The mapping.
* @return This instance of the JdbcToArrowConfig, for chaining.
@@ -166,7 +158,8 @@ public JdbcToArrowConfigBuilder setArraySubTypeByColumnIndexMap(Map<Integer, JdbcFieldInfo> map) {
@@ -178,11 +171,12 @@ public JdbcToArrowConfigBuilder setArraySubTypeByColumnNameMap(Map<String, JdbcFieldInfo> map) {
- * This can be useful to override type information from JDBC drivers that provide incomplete type info,
- * e.g. DECIMAL with precision = scale = 0.
- *
- * The column index is 1-based, to match the JDBC column index.
+ *
+ * <p>This can be useful to override type information from JDBC drivers that provide incomplete
+ * type info, e.g. DECIMAL with precision = scale = 0.
+ *
+ * <p>The column index is 1-based, to match the JDBC column index.
+ *
* @param map The mapping.
*/
  public JdbcToArrowConfigBuilder setExplicitTypesByColumnIndex(Map<Integer, JdbcFieldInfo> map) {
@@ -192,9 +186,10 @@ public JdbcToArrowConfigBuilder setExplicitTypesByColumnIndex(Map<Integer, JdbcFieldInfo> map) {
- * This can be useful to override type information from JDBC drivers that provide incomplete type info,
- * e.g. DECIMAL with precision = scale = 0.
+ *
+ * <p>This can be useful to override type information from JDBC drivers that provide incomplete
+ * type info, e.g. DECIMAL with precision = scale = 0.
+ *
* @param map The mapping.
*/
  public JdbcToArrowConfigBuilder setExplicitTypesByColumnName(Map<String, JdbcFieldInfo> map) {
@@ -204,8 +199,8 @@ public JdbcToArrowConfigBuilder setExplicitTypesByColumnName(Map<String, JdbcFieldInfo> map) {
- * Use {@link JdbcToArrowConfig#NO_LIMIT_BATCH_SIZE} to read all rows at once.
+ *
+ * <p>Use {@link JdbcToArrowConfig#NO_LIMIT_BATCH_SIZE} to read all rows at once.
*/
public JdbcToArrowConfigBuilder setTargetBatchSize(int targetBatchSize) {
this.targetBatchSize = targetBatchSize;
@@ -214,8 +209,9 @@ public JdbcToArrowConfigBuilder setTargetBatchSize(int targetBatchSize) {
/**
* Set the function used to convert JDBC types to Arrow types.
- * <p>Defaults to wrapping {@link JdbcToArrowUtils#getArrowTypeFromJdbcType(JdbcFieldInfo, Calendar)}.
+ * <p>Defaults to wrapping {@link JdbcToArrowUtils#getArrowTypeFromJdbcType(JdbcFieldInfo,
+ * Calendar)}.
*/
public JdbcToArrowConfigBuilder setJdbcToArrowTypeConverter(
      Function<JdbcFieldInfo, ArrowType> jdbcToArrowTypeConverter) {
@@ -225,9 +221,9 @@ public JdbcToArrowConfigBuilder setJdbcToArrowTypeConverter(
/**
* Set the function used to get a JDBC consumer for a given type.
- * <p>Defaults to wrapping {@link JdbcToArrowUtils#getConsumer(ArrowType, Integer, Boolean, FieldVector, JdbcToArrowConfig)}.
+ * <p>Defaults to wrapping {@link JdbcToArrowUtils#getConsumer(ArrowType, Integer, Boolean,
+ * FieldVector, JdbcToArrowConfig)}.
*/
public JdbcToArrowConfigBuilder setJdbcConsumerGetter(
JdbcToArrowConfig.JdbcConsumerFactory jdbcConsumerGetter) {
@@ -236,35 +232,32 @@ public JdbcToArrowConfigBuilder setJdbcConsumerGetter(
}
/**
- * Set whether to use the same {@link org.apache.arrow.vector.VectorSchemaRoot} instance on each iteration,
- * or to allocate a new one.
+ * Set whether to use the same {@link org.apache.arrow.vector.VectorSchemaRoot} instance on each
+ * iteration, or to allocate a new one.
*/
public JdbcToArrowConfigBuilder setReuseVectorSchemaRoot(boolean reuseVectorSchemaRoot) {
this.reuseVectorSchemaRoot = reuseVectorSchemaRoot;
return this;
}
- /**
- * Set metadata for schema.
- */
+ /** Set metadata for schema. */
  public JdbcToArrowConfigBuilder setSchemaMetadata(Map<String, String> schemaMetadata) {
this.schemaMetadata = schemaMetadata;
return this;
}
- /**
- * Set metadata from columnIndex->meta map on per field basis.
- */
+ /** Set metadata from columnIndex->meta map on per field basis. */
public JdbcToArrowConfigBuilder setColumnMetadataByColumnIndex(
-    Map<Integer, Map<String, String>> columnMetadataByColumnIndex) {
+      Map<Integer, Map<String, String>> columnMetadataByColumnIndex) {
this.columnMetadataByColumnIndex = columnMetadataByColumnIndex;
return this;
}
/**
- * Set the rounding mode used when the scale of the actual value does not match the declared scale.
- *
- * By default, an error is raised in such cases.
+ * Set the rounding mode used when the scale of the actual value does not match the declared
+ * scale.
+ *
+ * <p>By default, an error is raised in such cases.
*/
public JdbcToArrowConfigBuilder setBigDecimalRoundingMode(RoundingMode bigDecimalRoundingMode) {
this.bigDecimalRoundingMode = bigDecimalRoundingMode;
@@ -272,8 +265,8 @@ public JdbcToArrowConfigBuilder setBigDecimalRoundingMode(RoundingMode bigDecima
}
/**
- * This builds the {@link JdbcToArrowConfig} from the provided
- * {@link BufferAllocator} and {@link Calendar}.
+ * This builds the {@link JdbcToArrowConfig} from the provided {@link BufferAllocator} and {@link
+ * Calendar}.
*
* @return The built {@link JdbcToArrowConfig}
* @throws NullPointerException if either the allocator or calendar was not set.
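
The setExplicitTypesByColumnIndex/Name Javadoc above describes overriding drivers that report DECIMAL with precision = scale = 0. A sketch combining that override with the rounding-mode setter from the same builder (the column index, precision, and scale are invented):

.. code-block:: java

    import java.math.RoundingMode;
    import java.sql.Types;
    import java.util.HashMap;
    import java.util.Map;
    import org.apache.arrow.adapter.jdbc.JdbcFieldInfo;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowConfigBuilder;
    import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
    import org.apache.arrow.memory.BufferAllocator;

    public class DecimalOverrideSketch {
      static JdbcToArrowConfig decimalConfig(BufferAllocator allocator) {
        // The driver reports DECIMAL(0, 0) for column 1; declare the real type.
        Map<Integer, JdbcFieldInfo> explicitTypes = new HashMap<>();
        explicitTypes.put(1, new JdbcFieldInfo(Types.DECIMAL, 18, 2));

        return new JdbcToArrowConfigBuilder(allocator, JdbcToArrowUtils.getUtcCalendar())
            .setExplicitTypesByColumnIndex(explicitTypes)
            // Round values whose scale differs from 2 instead of raising an error.
            .setBigDecimalRoundingMode(RoundingMode.HALF_UP)
            .build();
      }
    }
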
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java
index eaee4993607..959083136a7 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/JdbcToArrowUtils.java
@@ -38,7 +38,6 @@
import java.util.Locale;
import java.util.Map;
import java.util.TimeZone;
-
import org.apache.arrow.adapter.jdbc.consumer.ArrayConsumer;
import org.apache.arrow.adapter.jdbc.consumer.BigIntConsumer;
import org.apache.arrow.adapter.jdbc.consumer.BinaryConsumer;
@@ -91,7 +90,8 @@
import org.apache.arrow.vector.util.ValueVectorUtility;
/**
- * Class that does most of the work to convert JDBC ResultSet data into Arrow columnar format Vector objects.
+ * Class that does most of the work to convert JDBC ResultSet data into Arrow columnar format Vector
+ * objects.
*
* @since 0.10.0
*/
@@ -99,9 +99,7 @@ public class JdbcToArrowUtils {
private static final int JDBC_ARRAY_VALUE_COLUMN = 2;
- /**
- * Returns the instance of a {java.util.Calendar} with the UTC time zone and root locale.
- */
+ /** Returns the instance of a {java.util.Calendar} with the UTC time zone and root locale. */
public static Calendar getUtcCalendar() {
return Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT);
}
@@ -114,7 +112,8 @@ public static Calendar getUtcCalendar() {
* @return {@link Schema}
* @throws SQLException on error
*/
- public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, Calendar calendar) throws SQLException {
+ public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, Calendar calendar)
+ throws SQLException {
Preconditions.checkNotNull(calendar, "Calendar object can't be null");
return jdbcToArrowSchema(rsmd, new JdbcToArrowConfig(new RootAllocator(0), calendar));
@@ -123,25 +122,28 @@ public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, Calendar calendar
/**
* Create Arrow {@link Schema} object for the given JDBC {@link ResultSetMetaData}.
*
- * @param parameterMetaData The ResultSetMetaData containing the results, to read the JDBC metadata from.
- * @param calendar The calendar to use the time zone field of, to construct Timestamp fields from.
+ * @param parameterMetaData The ResultSetMetaData containing the results, to read the JDBC
+ * metadata from.
+ * @param calendar The calendar to use the time zone field of, to construct Timestamp fields from.
* @return {@link Schema}
* @throws SQLException on error
*/
- public static Schema jdbcToArrowSchema(final ParameterMetaData parameterMetaData, final Calendar calendar)
- throws SQLException {
+ public static Schema jdbcToArrowSchema(
+ final ParameterMetaData parameterMetaData, final Calendar calendar) throws SQLException {
Preconditions.checkNotNull(calendar, "Calendar object can't be null");
Preconditions.checkNotNull(parameterMetaData);
     final List<Field> parameterFields = new ArrayList<>(parameterMetaData.getParameterCount());
- for (int parameterCounter = 1; parameterCounter <= parameterMetaData.getParameterCount();
- parameterCounter++) {
+ for (int parameterCounter = 1;
+ parameterCounter <= parameterMetaData.getParameterCount();
+ parameterCounter++) {
final int jdbcDataType = parameterMetaData.getParameterType(parameterCounter);
final int jdbcIsNullable = parameterMetaData.isNullable(parameterCounter);
final boolean arrowIsNullable = jdbcIsNullable != ParameterMetaData.parameterNoNulls;
final int precision = parameterMetaData.getPrecision(parameterCounter);
final int scale = parameterMetaData.getScale(parameterCounter);
- final ArrowType arrowType = getArrowTypeFromJdbcType(new JdbcFieldInfo(jdbcDataType, precision, scale), calendar);
- final FieldType fieldType = new FieldType(arrowIsNullable, arrowType, /*dictionary=*/null);
+ final ArrowType arrowType =
+ getArrowTypeFromJdbcType(new JdbcFieldInfo(jdbcDataType, precision, scale), calendar);
+ final FieldType fieldType = new FieldType(arrowIsNullable, arrowType, /* dictionary= */ null);
parameterFields.add(new Field(null, fieldType, null));
}
@@ -152,10 +154,11 @@ public static Schema jdbcToArrowSchema(final ParameterMetaData parameterMetaData
* Converts the provided JDBC type to its respective {@link ArrowType} counterpart.
*
* @param fieldInfo the {@link JdbcFieldInfo} with information about the original JDBC type.
- * @param calendar the {@link Calendar} to use for datetime data types.
+ * @param calendar the {@link Calendar} to use for datetime data types.
* @return a new {@link ArrowType}.
*/
- public static ArrowType getArrowTypeFromJdbcType(final JdbcFieldInfo fieldInfo, final Calendar calendar) {
+ public static ArrowType getArrowTypeFromJdbcType(
+ final JdbcFieldInfo fieldInfo, final Calendar calendar) {
switch (fieldInfo.getJdbcType()) {
case Types.BOOLEAN:
case Types.BIT:
@@ -222,30 +225,34 @@ public static ArrowType getArrowTypeFromJdbcType(final JdbcFieldInfo fieldInfo,
/**
* Create Arrow {@link Schema} object for the given JDBC {@link java.sql.ResultSetMetaData}.
*
- *
- * If {@link JdbcToArrowConfig#shouldIncludeMetadata()} returns true, the following fields
- * will be added to the {@link FieldType#getMetadata()}:
+ * <p>If {@link JdbcToArrowConfig#shouldIncludeMetadata()} returns <code>true</code>, the
+ * following fields will be added to the {@link FieldType#getMetadata()}:
+ *
*
- * If any columns are of type {@link java.sql.Types#ARRAY}, the configuration object will be used to look up
- * the array sub-type field. The {@link JdbcToArrowConfig#getArraySubTypeByColumnIndex(int)} method will be
- * checked first, followed by the {@link JdbcToArrowConfig#getArraySubTypeByColumnName(String)} method.
- *
+ *
+ * <p>If any columns are of type {@link java.sql.Types#ARRAY}, the
+ * used to look up the array sub-type field. The {@link
+ * JdbcToArrowConfig#getArraySubTypeByColumnIndex(int)} method will be checked first, followed by
+ * the {@link JdbcToArrowConfig#getArraySubTypeByColumnName(String)} method.
*
* @param rsmd The ResultSetMetaData containing the results, to read the JDBC metadata from.
* @param config The configuration to use when constructing the schema.
* @return {@link Schema}
* @throws SQLException on error
- * @throws IllegalArgumentException if <code>rsmd</code> contains an {@link java.sql.Types#ARRAY} but the
- *         <code>config</code> does not have a sub-type definition for it.
+ * @throws IllegalArgumentException if <code>rsmd</code> contains an {@link java.sql.Types#ARRAY}
+ *     but the <code>config</code> does not have a sub-type definition for it.
*/
- public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, JdbcToArrowConfig config) throws SQLException {
+ public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, JdbcToArrowConfig config)
+ throws SQLException {
Preconditions.checkNotNull(rsmd, "JDBC ResultSetMetaData object can't be null");
Preconditions.checkNotNull(config, "The configuration object must not be null");
@@ -254,8 +261,10 @@ public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, JdbcToArrowConfig
for (int i = 1; i <= columnCount; i++) {
final String columnName = rsmd.getColumnLabel(i);
- final Map columnMetadata = config.getColumnMetadataByColumnIndex() != null ?
- config.getColumnMetadataByColumnIndex().get(i) : null;
+ final Map columnMetadata =
+ config.getColumnMetadataByColumnIndex() != null
+ ? config.getColumnMetadataByColumnIndex().get(i)
+ : null;
final Map metadata;
if (config.shouldIncludeMetadata()) {
metadata = new HashMap<>();
@@ -278,14 +287,19 @@ public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, JdbcToArrowConfig
final JdbcFieldInfo columnFieldInfo = getJdbcFieldInfoForColumn(rsmd, i, config);
final ArrowType arrowType = config.getJdbcToArrowTypeConverter().apply(columnFieldInfo);
if (arrowType != null) {
- final FieldType fieldType = new FieldType(
- isColumnNullable(rsmd, i, columnFieldInfo), arrowType, /* dictionary encoding */ null, metadata);
+ final FieldType fieldType =
+ new FieldType(
+ isColumnNullable(rsmd, i, columnFieldInfo),
+ arrowType, /* dictionary encoding */
+ null,
+ metadata);
       List<Field> children = null;
if (arrowType.getTypeID() == ArrowType.List.TYPE_TYPE) {
final JdbcFieldInfo arrayFieldInfo = getJdbcFieldInfoForArraySubType(rsmd, i, config);
if (arrayFieldInfo == null) {
- throw new IllegalArgumentException("Configuration does not provide a mapping for array column " + i);
+ throw new IllegalArgumentException(
+ "Configuration does not provide a mapping for array column " + i);
}
         children = new ArrayList<Field>();
final ArrowType childType = config.getJdbcToArrowTypeConverter().apply(arrayFieldInfo);
@@ -295,9 +309,13 @@ public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, JdbcToArrowConfig
FieldType keyType = new FieldType(false, new ArrowType.Utf8(), null, null);
FieldType valueType = new FieldType(false, new ArrowType.Utf8(), null, null);
children = new ArrayList<>();
- children.add(new Field("child", mapType,
- Arrays.asList(new Field(MapVector.KEY_NAME, keyType, null),
- new Field(MapVector.VALUE_NAME, valueType, null))));
+ children.add(
+ new Field(
+ "child",
+ mapType,
+ Arrays.asList(
+ new Field(MapVector.KEY_NAME, keyType, null),
+ new Field(MapVector.VALUE_NAME, valueType, null))));
}
fields.add(new Field(columnName, fieldType, children));
@@ -307,18 +325,14 @@ public static Schema jdbcToArrowSchema(ResultSetMetaData rsmd, JdbcToArrowConfig
}
static JdbcFieldInfo getJdbcFieldInfoForColumn(
- ResultSetMetaData rsmd,
- int arrayColumn,
- JdbcToArrowConfig config)
- throws SQLException {
+ ResultSetMetaData rsmd, int arrayColumn, JdbcToArrowConfig config) throws SQLException {
Preconditions.checkNotNull(rsmd, "ResultSet MetaData object cannot be null");
Preconditions.checkNotNull(config, "Configuration must not be null");
Preconditions.checkArgument(
- arrayColumn > 0,
- "ResultSetMetaData columns start with 1; column cannot be less than 1");
+ arrayColumn > 0, "ResultSetMetaData columns start with 1; column cannot be less than 1");
Preconditions.checkArgument(
- arrayColumn <= rsmd.getColumnCount(),
- "Column number cannot be more than the number of columns");
+ arrayColumn <= rsmd.getColumnCount(),
+ "Column number cannot be more than the number of columns");
JdbcFieldInfo fieldInfo = config.getExplicitTypeByColumnIndex(arrayColumn);
if (fieldInfo == null) {
@@ -334,16 +348,12 @@ static JdbcFieldInfo getJdbcFieldInfoForColumn(
* If no sub-type can be found, returns null.
*/
private static JdbcFieldInfo getJdbcFieldInfoForArraySubType(
- ResultSetMetaData rsmd,
- int arrayColumn,
- JdbcToArrowConfig config)
- throws SQLException {
+ ResultSetMetaData rsmd, int arrayColumn, JdbcToArrowConfig config) throws SQLException {
Preconditions.checkNotNull(rsmd, "ResultSet MetaData object cannot be null");
Preconditions.checkNotNull(config, "Configuration must not be null");
Preconditions.checkArgument(
- arrayColumn > 0,
- "ResultSetMetaData columns start with 1; column cannot be less than 1");
+ arrayColumn > 0, "ResultSetMetaData columns start with 1; column cannot be less than 1");
Preconditions.checkArgument(
arrayColumn <= rsmd.getColumnCount(),
"Column number cannot be more than the number of columns");
@@ -359,10 +369,10 @@ private static JdbcFieldInfo getJdbcFieldInfoForArraySubType(
* Iterate the given JDBC {@link ResultSet} object to fetch the data and transpose it to populate
* the given Arrow Vector objects.
*
- * @param rs ResultSet to use to fetch the data from underlying database
- * @param root Arrow {@link VectorSchemaRoot} object to populate
- * @param calendar The calendar to use when reading {@link Date}, {@link Time}, or {@link Timestamp}
- * data types from the {@link ResultSet}, or null if not converting.
+ * @param rs ResultSet to use to fetch the data from underlying database
+ * @param root Arrow {@link VectorSchemaRoot} object to populate
+ * @param calendar The calendar to use when reading {@link Date}, {@link Time}, or {@link
+ * Timestamp} data types from the {@link ResultSet}, or null if not converting.
* @throws SQLException on error
*/
public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, Calendar calendar)
@@ -373,29 +383,30 @@ public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, Calen
jdbcToArrowVectors(rs, root, new JdbcToArrowConfig(new RootAllocator(0), calendar));
}
- static boolean isColumnNullable(ResultSetMetaData resultSetMetadata, int index, JdbcFieldInfo info)
- throws SQLException {
+ static boolean isColumnNullable(
+ ResultSetMetaData resultSetMetadata, int index, JdbcFieldInfo info) throws SQLException {
int nullableValue;
if (info != null && info.isNullable() != ResultSetMetaData.columnNullableUnknown) {
nullableValue = info.isNullable();
} else {
nullableValue = resultSetMetadata.isNullable(index);
}
- return nullableValue == ResultSetMetaData.columnNullable ||
- nullableValue == ResultSetMetaData.columnNullableUnknown;
+ return nullableValue == ResultSetMetaData.columnNullable
+ || nullableValue == ResultSetMetaData.columnNullableUnknown;
}
/**
* Iterate the given JDBC {@link ResultSet} object to fetch the data and transpose it to populate
* the given Arrow Vector objects.
*
- * @param rs ResultSet to use to fetch the data from underlying database
- * @param root Arrow {@link VectorSchemaRoot} object to populate
+ * @param rs ResultSet to use to fetch the data from underlying database
+ * @param root Arrow {@link VectorSchemaRoot} object to populate
* @param config The configuration to use when reading the data.
* @throws SQLException on error
* @throws JdbcConsumerException on error from VectorConsumer
*/
- public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, JdbcToArrowConfig config)
+ public static void jdbcToArrowVectors(
+ ResultSet rs, VectorSchemaRoot root, JdbcToArrowConfig config)
throws SQLException, IOException {
ResultSetMetaData rsmd = rs.getMetaData();
@@ -405,8 +416,13 @@ public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, JdbcT
for (int i = 1; i <= columnCount; i++) {
FieldVector vector = root.getVector(rsmd.getColumnLabel(i));
final JdbcFieldInfo columnFieldInfo = getJdbcFieldInfoForColumn(rsmd, i, config);
- consumers[i - 1] = getConsumer(
- vector.getField().getType(), i, isColumnNullable(rsmd, i, columnFieldInfo), vector, config);
+ consumers[i - 1] =
+ getConsumer(
+ vector.getField().getType(),
+ i,
+ isColumnNullable(rsmd, i, columnFieldInfo),
+ vector,
+ config);
}
CompositeJdbcConsumer compositeConsumer = null;
@@ -439,18 +455,22 @@ public static void jdbcToArrowVectors(ResultSet rs, VectorSchemaRoot root, JdbcT
}
/**
- * Default function used for JdbcConsumerFactory. This function gets a JdbcConsumer for the
- * given column based on the Arrow type and provided vector.
+ * Default function used for JdbcConsumerFactory. This function gets a JdbcConsumer for the given
+ * column based on the Arrow type and provided vector.
*
- * @param arrowType Arrow type for the column.
+ * @param arrowType Arrow type for the column.
* @param columnIndex Column index to fetch from the ResultSet
- * @param nullable Whether the value is nullable or not
- * @param vector Vector to store the consumed value
- * @param config Associated JdbcToArrowConfig, used mainly for the Calendar.
+ * @param nullable Whether the value is nullable or not
+ * @param vector Vector to store the consumed value
+ * @param config Associated JdbcToArrowConfig, used mainly for the Calendar.
* @return {@link JdbcConsumer}
*/
- public static JdbcConsumer getConsumer(ArrowType arrowType, int columnIndex, boolean nullable,
- FieldVector vector, JdbcToArrowConfig config) {
+ public static JdbcConsumer getConsumer(
+ ArrowType arrowType,
+ int columnIndex,
+ boolean nullable,
+ FieldVector vector,
+ JdbcToArrowConfig config) {
final Calendar calendar = config.getCalendar();
switch (arrowType.getTypeID()) {
@@ -472,10 +492,11 @@ public static JdbcConsumer getConsumer(ArrowType arrowType, int columnIndex, boo
case Decimal:
final RoundingMode bigDecimalRoundingMode = config.getBigDecimalRoundingMode();
if (((ArrowType.Decimal) arrowType).getBitWidth() == 256) {
- return Decimal256Consumer.createConsumer((Decimal256Vector) vector, columnIndex, nullable,
- bigDecimalRoundingMode);
+ return Decimal256Consumer.createConsumer(
+ (Decimal256Vector) vector, columnIndex, nullable, bigDecimalRoundingMode);
} else {
- return DecimalConsumer.createConsumer((DecimalVector) vector, columnIndex, nullable, bigDecimalRoundingMode);
+ return DecimalConsumer.createConsumer(
+ (DecimalVector) vector, columnIndex, nullable, bigDecimalRoundingMode);
}
case FloatingPoint:
switch (((ArrowType.FloatingPoint) arrowType).getPrecision()) {
@@ -495,17 +516,25 @@ public static JdbcConsumer getConsumer(ArrowType arrowType, int columnIndex, boo
case Date:
return DateConsumer.createConsumer((DateDayVector) vector, columnIndex, nullable, calendar);
case Time:
- return TimeConsumer.createConsumer((TimeMilliVector) vector, columnIndex, nullable, calendar);
+ return TimeConsumer.createConsumer(
+ (TimeMilliVector) vector, columnIndex, nullable, calendar);
case Timestamp:
if (config.getCalendar() == null) {
- return TimestampConsumer.createConsumer((TimeStampMilliVector) vector, columnIndex, nullable);
+ return TimestampConsumer.createConsumer(
+ (TimeStampMilliVector) vector, columnIndex, nullable);
} else {
- return TimestampTZConsumer.createConsumer((TimeStampMilliTZVector) vector, columnIndex, nullable, calendar);
+ return TimestampTZConsumer.createConsumer(
+ (TimeStampMilliTZVector) vector, columnIndex, nullable, calendar);
}
case List:
FieldVector childVector = ((ListVector) vector).getDataVector();
- JdbcConsumer delegate = getConsumer(childVector.getField().getType(), JDBC_ARRAY_VALUE_COLUMN,
- childVector.getField().isNullable(), childVector, config);
+ JdbcConsumer delegate =
+ getConsumer(
+ childVector.getField().getType(),
+ JDBC_ARRAY_VALUE_COLUMN,
+ childVector.getField().isNullable(),
+ childVector,
+ config);
return ArrayConsumer.createConsumer((ListVector) vector, delegate, columnIndex, nullable);
case Map:
return MapConsumer.createConsumer((MapVector) vector, columnIndex, nullable);
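The hunks above only reflow `getConsumer`, but it helps to see where it sits: `jdbcToArrowVectors` builds one `JdbcConsumer` per column through this factory and then drains the `ResultSet`. A minimal sketch of that call path; the in-memory H2 URL and throwaway table `t` are illustration only, not part of this patch:

.. code-block:: java

   import java.sql.Connection;
   import java.sql.DriverManager;
   import java.sql.ResultSet;
   import java.sql.Statement;
   import org.apache.arrow.adapter.jdbc.JdbcToArrowConfig;
   import org.apache.arrow.adapter.jdbc.JdbcToArrowConfigBuilder;
   import org.apache.arrow.adapter.jdbc.JdbcToArrowUtils;
   import org.apache.arrow.memory.BufferAllocator;
   import org.apache.arrow.memory.RootAllocator;
   import org.apache.arrow.vector.VectorSchemaRoot;
   import org.apache.arrow.vector.types.pojo.Schema;

   public class JdbcToArrowSketch {
     public static void main(String[] args) throws Exception {
       try (BufferAllocator allocator = new RootAllocator();
           Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
           Statement stmt = conn.createStatement()) {
         stmt.execute("CREATE TABLE t (id INT, name VARCHAR(32))");
         stmt.execute("INSERT INTO t VALUES (1, 'a'), (2, 'b')");
         JdbcToArrowConfig config =
             new JdbcToArrowConfigBuilder(allocator, JdbcToArrowUtils.getUtcCalendar()).build();
         try (ResultSet rs = stmt.executeQuery("SELECT * FROM t")) {
           // jdbcToArrowSchema maps JDBC column types to Arrow types; jdbcToArrowVectors
           // then calls getConsumer(...) once per column and consumes the rows.
           Schema schema = JdbcToArrowUtils.jdbcToArrowSchema(rs.getMetaData(), config);
           try (VectorSchemaRoot root = VectorSchemaRoot.create(schema, allocator)) {
             JdbcToArrowUtils.jdbcToArrowVectors(rs, root, config);
             System.out.println(root.contentToTSVString());
           }
         }
       }
     }
   }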
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BaseColumnBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BaseColumnBinder.java
index f24f409072c..e1df0041982 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BaseColumnBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BaseColumnBinder.java
@@ -21,6 +21,7 @@
/**
* Base class for ColumnBinder implementations.
+ *
 * @param <V> The concrete FieldVector subtype.
*/
 public abstract class BaseColumnBinder<V extends FieldVector> implements ColumnBinder {
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BigIntBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BigIntBinder.java
index fde4642ef90..f84a7c3e22b 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BigIntBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BigIntBinder.java
@@ -20,7 +20,6 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.vector.BigIntVector;
 /** A column binder for 64-bit integers. */
@@ -34,7 +33,8 @@ public BigIntBinder(BigIntVector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
final long value = vector.getDataBuffer().getLong((long) rowIndex * BigIntVector.TYPE_WIDTH);
statement.setLong(parameterIndex, value);
}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BitBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BitBinder.java
index adae513e99e..86930dc35cb 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BitBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/BitBinder.java
@@ -20,7 +20,6 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.vector.BitVector;
/** A column binder for booleans. */
@@ -34,7 +33,8 @@ public BitBinder(BitVector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
// See BitVector#getBit
final int byteIndex = rowIndex >> 3;
final byte b = vector.getDataBuffer().getByte(byteIndex);
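The reflowed `bind` above relies on the same bit addressing as `BitVector#getBit`; a tiny self-contained check of that arithmetic, with a plain array standing in for the Arrow buffer:

.. code-block:: java

   // Bit i lives in byte (i >> 3), at position (i & 7) within that byte.
   public class BitMathSketch {
     static boolean getBit(byte[] data, int rowIndex) {
       final int byteIndex = rowIndex >> 3;     // 8 bits per byte
       final byte b = data[byteIndex];
       return ((b >> (rowIndex & 7)) & 1) != 0; // mask out the target bit
     }

     public static void main(String[] args) {
       byte[] data = {0b0000_0101};             // rows 0 and 2 set
       System.out.println(getBit(data, 0));     // true
       System.out.println(getBit(data, 1));     // false
       System.out.println(getBit(data, 2));     // true
     }
   }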
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinder.java
index c2b1259e142..0e16b61ef84 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinder.java
@@ -19,12 +19,9 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
-
import org.apache.arrow.vector.FieldVector;
-/**
- * A helper to bind values from a wrapped Arrow vector to a JDBC PreparedStatement.
- */
+/** A helper to bind values from a wrapped Arrow vector to a JDBC PreparedStatement. */
public interface ColumnBinder {
/**
* Bind the given row to the given parameter.
@@ -43,14 +40,10 @@ public interface ColumnBinder {
*/
int getJdbcType();
- /**
- * Get the vector used by this binder.
- */
+ /** Get the vector used by this binder. */
FieldVector getVector();
- /**
- * Create a column binder for a vector, using the default JDBC type code for null values.
- */
+ /** Create a column binder for a vector, using the default JDBC type code for null values. */
static ColumnBinder forVector(FieldVector vector) {
return forVector(vector, /*jdbcType*/ null);
}
@@ -62,7 +55,8 @@ static ColumnBinder forVector(FieldVector vector) {
* @param jdbcType The JDBC type code to use (or null to use the default).
*/
static ColumnBinder forVector(FieldVector vector, Integer jdbcType) {
- final ColumnBinder binder = vector.getField().getType().accept(new ColumnBinderArrowTypeVisitor(vector, jdbcType));
+ final ColumnBinder binder =
+ vector.getField().getType().accept(new ColumnBinderArrowTypeVisitor(vector, jdbcType));
if (vector.getField().isNullable()) {
return new NullableColumnBinder(binder);
}
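For context on the interface being reformatted here: `forVector` picks a type-appropriate binder (wrapping it in `NullableColumnBinder` for nullable fields), and `bind` copies one row into one statement parameter. A sketch using only what the interface guarantees; the table `t` is hypothetical:

.. code-block:: java

   import java.sql.Connection;
   import java.sql.PreparedStatement;
   import java.sql.SQLException;
   import org.apache.arrow.adapter.jdbc.binder.ColumnBinder;
   import org.apache.arrow.vector.IntVector;

   public class ColumnBinderSketch {
     static void insertAll(Connection conn, IntVector vector) throws SQLException {
       ColumnBinder binder = ColumnBinder.forVector(vector);
       try (PreparedStatement stmt = conn.prepareStatement("INSERT INTO t (id) VALUES (?)")) {
         for (int row = 0; row < vector.getValueCount(); row++) {
           binder.bind(stmt, /*parameterIndex*/ 1, row); // JDBC parameters are 1-based
           stmt.addBatch();
         }
         stmt.executeBatch();
       }
     }
   }

In practice the adapter's higher-level `JdbcParameterBinder` drives these binders across a whole `VectorSchemaRoot`; the loop above just makes the per-row contract visible.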
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinderArrowTypeVisitor.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinderArrowTypeVisitor.java
index dc708724043..8dd43f25b66 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinderArrowTypeVisitor.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ColumnBinderArrowTypeVisitor.java
@@ -21,7 +21,6 @@
import java.time.ZoneId;
import java.util.Calendar;
import java.util.TimeZone;
-
import org.apache.arrow.vector.BigIntVector;
import org.apache.arrow.vector.BitVector;
import org.apache.arrow.vector.DateDayVector;
@@ -50,8 +49,8 @@
/**
* Visitor to create the base ColumnBinder for a vector.
- *
- * To handle null values, wrap the returned binder in a {@link NullableColumnBinder}.
+ *
+ * <p>To handle null values, wrap the returned binder in a {@link NullableColumnBinder}.
*/
 public class ColumnBinderArrowTypeVisitor implements ArrowType.ArrowTypeVisitor<ColumnBinder> {
private final FieldVector vector;
@@ -111,17 +110,21 @@ public ColumnBinder visit(ArrowType.Int type) {
}
switch (type.getBitWidth()) {
case 8:
- return jdbcType == null ? new TinyIntBinder((TinyIntVector) vector) :
- new TinyIntBinder((TinyIntVector) vector, jdbcType);
+ return jdbcType == null
+ ? new TinyIntBinder((TinyIntVector) vector)
+ : new TinyIntBinder((TinyIntVector) vector, jdbcType);
case 16:
- return jdbcType == null ? new SmallIntBinder((SmallIntVector) vector) :
- new SmallIntBinder((SmallIntVector) vector, jdbcType);
+ return jdbcType == null
+ ? new SmallIntBinder((SmallIntVector) vector)
+ : new SmallIntBinder((SmallIntVector) vector, jdbcType);
case 32:
- return jdbcType == null ? new IntBinder((IntVector) vector) :
- new IntBinder((IntVector) vector, jdbcType);
+ return jdbcType == null
+ ? new IntBinder((IntVector) vector)
+ : new IntBinder((IntVector) vector, jdbcType);
case 64:
- return jdbcType == null ? new BigIntBinder((BigIntVector) vector) :
- new BigIntBinder((BigIntVector) vector, jdbcType);
+ return jdbcType == null
+ ? new BigIntBinder((BigIntVector) vector)
+ : new BigIntBinder((BigIntVector) vector, jdbcType);
default:
throw new UnsupportedOperationException("No column binder implemented for type " + type);
}
@@ -131,11 +134,13 @@ public ColumnBinder visit(ArrowType.Int type) {
public ColumnBinder visit(ArrowType.FloatingPoint type) {
switch (type.getPrecision()) {
case SINGLE:
- return jdbcType == null ? new Float4Binder((Float4Vector) vector) :
- new Float4Binder((Float4Vector) vector, jdbcType);
+ return jdbcType == null
+ ? new Float4Binder((Float4Vector) vector)
+ : new Float4Binder((Float4Vector) vector, jdbcType);
case DOUBLE:
- return jdbcType == null ? new Float8Binder((Float8Vector) vector) :
- new Float8Binder((Float8Vector) vector, jdbcType);
+ return jdbcType == null
+ ? new Float8Binder((Float8Vector) vector)
+ : new Float8Binder((Float8Vector) vector, jdbcType);
default:
throw new UnsupportedOperationException("No column binder implemented for type " + type);
}
@@ -144,51 +149,62 @@ public ColumnBinder visit(ArrowType.FloatingPoint type) {
@Override
public ColumnBinder visit(ArrowType.Utf8 type) {
VarCharVector varChar = (VarCharVector) vector;
- return jdbcType == null ? new VarCharBinder<>(varChar, Types.VARCHAR) :
- new VarCharBinder<>(varChar, jdbcType);
+ return jdbcType == null
+ ? new VarCharBinder<>(varChar, Types.VARCHAR)
+ : new VarCharBinder<>(varChar, jdbcType);
}
@Override
public ColumnBinder visit(ArrowType.LargeUtf8 type) {
LargeVarCharVector varChar = (LargeVarCharVector) vector;
- return jdbcType == null ? new VarCharBinder<>(varChar, Types.LONGVARCHAR) :
- new VarCharBinder<>(varChar, jdbcType);
+ return jdbcType == null
+ ? new VarCharBinder<>(varChar, Types.LONGVARCHAR)
+ : new VarCharBinder<>(varChar, jdbcType);
}
@Override
public ColumnBinder visit(ArrowType.Binary type) {
VarBinaryVector varBinary = (VarBinaryVector) vector;
- return jdbcType == null ? new VarBinaryBinder<>(varBinary, Types.VARBINARY) :
- new VarBinaryBinder<>(varBinary, jdbcType);
+ return jdbcType == null
+ ? new VarBinaryBinder<>(varBinary, Types.VARBINARY)
+ : new VarBinaryBinder<>(varBinary, jdbcType);
}
@Override
public ColumnBinder visit(ArrowType.LargeBinary type) {
LargeVarBinaryVector varBinary = (LargeVarBinaryVector) vector;
- return jdbcType == null ? new VarBinaryBinder<>(varBinary, Types.LONGVARBINARY) :
- new VarBinaryBinder<>(varBinary, jdbcType);
+ return jdbcType == null
+ ? new VarBinaryBinder<>(varBinary, Types.LONGVARBINARY)
+ : new VarBinaryBinder<>(varBinary, jdbcType);
}
@Override
public ColumnBinder visit(ArrowType.FixedSizeBinary type) {
FixedSizeBinaryVector binary = (FixedSizeBinaryVector) vector;
- return jdbcType == null ? new FixedSizeBinaryBinder(binary, Types.BINARY) :
- new FixedSizeBinaryBinder(binary, jdbcType);
+ return jdbcType == null
+ ? new FixedSizeBinaryBinder(binary, Types.BINARY)
+ : new FixedSizeBinaryBinder(binary, jdbcType);
}
@Override
public ColumnBinder visit(ArrowType.Bool type) {
- return jdbcType == null ? new BitBinder((BitVector) vector) : new BitBinder((BitVector) vector, jdbcType);
+ return jdbcType == null
+ ? new BitBinder((BitVector) vector)
+ : new BitBinder((BitVector) vector, jdbcType);
}
@Override
public ColumnBinder visit(ArrowType.Decimal type) {
if (type.getBitWidth() == 128) {
DecimalVector decimalVector = (DecimalVector) vector;
- return jdbcType == null ? new Decimal128Binder(decimalVector) : new Decimal128Binder(decimalVector, jdbcType);
+ return jdbcType == null
+ ? new Decimal128Binder(decimalVector)
+ : new Decimal128Binder(decimalVector, jdbcType);
} else if (type.getBitWidth() == 256) {
Decimal256Vector decimalVector = (Decimal256Vector) vector;
- return jdbcType == null ? new Decimal256Binder(decimalVector) : new Decimal256Binder(decimalVector, jdbcType);
+ return jdbcType == null
+ ? new Decimal256Binder(decimalVector)
+ : new Decimal256Binder(decimalVector, jdbcType);
}
throw new UnsupportedOperationException("No column binder implemented for type " + type);
}
@@ -197,11 +213,13 @@ public ColumnBinder visit(ArrowType.Decimal type) {
public ColumnBinder visit(ArrowType.Date type) {
switch (type.getUnit()) {
case DAY:
- return jdbcType == null ? new DateDayBinder((DateDayVector) vector) :
- new DateDayBinder((DateDayVector) vector, /*calendar*/null, jdbcType);
+ return jdbcType == null
+ ? new DateDayBinder((DateDayVector) vector)
+ : new DateDayBinder((DateDayVector) vector, /*calendar*/ null, jdbcType);
case MILLISECOND:
- return jdbcType == null ? new DateMilliBinder((DateMilliVector) vector) :
- new DateMilliBinder((DateMilliVector) vector, /*calendar*/null, jdbcType);
+ return jdbcType == null
+ ? new DateMilliBinder((DateMilliVector) vector)
+ : new DateMilliBinder((DateMilliVector) vector, /*calendar*/ null, jdbcType);
default:
throw new UnsupportedOperationException("No column binder implemented for type " + type);
}
@@ -211,17 +229,21 @@ public ColumnBinder visit(ArrowType.Date type) {
public ColumnBinder visit(ArrowType.Time type) {
switch (type.getUnit()) {
case SECOND:
- return jdbcType == null ? new Time32Binder((TimeSecVector) vector) :
- new Time32Binder((TimeSecVector) vector, jdbcType);
+ return jdbcType == null
+ ? new Time32Binder((TimeSecVector) vector)
+ : new Time32Binder((TimeSecVector) vector, jdbcType);
case MILLISECOND:
- return jdbcType == null ? new Time32Binder((TimeMilliVector) vector) :
- new Time32Binder((TimeMilliVector) vector, jdbcType);
+ return jdbcType == null
+ ? new Time32Binder((TimeMilliVector) vector)
+ : new Time32Binder((TimeMilliVector) vector, jdbcType);
case MICROSECOND:
- return jdbcType == null ? new Time64Binder((TimeMicroVector) vector) :
- new Time64Binder((TimeMicroVector) vector, jdbcType);
+ return jdbcType == null
+ ? new Time64Binder((TimeMicroVector) vector)
+ : new Time64Binder((TimeMicroVector) vector, jdbcType);
case NANOSECOND:
- return jdbcType == null ? new Time64Binder((TimeNanoVector) vector) :
- new Time64Binder((TimeNanoVector) vector, jdbcType);
+ return jdbcType == null
+ ? new Time64Binder((TimeNanoVector) vector)
+ : new Time64Binder((TimeNanoVector) vector, jdbcType);
default:
throw new UnsupportedOperationException("No column binder implemented for type " + type);
}
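None of the rewrapped ternaries change dispatch: `forVector` still funnels every Arrow type through `ArrowType#accept`, as in this small sketch. The expected default of `java.sql.Types.BIGINT` for a 64-bit integer is inferred from the pattern above, not stated in this patch:

.. code-block:: java

   import org.apache.arrow.adapter.jdbc.binder.ColumnBinder;
   import org.apache.arrow.adapter.jdbc.binder.ColumnBinderArrowTypeVisitor;
   import org.apache.arrow.memory.RootAllocator;
   import org.apache.arrow.vector.BigIntVector;

   public class VisitorDispatchSketch {
     public static void main(String[] args) {
       try (RootAllocator allocator = new RootAllocator();
           BigIntVector vector = new BigIntVector("v", allocator)) {
         // Same double dispatch as ColumnBinder.forVector: ArrowType.Int with
         // bitWidth 64 resolves to a BigIntBinder via visit(ArrowType.Int).
         ColumnBinder binder =
             vector.getField().getType().accept(new ColumnBinderArrowTypeVisitor(vector, null));
         System.out.println(binder.getJdbcType()); // expected: java.sql.Types.BIGINT (-5)
       }
     }
   }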
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateDayBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateDayBinder.java
index bc16790c8f3..92686d54203 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateDayBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateDayBinder.java
@@ -22,12 +22,9 @@
import java.sql.SQLException;
import java.sql.Types;
import java.util.Calendar;
-
import org.apache.arrow.vector.DateDayVector;
-/**
- * A column binder for 32-bit dates.
- */
+/** A column binder for 32-bit dates. */
 public class DateDayBinder extends BaseColumnBinder<DateDayVector> {
private static final long MILLIS_PER_DAY = 86_400_000;
private final Calendar calendar;
@@ -46,7 +43,8 @@ public DateDayBinder(DateDayVector vector, Calendar calendar, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
// TODO: multiply with overflow
final long index = (long) rowIndex * DateDayVector.TYPE_WIDTH;
final Date value = new Date(vector.getDataBuffer().getInt(index) * MILLIS_PER_DAY);
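The `TODO: multiply with overflow` above concerns the day-to-millisecond widening: `getInt` returns an `int` day count, and only the `long` constant keeps the multiply in 64-bit arithmetic. A standalone illustration of that pitfall:

.. code-block:: java

   public class DateDaySketch {
     private static final long MILLIS_PER_DAY = 86_400_000L;

     public static void main(String[] args) {
       int days = 19_000;                        // an epoch day in early 2022
       long millis = days * MILLIS_PER_DAY;      // widened to long by the constant
       System.out.println(new java.sql.Date(millis));
       // With int-only arithmetic, larger day counts silently wrap:
       System.out.println(26_000 * 86_400_000);  // overflows int
       System.out.println(26_000 * 86_400_000L); // correct millisecond count
     }
   }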
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateMilliBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateMilliBinder.java
index 5cb91b46ac1..df2dcc8639d 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateMilliBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/DateMilliBinder.java
@@ -22,12 +22,9 @@
import java.sql.SQLException;
import java.sql.Types;
import java.util.Calendar;
-
import org.apache.arrow.vector.DateMilliVector;
-/**
- * A column binder for 64-bit dates.
- */
+/** A column binder for 64-bit dates. */
 public class DateMilliBinder extends BaseColumnBinder<DateMilliVector> {
private final Calendar calendar;
@@ -39,14 +36,14 @@ public DateMilliBinder(DateMilliVector vector, Calendar calendar) {
this(vector, calendar, Types.DATE);
}
-
public DateMilliBinder(DateMilliVector vector, Calendar calendar, int jdbcType) {
super(vector, jdbcType);
this.calendar = calendar;
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
final long index = (long) rowIndex * DateMilliVector.TYPE_WIDTH;
final Date value = new Date(vector.getDataBuffer().getLong(index));
if (calendar == null) {
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal128Binder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal128Binder.java
index 9e9d0e4fdb2..7f7e8774510 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal128Binder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal128Binder.java
@@ -21,13 +21,10 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.vector.DecimalVector;
import org.apache.arrow.vector.util.DecimalUtility;
-/**
- * A binder for 128-bit decimals.
- */
+/** A binder for 128-bit decimals. */
 public class Decimal128Binder extends BaseColumnBinder<DecimalVector> {
public Decimal128Binder(DecimalVector vector) {
this(vector, Types.DECIMAL);
@@ -38,9 +35,11 @@ public Decimal128Binder(DecimalVector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
- final BigDecimal value = DecimalUtility.getBigDecimalFromArrowBuf(
- vector.getDataBuffer(), rowIndex, vector.getScale(), DecimalVector.TYPE_WIDTH);
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
+ final BigDecimal value =
+ DecimalUtility.getBigDecimalFromArrowBuf(
+ vector.getDataBuffer(), rowIndex, vector.getScale(), DecimalVector.TYPE_WIDTH);
statement.setBigDecimal(parameterIndex, value);
}
}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal256Binder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal256Binder.java
index bd29e083b45..38de58f76e1 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal256Binder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Decimal256Binder.java
@@ -21,13 +21,10 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.vector.Decimal256Vector;
import org.apache.arrow.vector.util.DecimalUtility;
-/**
- * A binder for 256-bit decimals.
- */
+/** A binder for 256-bit decimals. */
 public class Decimal256Binder extends BaseColumnBinder<Decimal256Vector> {
public Decimal256Binder(Decimal256Vector vector) {
this(vector, Types.DECIMAL);
@@ -38,9 +35,11 @@ public Decimal256Binder(Decimal256Vector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
- final BigDecimal value = DecimalUtility.getBigDecimalFromArrowBuf(
- vector.getDataBuffer(), rowIndex, vector.getScale(), Decimal256Vector.TYPE_WIDTH);
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
+ final BigDecimal value =
+ DecimalUtility.getBigDecimalFromArrowBuf(
+ vector.getDataBuffer(), rowIndex, vector.getScale(), Decimal256Vector.TYPE_WIDTH);
statement.setBigDecimal(parameterIndex, value);
}
}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/FixedSizeBinaryBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/FixedSizeBinaryBinder.java
index 7edc5e45329..6f39ffe4db3 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/FixedSizeBinaryBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/FixedSizeBinaryBinder.java
@@ -19,17 +19,14 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
-
import org.apache.arrow.vector.FixedSizeBinaryVector;
-/**
- * A binder for fixed-width binary types.
- */
+/** A binder for fixed-width binary types. */
 public class FixedSizeBinaryBinder extends BaseColumnBinder<FixedSizeBinaryVector> {
/**
* Create a binder for the given vector using the given JDBC type for null values.
*
- * @param vector The vector to draw values from.
+ * @param vector The vector to draw values from.
* @param jdbcType The JDBC type code.
*/
public FixedSizeBinaryBinder(FixedSizeBinaryVector vector, int jdbcType) {
@@ -37,9 +34,12 @@ public FixedSizeBinaryBinder(FixedSizeBinaryVector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
byte[] binaryData = new byte[vector.getByteWidth()];
- vector.getDataBuffer().getBytes((long) rowIndex * binaryData.length, binaryData, 0, binaryData.length);
+ vector
+ .getDataBuffer()
+ .getBytes((long) rowIndex * binaryData.length, binaryData, 0, binaryData.length);
statement.setBytes(parameterIndex, binaryData);
}
}
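The chained `getBytes` call above encodes simple fixed-width addressing: row `i` occupies bytes `[i * byteWidth, (i + 1) * byteWidth)` of the data buffer. Checked with plain arrays:

.. code-block:: java

   import java.util.Arrays;

   public class FixedWidthOffsetSketch {
     public static void main(String[] args) {
       byte[] buffer = {1, 2, 3, 4, 5, 6}; // two rows with byteWidth == 3
       int byteWidth = 3;
       int rowIndex = 1;
       byte[] row = Arrays.copyOfRange(buffer, rowIndex * byteWidth, (rowIndex + 1) * byteWidth);
       System.out.println(Arrays.toString(row)); // [4, 5, 6]
     }
   }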
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float4Binder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float4Binder.java
index a471c1ebadd..af3a3b21604 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float4Binder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float4Binder.java
@@ -20,12 +20,9 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.vector.Float4Vector;
-/**
- * A binder for 32-bit floats.
- */
+/** A binder for 32-bit floats. */
 public class Float4Binder extends BaseColumnBinder<Float4Vector> {
public Float4Binder(Float4Vector vector) {
this(vector, Types.REAL);
@@ -36,7 +33,8 @@ public Float4Binder(Float4Vector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
final float value = vector.getDataBuffer().getFloat((long) rowIndex * Float4Vector.TYPE_WIDTH);
statement.setFloat(parameterIndex, value);
}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float8Binder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float8Binder.java
index 4710c3b5986..c85dc926ac0 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float8Binder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/Float8Binder.java
@@ -20,12 +20,9 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.vector.Float8Vector;
-/**
- * A binder for 64-bit floats.
- */
+/** A binder for 64-bit floats. */
 public class Float8Binder extends BaseColumnBinder<Float8Vector> {
public Float8Binder(Float8Vector vector) {
this(vector, Types.DOUBLE);
@@ -36,8 +33,10 @@ public Float8Binder(Float8Vector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
- final double value = vector.getDataBuffer().getDouble((long) rowIndex * Float8Vector.TYPE_WIDTH);
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
+ final double value =
+ vector.getDataBuffer().getDouble((long) rowIndex * Float8Vector.TYPE_WIDTH);
statement.setDouble(parameterIndex, value);
}
}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/IntBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/IntBinder.java
index 7d47f585a39..2d3bdbab4a8 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/IntBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/IntBinder.java
@@ -20,7 +20,6 @@
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Types;
-
import org.apache.arrow.vector.IntVector;
/** A column binder for 32-bit integers. */
@@ -34,7 +33,8 @@ public IntBinder(IntVector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement, int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
final int value = vector.getDataBuffer().getInt((long) rowIndex * IntVector.TYPE_WIDTH);
statement.setInt(parameterIndex, value);
}
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ListBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ListBinder.java
index b8aa61234f4..7d59cb8ea01 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ListBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/ListBinder.java
@@ -20,15 +20,12 @@
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Arrays;
-
import org.apache.arrow.vector.FieldVector;
import org.apache.arrow.vector.complex.ListVector;
import org.apache.arrow.vector.complex.impl.UnionListReader;
import org.apache.arrow.vector.util.Text;
-/**
- * A column binder for list of primitive values.
- */
+/** A column binder for list of primitive values. */
 public class ListBinder extends BaseColumnBinder<ListVector> {
private final UnionListReader listReader;
@@ -52,7 +49,9 @@ public ListBinder(ListVector vector, int jdbcType) {
try {
arrayElementClass = dataVectorClass.getMethod("getObject", Integer.TYPE).getReturnType();
} catch (NoSuchMethodException e) {
- final String message = String.format("Issue to determine type for getObject method of data vector class %s ",
+ final String message =
+ String.format(
+ "Issue to determine type for getObject method of data vector class %s ",
dataVectorClass.getName());
throw new RuntimeException(message);
}
@@ -60,7 +59,8 @@ public ListBinder(ListVector vector, int jdbcType) {
}
@Override
- public void bind(java.sql.PreparedStatement statement, int parameterIndex, int rowIndex)throws java.sql.SQLException {
+ public void bind(java.sql.PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws java.sql.SQLException {
listReader.setPosition(rowIndex);
     ArrayList<?> sourceArray = (ArrayList<?>) listReader.readObject();
Object array;
@@ -69,7 +69,9 @@ public void bind(java.sql.PreparedStatement statement, int parameterIndex, int r
Arrays.setAll((Object[]) array, sourceArray::get);
} else {
array = new String[sourceArray.size()];
- Arrays.setAll((Object[]) array, idx -> sourceArray.get(idx) != null ? sourceArray.get(idx).toString() : null);
+ Arrays.setAll(
+ (Object[]) array,
+ idx -> sourceArray.get(idx) != null ? sourceArray.get(idx).toString() : null);
}
statement.setObject(parameterIndex, array);
}
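The reflowed lambda above is the fallback for non-`String` element types: Arrow's varchar lists read back as `Text`, which must be converted before `statement.setObject`. The same conversion in isolation:

.. code-block:: java

   import java.util.ArrayList;
   import java.util.Arrays;
   import org.apache.arrow.vector.util.Text;

   public class ListElementSketch {
     public static void main(String[] args) {
       ArrayList<Text> sourceArray =
           new ArrayList<>(Arrays.asList(new Text("a"), null, new Text("c")));
       String[] array = new String[sourceArray.size()];
       Arrays.setAll(
           array, idx -> sourceArray.get(idx) != null ? sourceArray.get(idx).toString() : null);
       System.out.println(Arrays.toString(array)); // [a, null, c]
     }
   }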
diff --git a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/MapBinder.java b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/MapBinder.java
index 07391eb7cbf..6e347a18123 100644
--- a/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/MapBinder.java
+++ b/java/adapter/jdbc/src/main/java/org/apache/arrow/adapter/jdbc/binder/MapBinder.java
@@ -23,16 +23,13 @@
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Objects;
-
import org.apache.arrow.vector.complex.MapVector;
import org.apache.arrow.vector.complex.impl.UnionMapReader;
import org.apache.arrow.vector.types.pojo.ArrowType;
import org.apache.arrow.vector.types.pojo.Field;
import org.apache.arrow.vector.util.JsonStringHashMap;
-/**
- * A column binder for map of primitive values.
- */
+/** A column binder for map of primitive values. */
 public class MapBinder extends BaseColumnBinder<MapVector> {
private UnionMapReader reader;
@@ -58,8 +55,8 @@ public MapBinder(MapVector vector, int jdbcType) {
}
     List<Field> keyValueFields = Objects.requireNonNull(structField.get(0)).getChildren();
if (keyValueFields.size() != 2) {
- throw new IllegalArgumentException("Expected two children fields " +
- "inside nested Struct field in Map");
+ throw new IllegalArgumentException(
+ "Expected two children fields " + "inside nested Struct field in Map");
}
ArrowType keyType = Objects.requireNonNull(keyValueFields.get(0)).getType();
ArrowType valueType = Objects.requireNonNull(keyValueFields.get(1)).getType();
@@ -68,15 +65,16 @@ public MapBinder(MapVector vector, int jdbcType) {
}
@Override
- public void bind(PreparedStatement statement,
- int parameterIndex, int rowIndex) throws SQLException {
+ public void bind(PreparedStatement statement, int parameterIndex, int rowIndex)
+ throws SQLException {
reader.setPosition(rowIndex);
LinkedHashMap