From fb55181b5d9280d1a77c14f4d33282815338b295 Mon Sep 17 00:00:00 2001
From: ajantha-bhat
Date: Tue, 11 Apr 2023 15:39:05 +0530
Subject: [PATCH 1/5] Bump checkstyle plugin version

---
 baseline.gradle | 27 ++++++++++++++++++++++++++-
 build.gradle    |  4 ----
 2 files changed, 26 insertions(+), 5 deletions(-)

diff --git a/baseline.gradle b/baseline.gradle
index 6d5ce07caf31..11f43c8a904f 100644
--- a/baseline.gradle
+++ b/baseline.gradle
@@ -33,7 +33,11 @@ subprojects {
   // ready to enforce linting on.
   apply plugin: 'org.inferred.processors'
   if (!project.hasProperty('quick')) {
-    apply plugin: 'com.palantir.baseline-checkstyle'
+    // com.palantir.baseline:gradle-baseline-java:4.42.0 (the last version supporting Java 8) pulls
+    // in an old version of Checkstyle (9.1), which has an OutOfMemory bug: https://github.com/checkstyle/checkstyle/issues/10934
+    // So, replace the com.palantir.baseline-checkstyle plugin with the Gradle checkstyle plugin
+    // and Checkstyle 9.3 (the latest version supporting Java 8), which contains the fix.
+    apply plugin: 'checkstyle'
     apply plugin: 'com.palantir.baseline-error-prone'
   }
   apply plugin: 'com.palantir.baseline-class-uniqueness'
@@ -42,6 +46,27 @@ subprojects {
   apply plugin: 'com.palantir.baseline-release-compatibility'
   apply plugin: 'com.diffplug.spotless'
 
+  checkstyle {
+    toolVersion '9.3'
+    ignoreFailures = true
+    configDirectory = file("${rootDir}/.baseline/checkstyle")
+  }
+
+  // Configure checkstyle to be same as palantir/gradle-baseline
+  pluginManager.withPlugin("java", plugin -> {
+    JavaPluginExtension javaExt = project.getExtensions().getByType(JavaPluginExtension.class)
+    // We use the "JavadocMethod" module in our Checkstyle configuration, making
+    // the Java 8+ doclint compiler feature redundant.
+    if (javaExt.getSourceCompatibility().isJava8Compatible()) {
+      project.getTasks()
+          .withType(Javadoc.class)
+          .configureEach(javadoc ->
+              javadoc.options(javadocOptions ->
+                  ((StandardJavadocDocletOptions) javadocOptions)
+                      .addStringOption("Xdoclint:none", "-quiet")))
+    }
+  })
+
   pluginManager.withPlugin('com.diffplug.spotless') {
     spotless {
       java {
diff --git a/build.gradle b/build.gradle
index 2919308d8c9b..977a1b96fd91 100644
--- a/build.gradle
+++ b/build.gradle
@@ -32,10 +32,6 @@ buildscript {
     // in an old version of the errorprone, which doesn't work w/ Gradle 8, so bump errorprone as
     // well.
     classpath "net.ltgt.gradle:gradle-errorprone-plugin:3.0.1"
-    // com.palantir.baseline:gradle-baseline-java:4.42.0 (the last version supporting Java 8) pulls
-    // in an old version of the checkstyle(9.1), which has this OutOfMemory bug https://github.com/checkstyle/checkstyle/issues/10934
-    // So, bump checkstyle to the latest java 8 supported version(9.3) which contains the fix.
-    classpath "com.puppycrawl.tools:checkstyle:9.3"
     classpath 'com.diffplug.spotless:spotless-plugin-gradle:6.13.0'
     classpath 'gradle.plugin.org.inferred:gradle-processors:3.7.0'
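Before reading the next patch, it helps to see the kind of violation the stricter Checkstyle run surfaces. Most of the fixes below collapse redundant boolean ternaries; here is a hypothetical before/after sketch (the class and method names are illustrative, and attributing these findings to Checkstyle's boolean-simplification checks is an assumption; the series itself only says "new checkstyle errors"):

```java
public class TernarySimplification {

  // Before: the ternary merely restates the condition; Checkstyle flags this
  // kind of pattern (presumably via a boolean-simplification check).
  static boolean nonZeroBefore(int i) {
    return i == 0 ? false : true;
  }

  // After: the condition already is the boolean value.
  static boolean nonZeroAfter(int i) {
    return i != 0;
  }

  // The same simplification applies to the even/odd variant used in the
  // bloom-filter tests fixed below.
  static boolean isEvenBefore(int i) {
    return (i % 2 == 0) ? true : false;
  }

  static boolean isEvenAfter(int i) {
    return i % 2 == 0;
  }
}
```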
From 80ee9156f49f812eeca0dcb4edefb2dedf150078 Mon Sep 17 00:00:00 2001
From: ajantha-bhat
Date: Tue, 11 Apr 2023 19:39:10 +0530
Subject: [PATCH 2/5] Fix new checkstyle errors

---
 .../java/org/apache/iceberg/io/TestCloseableIterable.java    | 5 +++--
 baseline.gradle                                              | 1 -
 core/src/test/java/org/apache/iceberg/TestMetrics.java       | 2 +-
 .../test/java/org/apache/iceberg/flink/DataGenerators.java   | 4 ++--
 .../test/java/org/apache/iceberg/flink/DataGenerators.java   | 4 ++--
 .../test/java/org/apache/iceberg/flink/DataGenerators.java   | 4 ++--
 .../org/apache/iceberg/parquet/TestBloomRowGroupFilter.java  | 2 +-
 .../iceberg/spark/source/TestSparkReaderWithBloomFilter.java | 2 +-
 .../iceberg/spark/source/TestSparkReaderWithBloomFilter.java | 2 +-
 9 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java b/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java
index 0c001c53180b..bed414d3cb54 100644
--- a/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java
+++ b/api/src/test/java/org/apache/iceberg/io/TestCloseableIterable.java
@@ -195,10 +195,11 @@ public CloseableIterator iterator() {
       }
     });
 
+    AtomicInteger consumedCounter = new AtomicInteger(0);
     try (CloseableIterable concat = CloseableIterable.concat(transform)) {
-      concat.forEach(c -> c++);
+      concat.forEach(count -> consumedCounter.getAndIncrement());
     }
-    Assertions.assertThat(counter.get()).isEqualTo(items.size());
+    Assertions.assertThat(counter.get()).isEqualTo(items.size()).isEqualTo(consumedCounter.get());
   }
 
   @Test
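The change above merits a closer look: the removed `concat.forEach(c -> c++)` increments only the lambda's local copy of its parameter, so nothing was ever counted and the old assertion could not verify that every element was consumed. A Java lambda cannot reassign a captured local variable, so the fix funnels the count through an `AtomicInteger`. A minimal standalone sketch of both behaviours (the list contents are illustrative):

```java
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;

public class LambdaCounting {
  public static void main(String[] args) {
    List<Integer> items = Arrays.asList(1, 2, 3, 4, 5);

    // No-op: "c++" reassigns the lambda parameter, which is a local copy;
    // no state escapes the lambda. (A plain "int count" local would not help:
    // lambdas may only capture effectively final locals, so "count++" inside
    // the lambda would not even compile.)
    items.forEach(c -> c++);

    // The fix: the holder reference is effectively final, but its contents
    // are mutable, so the lambda can record each consumed element.
    AtomicInteger consumed = new AtomicInteger(0);
    items.forEach(c -> consumed.getAndIncrement());
    System.out.println(consumed.get()); // prints 5
  }
}
```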
diff --git a/baseline.gradle b/baseline.gradle
index 11f43c8a904f..fbc310f218ca 100644
--- a/baseline.gradle
+++ b/baseline.gradle
@@ -48,7 +48,6 @@ subprojects {
 
   checkstyle {
     toolVersion '9.3'
-    ignoreFailures = true
    configDirectory = file("${rootDir}/.baseline/checkstyle")
   }
 
diff --git a/core/src/test/java/org/apache/iceberg/TestMetrics.java b/core/src/test/java/org/apache/iceberg/TestMetrics.java
index 7eb4d3344326..32bc6299ce1b 100644
--- a/core/src/test/java/org/apache/iceberg/TestMetrics.java
+++ b/core/src/test/java/org/apache/iceberg/TestMetrics.java
@@ -454,7 +454,7 @@ public void testMetricsForTopLevelWithMultipleRowGroup() throws Exception {
 
     for (int i = 0; i < recordCount; i++) {
       Record newRecord = GenericRecord.create(SIMPLE_SCHEMA);
-      newRecord.setField("booleanCol", i == 0 ? false : true);
+      newRecord.setField("booleanCol", i != 0);
       newRecord.setField("intCol", i + 1);
       newRecord.setField("longCol", i == 0 ? null : i + 1L);
       newRecord.setField("floatCol", i + 1.0F);
diff --git a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index 367afb75f920..dfe0d63eb01c 100644
--- a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -124,10 +124,10 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                     LogicalTypes.timeMillis()
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
-                    field = new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                    return new org.apache.avro.Schema.Field("time_field", fieldSchema);
                   }
 
-                  return new org.apache.avro.Schema.Field(field, field.schema());
+                  return field;
                 })
             .collect(Collectors.toList());
     return org.apache.avro.Schema.createRecord(
diff --git a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index 367afb75f920..dfe0d63eb01c 100644
--- a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -124,10 +124,10 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                     LogicalTypes.timeMillis()
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
-                    field = new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                    return new org.apache.avro.Schema.Field("time_field", fieldSchema);
                   }
 
-                  return new org.apache.avro.Schema.Field(field, field.schema());
+                  return field;
                 })
             .collect(Collectors.toList());
     return org.apache.avro.Schema.createRecord(
diff --git a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index 367afb75f920..dfe0d63eb01c 100644
--- a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -124,10 +124,10 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                     LogicalTypes.timeMillis()
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
-                    field = new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                    return new org.apache.avro.Schema.Field("time_field", fieldSchema);
                   }
 
-                  return new org.apache.avro.Schema.Field(field, field.schema());
+                  return field;
                 })
             .collect(Collectors.toList());
     return org.apache.avro.Schema.createRecord(
diff --git a/parquet/src/test/java/org/apache/iceberg/parquet/TestBloomRowGroupFilter.java b/parquet/src/test/java/org/apache/iceberg/parquet/TestBloomRowGroupFilter.java
index e3e938e45034..34a92a9b4483 100644
--- a/parquet/src/test/java/org/apache/iceberg/parquet/TestBloomRowGroupFilter.java
+++ b/parquet/src/test/java/org/apache/iceberg/parquet/TestBloomRowGroupFilter.java
@@ -248,7 +248,7 @@ public void createInputFile() throws IOException {
       structNotNull.put("_int_field", INT_MIN_VALUE + i);
       builder.set("_struct_not_null", structNotNull); // struct with int
       builder.set("_no_stats", TOO_LONG_FOR_STATS); // value longer than 4k will produce no stats
-      builder.set("_boolean", (i % 2 == 0) ? true : false);
+      builder.set("_boolean", i % 2 == 0);
       builder.set("_time", instant.plusSeconds(i * 86400).toEpochMilli());
       builder.set("_date", instant.plusSeconds(i * 86400).getEpochSecond());
       builder.set("_timestamp", instant.plusSeconds(i * 86400).toEpochMilli());
diff --git a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java
index 7bbfc3138678..e5831b76e424 100644
--- a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java
+++ b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java
@@ -140,7 +140,7 @@ public void writeTestDataFile() throws IOException {
                 "id_string",
                 BINARY_PREFIX + (INT_MIN_VALUE + i),
                 "id_boolean",
-                (i % 2 == 0) ? true : false,
+                i % 2 == 0,
                 "id_date",
                 LocalDate.parse("2021-09-05"),
                 "id_int_decimal",
diff --git a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java
index 7bbfc3138678..e5831b76e424 100644
--- a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java
+++ b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderWithBloomFilter.java
@@ -140,7 +140,7 @@ public void writeTestDataFile() throws IOException {
                 "id_string",
                 BINARY_PREFIX + (INT_MIN_VALUE + i),
                 "id_boolean",
-                (i % 2 == 0) ? true : false,
+                i % 2 == 0,
                 "id_date",
                 LocalDate.parse("2021-09-05"),
                 "id_int_decimal",

From 9db33309822068a292871594862c8c8be68a277d Mon Sep 17 00:00:00 2001
From: ajantha-bhat
Date: Wed, 12 Apr 2023 11:34:27 +0530
Subject: [PATCH 3/5] Refactor

---
 baseline.gradle | 31 ++++++++-----------------------
 1 file changed, 8 insertions(+), 23 deletions(-)

diff --git a/baseline.gradle b/baseline.gradle
index fbc310f218ca..085f57d2c031 100644
--- a/baseline.gradle
+++ b/baseline.gradle
@@ -33,11 +33,7 @@ subprojects {
   // ready to enforce linting on.
   apply plugin: 'org.inferred.processors'
   if (!project.hasProperty('quick')) {
-    // com.palantir.baseline:gradle-baseline-java:4.42.0 (the last version supporting Java 8) pulls
-    // in an old version of Checkstyle (9.1), which has an OutOfMemory bug: https://github.com/checkstyle/checkstyle/issues/10934
-    // So, replace the com.palantir.baseline-checkstyle plugin with the Gradle checkstyle plugin
-    // and Checkstyle 9.3 (the latest version supporting Java 8), which contains the fix.
-    apply plugin: 'checkstyle'
+    apply plugin: 'com.palantir.baseline-checkstyle'
     apply plugin: 'com.palantir.baseline-error-prone'
   }
   apply plugin: 'com.palantir.baseline-class-uniqueness'
@@ -46,25 +42,14 @@ subprojects {
   apply plugin: 'com.palantir.baseline-release-compatibility'
   apply plugin: 'com.diffplug.spotless'
 
-  checkstyle {
-    toolVersion '9.3'
-    configDirectory = file("${rootDir}/.baseline/checkstyle")
-  }
-
-  // Configure checkstyle to be same as palantir/gradle-baseline
-  pluginManager.withPlugin("java", plugin -> {
-    JavaPluginExtension javaExt = project.getExtensions().getByType(JavaPluginExtension.class)
-    // We use the "JavadocMethod" module in our Checkstyle configuration, making
-    // the Java 8+ doclint compiler feature redundant.
-    if (javaExt.getSourceCompatibility().isJava8Compatible()) {
-      project.getTasks()
-          .withType(Javadoc.class)
-          .configureEach(javadoc ->
-              javadoc.options(javadocOptions ->
-                  ((StandardJavadocDocletOptions) javadocOptions)
-                      .addStringOption("Xdoclint:none", "-quiet")))
+  pluginManager.withPlugin('com.palantir.baseline-checkstyle') {
+    checkstyle {
+      // com.palantir.baseline:gradle-baseline-java:4.42.0 (the last version supporting Java 8) pulls
+      // in an old version of Checkstyle (9.1), which has an OutOfMemory bug: https://github.com/checkstyle/checkstyle/issues/10934.
+      // So, override its checkstyle version to 9.3 (the latest java 8 supported version) which contains a fix using CheckstyleExtension.
+      toolVersion '9.3'
     }
-  })
+  }
 
   pluginManager.withPlugin('com.diffplug.spotless') {
     spotless {
From 93170b8b88121517ebdec27f8dacc00f6d5f6eef Mon Sep 17 00:00:00 2001
From: ajantha-bhat
Date: Wed, 12 Apr 2023 13:35:03 +0530
Subject: [PATCH 4/5] Fix testcases

---
 baseline.gradle                                            | 2 +-
 .../test/java/org/apache/iceberg/flink/DataGenerators.java | 7 +++++--
 .../test/java/org/apache/iceberg/flink/DataGenerators.java | 7 +++++--
 .../test/java/org/apache/iceberg/flink/DataGenerators.java | 7 +++++--
 4 files changed, 16 insertions(+), 7 deletions(-)

diff --git a/baseline.gradle b/baseline.gradle
index 085f57d2c031..04fad576c10d 100644
--- a/baseline.gradle
+++ b/baseline.gradle
@@ -46,7 +46,7 @@ subprojects {
     checkstyle {
       // com.palantir.baseline:gradle-baseline-java:4.42.0 (the last version supporting Java 8) pulls
      // in an old version of Checkstyle (9.1), which has an OutOfMemory bug: https://github.com/checkstyle/checkstyle/issues/10934.
-      // So, override its checkstyle version to 9.3 (the latest java 8 supported version) which contains a fix using CheckstyleExtension.
+      // So, override its Checkstyle version, via the CheckstyleExtension, to 9.3 (the latest version supporting Java 8), which contains the fix.
      toolVersion '9.3'
     }
   }
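For readers following the Gradle mechanics above: `pluginManager.withPlugin(...)` defers its block until the named plugin is actually applied, and the `checkstyle { }` block configures Gradle's `org.gradle.api.plugins.quality.CheckstyleExtension`. A rough equivalent written as a standalone Java convention plugin (the class name is hypothetical, not part of this series) could look like this:

```java
import org.gradle.api.Plugin;
import org.gradle.api.Project;
import org.gradle.api.plugins.quality.CheckstyleExtension;

/** Pins the Checkstyle tool version once baseline-checkstyle is applied. */
public class CheckstyleVersionOverridePlugin implements Plugin<Project> {
  @Override
  public void apply(Project project) {
    // Deferred: runs only if/when the baseline plugin is applied, so builds
    // run with -Pquick (which skip checkstyle entirely) are unaffected.
    project.getPluginManager().withPlugin("com.palantir.baseline-checkstyle", applied -> {
      CheckstyleExtension checkstyle =
          project.getExtensions().getByType(CheckstyleExtension.class);
      // 9.3 is the last Checkstyle release that still runs on Java 8 and
      // contains the fix for the OutOfMemory bug referenced above.
      checkstyle.setToolVersion("9.3");
    });
  }
}
```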
diff --git a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index dfe0d63eb01c..f8d035e0abcc 100644
--- a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -113,6 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
         schemaConvertedFromIceberg.getFields().stream()
             .map(
                 field -> {
+                  org.apache.avro.Schema.Field updatedField;
                   if (field.name().equals("time_field")) {
                     // Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
                     // field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -124,10 +125,12 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                     LogicalTypes.timeMillis()
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
-                    return new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                    updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                  } else {
+                    updatedField = field;
                   }
 
-                  return field;
+                  return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
                 })
             .collect(Collectors.toList());
     return org.apache.avro.Schema.createRecord(
diff --git a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index dfe0d63eb01c..f8d035e0abcc 100644
--- a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -113,6 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
         schemaConvertedFromIceberg.getFields().stream()
             .map(
                 field -> {
+                  org.apache.avro.Schema.Field updatedField;
                   if (field.name().equals("time_field")) {
                     // Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
                     // field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -124,10 +125,12 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                     LogicalTypes.timeMillis()
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
-                    return new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                    updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                  } else {
+                    updatedField = field;
                   }
 
-                  return field;
+                  return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
                 })
             .collect(Collectors.toList());
     return org.apache.avro.Schema.createRecord(
diff --git a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index dfe0d63eb01c..f8d035e0abcc 100644
--- a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -113,6 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
         schemaConvertedFromIceberg.getFields().stream()
             .map(
                 field -> {
+                  org.apache.avro.Schema.Field updatedField;
                   if (field.name().equals("time_field")) {
                     // Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
                     // field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -124,10 +125,12 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                     LogicalTypes.timeMillis()
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
-                    return new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                    updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
+                  } else {
+                    updatedField = field;
                   }
 
-                  return field;
+                  return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
                 })
             .collect(Collectors.toList());
     return org.apache.avro.Schema.createRecord(
From 235c79fb2b142b7842efa9601f74ea800887361e Mon Sep 17 00:00:00 2001
From: ajantha-bhat
Date: Wed, 12 Apr 2023 14:57:42 +0530
Subject: [PATCH 5/5] Address comments

---
 .../test/java/org/apache/iceberg/flink/DataGenerators.java | 4 +---
 .../test/java/org/apache/iceberg/flink/DataGenerators.java | 4 +---
 .../test/java/org/apache/iceberg/flink/DataGenerators.java | 4 +---
 3 files changed, 3 insertions(+), 9 deletions(-)

diff --git a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index f8d035e0abcc..1363c152e07c 100644
--- a/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.15/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -113,7 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
         schemaConvertedFromIceberg.getFields().stream()
             .map(
                 field -> {
-                  org.apache.avro.Schema.Field updatedField;
+                  org.apache.avro.Schema.Field updatedField = field;
                   if (field.name().equals("time_field")) {
                     // Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
                     // field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -126,8 +126,6 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
                     updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
-                  } else {
-                    updatedField = field;
                   }
 
                   return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
diff --git a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index f8d035e0abcc..1363c152e07c 100644
--- a/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++ b/flink/v1.16/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -113,7 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
         schemaConvertedFromIceberg.getFields().stream()
             .map(
                 field -> {
-                  org.apache.avro.Schema.Field updatedField;
+                  org.apache.avro.Schema.Field updatedField = field;
                   if (field.name().equals("time_field")) {
                     // Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
                     // field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -126,8 +126,6 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
                     updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
-                  } else {
-                    updatedField = field;
                   }
 
                   return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
diff --git a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
index f8d035e0abcc..1363c152e07c 100644
--- a/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
+++
b/flink/v1.17/flink/src/test/java/org/apache/iceberg/flink/DataGenerators.java
@@ -113,7 +113,7 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
         schemaConvertedFromIceberg.getFields().stream()
             .map(
                 field -> {
-                  org.apache.avro.Schema.Field updatedField;
+                  org.apache.avro.Schema.Field updatedField = field;
                   if (field.name().equals("time_field")) {
                     // Iceberg's AvroSchemaUtil uses timestamp-micros with Long value for time
                     // field, while AvroToRowDataConverters#convertToTime() always looks for
@@ -126,8 +126,6 @@ private org.apache.avro.Schema fixupAvroSchemaConvertedFromIcebergSchema(
                         .addToSchema(
                             org.apache.avro.Schema.create(org.apache.avro.Schema.Type.INT));
                     updatedField = new org.apache.avro.Schema.Field("time_field", fieldSchema);
-                  } else {
-                    updatedField = field;
                   }
 
                   return new org.apache.avro.Schema.Field(updatedField, updatedField.schema());
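A closing note on the Avro pattern that patches 4 and 5 converge on: a `Schema.Field` that has already been attached to one record schema cannot be added to another (Avro assigns it a position on first use), which is why every field, changed or not, is passed through the `Schema.Field(Field, Schema)` copy constructor before being collected into the new record. A self-contained sketch of the same fixup (the record-construction details are illustrative, assuming Avro 1.9+):

```java
import java.util.List;
import java.util.stream.Collectors;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;

public class TimeFieldFixup {

  /** Rewrites "time_field" to time-millis/INT and deep-copies every field. */
  static Schema fixup(Schema converted) {
    List<Schema.Field> fields =
        converted.getFields().stream()
            .map(
                field -> {
                  Schema.Field updatedField = field;
                  if (field.name().equals("time_field")) {
                    // Flink's converters expect time as time-millis over INT rather
                    // than the timestamp-micros over LONG the conversion produced.
                    Schema timeMillis =
                        LogicalTypes.timeMillis().addToSchema(Schema.create(Schema.Type.INT));
                    updatedField = new Schema.Field("time_field", timeMillis);
                  }
                  // A Field already attached to a record has a position and cannot be
                  // reused; the copy constructor yields a fresh, unattached copy.
                  return new Schema.Field(updatedField, updatedField.schema());
                })
            .collect(Collectors.toList());
    return Schema.createRecord(
        converted.getName(), converted.getDoc(), converted.getNamespace(), false, fields);
  }
}
```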