diff --git a/.github/workflows/pr_build_linux.yml b/.github/workflows/pr_build_linux.yml index fe7df906de..9b918ad8b2 100644 --- a/.github/workflows/pr_build_linux.yml +++ b/.github/workflows/pr_build_linux.yml @@ -151,7 +151,7 @@ jobs: org.apache.comet.objectstore.NativeConfigSuite - name: "sql" value: | - ${{ matrix.profile.maven_opts != 'Spark 3.4, JDK 11, Scala 2.12' && 'org.apache.spark.sql.CometToPrettyStringSuite' || ''}} + org.apache.spark.sql.CometToPrettyStringSuite fail-fast: false name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}] runs-on: ${{ matrix.os }} @@ -171,7 +171,7 @@ jobs: uses: ./.github/actions/java-test with: artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - suites: ${{ matrix.suite.value }} + suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }} maven_opts: ${{ matrix.profile.maven_opts }} scan_impl: ${{ matrix.profile.scan_impl }} upload-test-reports: true \ No newline at end of file diff --git a/.github/workflows/pr_build_macos.yml b/.github/workflows/pr_build_macos.yml index 1abe644f51..fb6a8295bc 100644 --- a/.github/workflows/pr_build_macos.yml +++ b/.github/workflows/pr_build_macos.yml @@ -116,7 +116,7 @@ jobs: org.apache.comet.objectstore.NativeConfigSuite - name: "sql" value: | - ${{ matrix.profile.maven_opts != 'Spark 3.4, JDK 11, Scala 2.12' && 'org.apache.spark.sql.CometToPrettyStringSuite' || ''}} + org.apache.spark.sql.CometToPrettyStringSuite fail-fast: false name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}] runs-on: ${{ matrix.os }} @@ -133,5 +133,5 @@ jobs: uses: ./.github/actions/java-test with: artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }} - suites: ${{ matrix.suite.value }} + suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }} maven_opts: ${{ matrix.profile.maven_opts }} diff --git a/native/spark-expr/src/conversion_funcs/cast.rs b/native/spark-expr/src/conversion_funcs/cast.rs index 0c7b437a56..af997ccf80 100644 --- a/native/spark-expr/src/conversion_funcs/cast.rs +++ b/native/spark-expr/src/conversion_funcs/cast.rs @@ -248,7 +248,7 @@ fn can_cast_from_string(to_type: &DataType, options: &SparkCastOptions) -> bool } } -fn can_cast_to_string(from_type: &DataType, options: &SparkCastOptions) -> bool { +fn can_cast_to_string(from_type: &DataType, _options: &SparkCastOptions) -> bool { use DataType::*; match from_type { Boolean | Int8 | Int16 | Int32 | Int64 | Date32 | Date64 | Timestamp(_, _) => true, @@ -267,7 +267,7 @@ fn can_cast_to_string(from_type: &DataType, options: &SparkCastOptions) -> bool Binary => true, Struct(fields) => fields .iter() - .all(|f| can_cast_to_string(f.data_type(), options)), + .all(|f| can_cast_to_string(f.data_type(), _options)), _ => false, } } diff --git a/spark/src/main/scala/org/apache/comet/serde/literals.scala b/spark/src/main/scala/org/apache/comet/serde/literals.scala index c18755e07c..312f12a4c5 100644 --- a/spark/src/main/scala/org/apache/comet/serde/literals.scala +++ b/spark/src/main/scala/org/apache/comet/serde/literals.scala @@ -132,7 +132,9 @@ object CometLiteral extends CometExpressionSerde[Literal] with Logging { case ByteType => array.foreach(v => { val casted = v.asInstanceOf[lang.Byte] - listLiteralBuilder.addByteValues(casted.intValue()) + listLiteralBuilder.addByteValues( + if (casted != null) casted.intValue() + else null.asInstanceOf[Integer]) listLiteralBuilder.addNullMask(casted != null) }) case ShortType =>