diff --git a/.github/workflows/spark_sql_test_ansi.yml b/.github/workflows/spark_sql_test_ansi.yml
index 6553ceacdb..0f579ef3fb 100644
--- a/.github/workflows/spark_sql_test_ansi.yml
+++ b/.github/workflows/spark_sql_test_ansi.yml
@@ -43,7 +43,7 @@ jobs:
matrix:
os: [ubuntu-24.04]
java-version: [17]
- spark-version: [{short: '4.0', full: '4.0.0'}]
+ spark-version: [{short: '4.0', full: '4.0.1'}]
module:
- {name: "catalyst", args1: "catalyst/test", args2: ""}
        - {name: "sql/core-1", args1: "", args2: "sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest"}
diff --git a/dev/diffs/4.0.0.diff b/dev/diffs/4.0.1.diff
similarity index 100%
rename from dev/diffs/4.0.0.diff
rename to dev/diffs/4.0.1.diff
diff --git a/docs/source/contributor-guide/roadmap.md b/docs/source/contributor-guide/roadmap.md
index 176ab7d18c..59b84ed0c4 100644
--- a/docs/source/contributor-guide/roadmap.md
+++ b/docs/source/contributor-guide/roadmap.md
@@ -33,9 +33,9 @@ releases. Once this integration is complete, we plan on switching from the `nati
[#2060]: https://github.com/apache/datafusion-comet/issues/2060
[#2189]: https://github.com/apache/datafusion-comet/issues/2189
-### Spark 4.0.0 Support
+### Spark 4.0 Support
-Comet has experimental support for Spark 4.0.0, but there is more work to do ([#1637]), such as enabling
+Comet has experimental support for Spark 4.0, but there is more work to do ([#1637]), such as enabling
more Spark SQL tests and fully implementing ANSI support ([#313]) for all supported expressions.
[#313]: https://github.com/apache/datafusion-comet/issues/313
diff --git a/docs/source/user-guide/latest/installation.md b/docs/source/user-guide/latest/installation.md
index b7d1f706f8..780e7cfe62 100644
--- a/docs/source/user-guide/latest/installation.md
+++ b/docs/source/user-guide/latest/installation.md
@@ -47,9 +47,9 @@ Note that we do not test the full matrix of supported Java and Scala versions in
Experimental support is provided for the following versions of Apache Spark and is intended for development/testing
use only and should not be used in production yet.
-| Spark Version | Java Version | Scala Version | Comet Tests in CI | Spark SQL Tests in CI |
-| -------------- | ------------ | ------------- | ----------------- |-----------------------|
-| 4.0.0 | 17 | 2.13 | Yes | Yes |
+| Spark Version | Java Version | Scala Version | Comet Tests in CI | Spark SQL Tests in CI |
+|---------------| ------------ | ------------- | ----------------- |-----------------------|
+| 4.0.1 | 17 | 2.13 | Yes | Yes |
Note that Comet may not fully work with proprietary forks of Apache Spark such as the Spark versions offered by
Cloud Service Providers.
diff --git a/native/core/src/common/bit.rs b/native/core/src/common/bit.rs
index 6bfc2c89c9..15055e9d62 100644
--- a/native/core/src/common/bit.rs
+++ b/native/core/src/common/bit.rs
@@ -657,7 +657,7 @@ impl BitReader {
debug_assert!(self.bit_offset == 0 || i == num_bits_to_read);
// Check if there's opportunity to directly copy bytes using `memcpy`.
- if (offset + i) % 8 == 0 && i < num_bits_to_read {
+ if (offset + i).is_multiple_of(8) && i < num_bits_to_read {
let num_bytes = (num_bits_to_read - i) / 8;
let dst_byte_offset = (offset + i) / 8;
if num_bytes > 0 {
@@ -671,7 +671,7 @@ impl BitReader {
}
}
- debug_assert!((offset + i) % 8 != 0 || num_bits_to_read - i < 8);
+ debug_assert!(!(offset + i).is_multiple_of(8) || num_bits_to_read - i < 8);
// Now copy the remaining bits if there's any.
while i < num_bits_to_read {
diff --git a/pom.xml b/pom.xml
index 0b0b95a020..8fd7382f53 100644
--- a/pom.xml
+++ b/pom.xml
@@ -616,7 +616,7 @@ under the License.
     <scala.version>2.13.16</scala.version>
     <scala.binary.version>2.13</scala.binary.version>
-    <spark.version>4.0.0</spark.version>
+    <spark.version>4.0.1</spark.version>
     <spark.version.short>4.0</spark.version.short>
     <parquet.version>1.15.2</parquet.version>
4.13.6