diff --git a/.github/actions/java-test/action.yaml b/.github/actions/java-test/action.yaml
index b205092ff0..fa8302ea66 100644
--- a/.github/actions/java-test/action.yaml
+++ b/.github/actions/java-test/action.yaml
@@ -68,7 +68,7 @@ runs:
env:
COMET_PARQUET_SCAN_IMPL: ${{ inputs.scan_impl }}
run: |
- MAVEN_OPTS="-XX:+UnlockDiagnosticVMOptions -XX:+ShowMessageBoxOnError -XX:+HeapDumpOnOutOfMemoryError -XX:ErrorFile=./hs_err_pid%p.log" SPARK_HOME=`pwd` ./mvnw -B clean install ${{ inputs.maven_opts }}
+ MAVEN_OPTS="-Xmx4G -Xms2G -XX:+UnlockDiagnosticVMOptions -XX:+ShowMessageBoxOnError -XX:+HeapDumpOnOutOfMemoryError -XX:ErrorFile=./hs_err_pid%p.log" SPARK_HOME=`pwd` ./mvnw -B clean install ${{ inputs.maven_opts }}
- name: Run specified tests
shell: bash
if: ${{ inputs.suites != '' }}
@@ -77,7 +77,7 @@ runs:
run: |
MAVEN_SUITES="$(echo "${{ inputs.suites }}" | paste -sd, -)"
echo "Running with MAVEN_SUITES=$MAVEN_SUITES"
- MAVEN_OPTS="-DwildcardSuites=$MAVEN_SUITES -XX:+UnlockDiagnosticVMOptions -XX:+ShowMessageBoxOnError -XX:+HeapDumpOnOutOfMemoryError -XX:ErrorFile=./hs_err_pid%p.log" SPARK_HOME=`pwd` ./mvnw -B clean install ${{ inputs.maven_opts }}
+ MAVEN_OPTS="-Xmx4G -Xms2G -DwildcardSuites=$MAVEN_SUITES -XX:+UnlockDiagnosticVMOptions -XX:+ShowMessageBoxOnError -XX:+HeapDumpOnOutOfMemoryError -XX:ErrorFile=./hs_err_pid%p.log" SPARK_HOME=`pwd` ./mvnw -B clean install ${{ inputs.maven_opts }}
- name: Upload crash logs
if: failure()
uses: actions/upload-artifact@v4
diff --git a/.github/workflows/pr_build_linux.yml b/.github/workflows/pr_build_linux.yml
index a0373e15e9..4f97d8a949 100644
--- a/.github/workflows/pr_build_linux.yml
+++ b/.github/workflows/pr_build_linux.yml
@@ -149,6 +149,9 @@ jobs:
runs-on: ${{ matrix.os }}
container:
image: amd64/rust
+ env:
+ JAVA_TOOL_OPTIONS: ${{ matrix.profile.java_version == '17' && '--add-exports=java.base/sun.nio.ch=ALL-UNNAMED --add-exports=java.base/sun.util.calendar=ALL-UNNAMED --add-opens=java.base/java.nio=ALL-UNNAMED --add-opens=java.base/java.lang=ALL-UNNAMED' || '' }}
+
steps:
- uses: actions/checkout@v4
- name: Setup Rust & Java toolchain
diff --git a/.github/workflows/pr_build_macos.yml b/.github/workflows/pr_build_macos.yml
index 0ae35b7f8c..dd640a13d1 100644
--- a/.github/workflows/pr_build_macos.yml
+++ b/.github/workflows/pr_build_macos.yml
@@ -57,9 +57,11 @@ jobs:
java_version: "17"
maven_opts: "-Pspark-3.5 -Pscala-2.13"
- - name: "Spark 4.0, JDK 17, Scala 2.13"
- java_version: "17"
- maven_opts: "-Pspark-4.0 -Pscala-2.13"
+ # TODO fails with OOM
+ # https://github.com/apache/datafusion-comet/issues/1949
+# - name: "Spark 4.0, JDK 17, Scala 2.13"
+# java_version: "17"
+# maven_opts: "-Pspark-4.0 -Pscala-2.13"
suite:
- name: "fuzz"
diff --git a/.github/workflows/spark_sql_test_ansi.yml b/.github/workflows/spark_sql_test_ansi.yml
index a3e85307d9..c3b8e47725 100644
--- a/.github/workflows/spark_sql_test_ansi.yml
+++ b/.github/workflows/spark_sql_test_ansi.yml
@@ -43,7 +43,7 @@ jobs:
matrix:
os: [ubuntu-24.04]
java-version: [17]
- spark-version: [{short: '4.0', full: '4.0.0-preview1'}]
+ spark-version: [{short: '4.0', full: '4.0.0'}]
module:
- {name: "catalyst", args1: "catalyst/test", args2: ""}
- {name: "sql/core-1", args1: "", args2: sql/testOnly * -- -l org.apache.spark.tags.ExtendedSQLTest -l org.apache.spark.tags.SlowSQLTest}
diff --git a/common/src/main/java/org/apache/comet/parquet/TypeUtil.java b/common/src/main/java/org/apache/comet/parquet/TypeUtil.java
index 89d436f76d..1e9d5b937c 100644
--- a/common/src/main/java/org/apache/comet/parquet/TypeUtil.java
+++ b/common/src/main/java/org/apache/comet/parquet/TypeUtil.java
@@ -74,7 +74,8 @@ public static ColumnDescriptor convertToParquet(StructField field) {
builder = Types.primitive(PrimitiveType.PrimitiveTypeName.INT64, repetition);
} else if (type == DataTypes.BinaryType) {
builder = Types.primitive(PrimitiveType.PrimitiveTypeName.BINARY, repetition);
- } else if (type == DataTypes.StringType) {
+ } else if (type == DataTypes.StringType
+ || (type.sameType(DataTypes.StringType) && isSpark40Plus())) {
builder =
Types.primitive(PrimitiveType.PrimitiveTypeName.BINARY, repetition)
.as(LogicalTypeAnnotation.stringType());
@@ -199,6 +200,13 @@ && isUnsignedIntTypeMatched(logicalTypeAnnotation, 64)) {
|| canReadAsBinaryDecimal(descriptor, sparkType)) {
return;
}
+
+ if (sparkType.sameType(DataTypes.StringType) && isSpark40Plus()) {
+ LogicalTypeAnnotation lta = descriptor.getPrimitiveType().getLogicalTypeAnnotation();
+ if (lta instanceof LogicalTypeAnnotation.StringLogicalTypeAnnotation) {
+ return;
+ }
+ }
break;
case FIXED_LEN_BYTE_ARRAY:
if (canReadAsIntDecimal(descriptor, sparkType)
@@ -314,7 +322,7 @@ private static boolean isUnsignedIntTypeMatched(
&& ((IntLogicalTypeAnnotation) logicalTypeAnnotation).getBitWidth() == bitWidth;
}
- private static boolean isSpark40Plus() {
+ static boolean isSpark40Plus() {
return package$.MODULE$.SPARK_VERSION().compareTo("4.0") >= 0;
}
}
diff --git a/common/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimTaskMetrics.scala b/common/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimTaskMetrics.scala
index 5b2a5fb5bf..b6a1b56d97 100644
--- a/common/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimTaskMetrics.scala
+++ b/common/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimTaskMetrics.scala
@@ -25,5 +25,5 @@ import org.apache.spark.util.AccumulatorV2
object ShimTaskMetrics {
def getTaskAccumulator(taskMetrics: TaskMetrics): Option[AccumulatorV2[_, _]] =
- taskMetrics.externalAccums.lastOption
+ taskMetrics._externalAccums.lastOption
}
diff --git a/common/src/test/java/org/apache/comet/parquet/TestFileReader.java b/common/src/test/java/org/apache/comet/parquet/TestFileReader.java
index 240aa07ac5..d380fc16a9 100644
--- a/common/src/test/java/org/apache/comet/parquet/TestFileReader.java
+++ b/common/src/test/java/org/apache/comet/parquet/TestFileReader.java
@@ -74,6 +74,8 @@
import static org.junit.Assert.*;
import static org.junit.Assert.assertEquals;
+import static org.apache.comet.parquet.TypeUtil.isSpark40Plus;
+
@SuppressWarnings("deprecation")
public class TestFileReader {
private static final MessageType SCHEMA =
@@ -609,7 +611,9 @@ public void testColumnIndexReadWrite() throws Exception {
assertEquals(1, offsetIndex.getFirstRowIndex(1));
assertEquals(3, offsetIndex.getFirstRowIndex(2));
- assertNull(indexReader.readColumnIndex(footer.getBlocks().get(2).getColumns().get(0)));
+ if (!isSpark40Plus()) { // TODO: https://github.com/apache/datafusion-comet/issues/1948
+ assertNull(indexReader.readColumnIndex(footer.getBlocks().get(2).getColumns().get(0)));
+ }
}
}
diff --git a/dev/diffs/4.0.0-preview1.diff b/dev/diffs/4.0.0.diff
similarity index 77%
rename from dev/diffs/4.0.0-preview1.diff
rename to dev/diffs/4.0.0.diff
index e57a245f04..1a43fe3c88 100644
--- a/dev/diffs/4.0.0-preview1.diff
+++ b/dev/diffs/4.0.0.diff
@@ -1,17 +1,17 @@
diff --git a/pom.xml b/pom.xml
-index a4b1b2c3c9f..16863e90255 100644
+index 443d46a4302..3b8483173f1 100644
--- a/pom.xml
+++ b/pom.xml
-@@ -147,6 +147,8 @@
- 0.10.0
- 2.5.2
+@@ -148,6 +148,8 @@
+ 4.0.3
+ 2.5.3
2.0.8
+ 4.0
+ 0.9.0-SNAPSHOT
- 2.13.14
+ 2.13.16
2.13
- 4.0.0-preview1
+ 4.0.0
4.0
- 1.13.1
- 4.9.5
- 2.0.13
+ 1.15.2
+ 4.13.6
+ 2.0.16
spark-4.0
not-needed-yet
@@ -637,9 +637,9 @@ under the License.
scala-2.13
- 2.13.14
+ 2.13.16
2.13
- 4.9.5
+ 4.13.6
@@ -1074,9 +1074,19 @@ under the License.
javax.annotation.meta.TypeQualifierNickname
+
+ com.google.guava
+ guava
+
+ com.google.thirdparty.publicsuffix.TrieParser
+ com.google.thirdparty.publicsuffix.PublicSuffixPatterns
+ com.google.thirdparty.publicsuffix.PublicSuffixType
+
+
true
true
+
diff --git a/spark/src/main/java/org/apache/spark/shuffle/comet/CometBoundedShuffleMemoryAllocator.java b/spark/src/main/java/org/apache/spark/shuffle/comet/CometBoundedShuffleMemoryAllocator.java
index 051b1c6fae..54e9dc6848 100644
--- a/spark/src/main/java/org/apache/spark/shuffle/comet/CometBoundedShuffleMemoryAllocator.java
+++ b/spark/src/main/java/org/apache/spark/shuffle/comet/CometBoundedShuffleMemoryAllocator.java
@@ -80,14 +80,10 @@ public final class CometBoundedShuffleMemoryAllocator extends CometShuffleMemory
private synchronized long _acquireMemory(long size) {
if (allocatedMemory >= totalMemory) {
throw new SparkOutOfMemoryError(
- "Unable to acquire "
- + size
- + " bytes of memory, current usage "
- + "is "
- + allocatedMemory
- + " bytes and max memory is "
- + totalMemory
- + " bytes");
+ "UNABLE_TO_ACQUIRE_MEMORY",
+ java.util.Map.of(
+ "requestedBytes", String.valueOf(size),
+ "receivedBytes", String.valueOf(totalMemory - allocatedMemory)));
}
long allocationSize = Math.min(size, totalMemory - allocatedMemory);
allocatedMemory += allocationSize;
@@ -127,12 +123,10 @@ private synchronized MemoryBlock allocateMemoryBlock(long required) {
allocatedMemory -= got;
throw new SparkOutOfMemoryError(
- "Unable to acquire "
- + required
- + " bytes of memory, got "
- + got
- + " bytes. Available: "
- + (totalMemory - allocatedMemory));
+ "UNABLE_TO_ACQUIRE_MEMORY",
+ java.util.Map.of(
+ "requestedBytes", String.valueOf(required),
+ "receivedBytes", String.valueOf(totalMemory - allocatedMemory)));
}
int pageNumber = allocatedPages.nextClearBit(0);
diff --git a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
index e0fadc3144..e0797dc668 100644
--- a/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
+++ b/spark/src/main/scala/org/apache/comet/serde/QueryPlanSerde.scala
@@ -1501,7 +1501,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
case s: StringDecode =>
// Right child is the encoding expression.
- s.right match {
+ s.charset match {
case Literal(str, DataTypes.StringType)
if str.toString.toLowerCase(Locale.ROOT) == "utf-8" =>
// decode(col, 'utf-8') can be treated as a cast with "try" eval mode that puts nulls
@@ -1511,7 +1511,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
expr,
None,
DataTypes.StringType,
- exprToProtoInternal(s.left, inputs, binding).get,
+ exprToProtoInternal(s.bin, inputs, binding).get,
CometEvalMode.TRY)
case _ =>
withInfo(expr, "Comet only supports decoding with 'utf-8'.")
diff --git a/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeScanExec.scala b/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeScanExec.scala
index 92b2e6a88e..d989db2f4f 100644
--- a/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeScanExec.scala
+++ b/spark/src/main/scala/org/apache/spark/sql/comet/CometNativeScanExec.scala
@@ -27,6 +27,7 @@ import org.apache.spark.sql.catalyst._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical.{Partitioning, UnknownPartitioning}
+import org.apache.spark.sql.comet.shims.ShimStreamSourceAwareSparkPlan
import org.apache.spark.sql.execution._
import org.apache.spark.sql.execution.datasources._
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
@@ -56,7 +57,8 @@ case class CometNativeScanExec(
originalPlan: FileSourceScanExec,
override val serializedPlanOpt: SerializedPlan)
extends CometLeafExec
- with DataSourceScanExec {
+ with DataSourceScanExec
+ with ShimStreamSourceAwareSparkPlan {
override lazy val metadata: Map[String, String] = originalPlan.metadata
diff --git a/spark/src/main/spark-3.x/org/apache/spark/sql/comet/shims/ShimStreamSourceAwareSparkPlan.scala b/spark/src/main/spark-3.x/org/apache/spark/sql/comet/shims/ShimStreamSourceAwareSparkPlan.scala
new file mode 100644
index 0000000000..845c2170ea
--- /dev/null
+++ b/spark/src/main/spark-3.x/org/apache/spark/sql/comet/shims/ShimStreamSourceAwareSparkPlan.scala
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.comet.shims
+
+trait ShimStreamSourceAwareSparkPlan {
+}
diff --git a/spark/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimCometScanExec.scala b/spark/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimCometScanExec.scala
index 7fe9ea53a4..d2cda6ecb8 100644
--- a/spark/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimCometScanExec.scala
+++ b/spark/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimCometScanExec.scala
@@ -30,7 +30,7 @@ import org.apache.spark.sql.execution.{FileSourceScanExec, PartitionedFileUtil,
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType
-trait ShimCometScanExec {
+trait ShimCometScanExec extends ShimStreamSourceAwareSparkPlan {
def wrapped: FileSourceScanExec
lazy val fileConstantMetadataColumns: Seq[AttributeReference] =
@@ -56,7 +56,7 @@ trait ShimCometScanExec {
protected def isNeededForSchema(sparkSchema: StructType): Boolean = false
protected def getPartitionedFile(f: FileStatusWithMetadata, p: PartitionDirectory): PartitionedFile =
- PartitionedFileUtil.getPartitionedFile(f, p.values, 0, f.getLen)
+ PartitionedFileUtil.getPartitionedFile(f, f.getPath, p.values, 0, f.getLen)
protected def splitFiles(sparkSession: SparkSession,
file: FileStatusWithMetadata,
@@ -64,7 +64,7 @@ trait ShimCometScanExec {
isSplitable: Boolean,
maxSplitBytes: Long,
partitionValues: InternalRow): Seq[PartitionedFile] =
- PartitionedFileUtil.splitFiles(file, isSplitable, maxSplitBytes, partitionValues)
+ PartitionedFileUtil.splitFiles(file, filePath, isSplitable, maxSplitBytes, partitionValues)
protected def getPushedDownFilters(relation: HadoopFsRelation , dataFilters: Seq[Expression]): Seq[Filter] = {
translateToV1Filters(relation, dataFilters, _.toLiteral)
diff --git a/spark/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimStreamSourceAwareSparkPlan.scala b/spark/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimStreamSourceAwareSparkPlan.scala
new file mode 100644
index 0000000000..749f8fa164
--- /dev/null
+++ b/spark/src/main/spark-4.0/org/apache/spark/sql/comet/shims/ShimStreamSourceAwareSparkPlan.scala
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.comet.shims
+
+import org.apache.spark.sql.connector.read.streaming.SparkDataStream
+import org.apache.spark.sql.execution.StreamSourceAwareSparkPlan
+
+trait ShimStreamSourceAwareSparkPlan extends StreamSourceAwareSparkPlan {
+ override def getStream: Option[SparkDataStream] = None
+}
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q1/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q1/explain.txt
index 3e544cfec5..342160b00c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q1/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q1/explain.txt
@@ -207,12 +207,12 @@ Input [5]: [ctr_customer_sk#11, ctr_store_sk#12, ctr_total_return#13, (avg(ctr_t
Output [2]: [s_store_sk#30, s_state#31]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk)]
+PushedFilters: [IsNotNull(s_state), IsNotNull(s_store_sk)]
ReadSchema: struct
(34) CometFilter
Input [2]: [s_store_sk#30, s_state#31]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#31, 2, true, false, true) = TN) AND isnotnull(s_store_sk#30))
+Condition : ((isnotnull(s_state#31) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#31, 2)) = TN)) AND isnotnull(s_store_sk#30))
(35) CometProject
Input [2]: [s_store_sk#30, s_state#31]
@@ -248,7 +248,7 @@ Condition : isnotnull(c_customer_sk#32)
(42) CometProject
Input [2]: [c_customer_sk#32, c_customer_id#33]
-Arguments: [c_customer_sk#32, c_customer_id#34], [c_customer_sk#32, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_customer_id#33, 16, true, false, true) AS c_customer_id#34]
+Arguments: [c_customer_sk#32, c_customer_id#34], [c_customer_sk#32, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_customer_id#33, 16)) AS c_customer_id#34]
(43) CometColumnarToRow [codegen id : 6]
Input [2]: [c_customer_sk#32, c_customer_id#34]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt
index 2036dea193..f9346ea0c1 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q10/explain.txt
@@ -223,7 +223,7 @@ Condition : isnotnull(cd_demo_sk#22)
(38) CometProject
Input [9]: [cd_demo_sk#22, cd_gender#23, cd_marital_status#24, cd_education_status#25, cd_purchase_estimate#26, cd_credit_rating#27, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30]
-Arguments: [cd_demo_sk#22, cd_gender#31, cd_marital_status#32, cd_education_status#33, cd_purchase_estimate#26, cd_credit_rating#34, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30], [cd_demo_sk#22, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#23, 1, true, false, true) AS cd_gender#31, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#24, 1, true, false, true) AS cd_marital_status#32, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#25, 20, true, false, true) AS cd_education_status#33, cd_purchase_estimate#26, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_credit_rating#27, 10, true, false, true) AS cd_credit_rating#34, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30]
+Arguments: [cd_demo_sk#22, cd_gender#31, cd_marital_status#32, cd_education_status#33, cd_purchase_estimate#26, cd_credit_rating#34, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30], [cd_demo_sk#22, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#23, 1)) AS cd_gender#31, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#24, 1)) AS cd_marital_status#32, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#25, 20)) AS cd_education_status#33, cd_purchase_estimate#26, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_credit_rating#27, 10)) AS cd_credit_rating#34, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30]
(39) CometColumnarToRow [codegen id : 4]
Input [9]: [cd_demo_sk#22, cd_gender#31, cd_marital_status#32, cd_education_status#33, cd_purchase_estimate#26, cd_credit_rating#34, cd_dep_count#28, cd_dep_employed_count#29, cd_dep_college_count#30]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt
index 195faa2217..fe08f8924c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q11/explain.txt
@@ -90,11 +90,11 @@ ReadSchema: struct
(9) CometFilter
Input [3]: [ca_address_sk#13, ca_state#14, ca_country#15]
-Condition : (((isnotnull(ca_country#15) AND (ca_country#15 = United States)) AND isnotnull(ca_address_sk#13)) AND ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#14, 2, true, false, true) IN (TX,OH) OR staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#14, 2, true, false, true) IN (OR,NM,KY)) OR staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#14, 2, true, false, true) IN (VA,TX,MS)))
+Condition : (((isnotnull(ca_country#15) AND (ca_country#15 = United States)) AND isnotnull(ca_address_sk#13)) AND ((static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#14, 2)) IN (TX,OH) OR static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#14, 2)) IN (OR,NM,KY)) OR static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#14, 2)) IN (VA,TX,MS)))
(10) CometProject
Input [3]: [ca_address_sk#13, ca_state#14, ca_country#15]
-Arguments: [ca_address_sk#13, ca_state#16], [ca_address_sk#13, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#14, 2, true, false, true) AS ca_state#16]
+Arguments: [ca_address_sk#13, ca_state#16], [ca_address_sk#13, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#14, 2)) AS ca_state#16]
(11) CometBroadcastExchange
Input [2]: [ca_address_sk#13, ca_state#16]
@@ -137,11 +137,11 @@ ReadSchema: struct
(24) CometFilter
Input [2]: [ca_address_sk#16, ca_state#17]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#17, 2, true, false, true) = GA) AND isnotnull(ca_address_sk#16))
+Condition : ((isnotnull(ca_state#17) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#17, 2)) = GA)) AND isnotnull(ca_address_sk#16))
(25) CometProject
Input [2]: [ca_address_sk#16, ca_state#17]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt
index 76dec1900a..ff35520d2b 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q17/explain.txt
@@ -112,12 +112,12 @@ Arguments: [ss_item_sk#1, ss_store_sk#3, ss_quantity#5, ss_sold_date_sk#6, sr_re
Output [2]: [d_date_sk#19, d_quarter_name#20]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
-PushedFilters: [IsNotNull(d_date_sk)]
+PushedFilters: [IsNotNull(d_quarter_name), IsNotNull(d_date_sk)]
ReadSchema: struct
(14) CometFilter
Input [2]: [d_date_sk#19, d_quarter_name#20]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, d_quarter_name#20, 6, true, false, true) = 2001Q1) AND isnotnull(d_date_sk#19))
+Condition : ((isnotnull(d_quarter_name#20) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(d_quarter_name#20, 6)) = 2001Q1)) AND isnotnull(d_date_sk#19))
(15) CometProject
Input [2]: [d_date_sk#19, d_quarter_name#20]
@@ -145,7 +145,7 @@ ReadSchema: struct
(20) CometFilter
Input [2]: [d_date_sk#21, d_quarter_name#22]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, d_quarter_name#22, 6, true, false, true) IN (2001Q1,2001Q2,2001Q3) AND isnotnull(d_date_sk#21))
+Condition : (static_invoke(CharVarcharCodegenUtils.readSidePadding(d_quarter_name#22, 6)) IN (2001Q1,2001Q2,2001Q3) AND isnotnull(d_date_sk#21))
(21) CometProject
Input [2]: [d_date_sk#21, d_quarter_name#22]
@@ -189,7 +189,7 @@ Condition : isnotnull(s_store_sk#24)
(30) CometProject
Input [2]: [s_store_sk#24, s_state#25]
-Arguments: [s_store_sk#24, s_state#26], [s_store_sk#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#25, 2, true, false, true) AS s_state#26]
+Arguments: [s_store_sk#24, s_state#26], [s_store_sk#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#25, 2)) AS s_state#26]
(31) CometBroadcastExchange
Input [2]: [s_store_sk#24, s_state#26]
@@ -217,7 +217,7 @@ Condition : isnotnull(i_item_sk#27)
(36) CometProject
Input [3]: [i_item_sk#27, i_item_id#28, i_item_desc#29]
-Arguments: [i_item_sk#27, i_item_id#30, i_item_desc#29], [i_item_sk#27, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#28, 16, true, false, true) AS i_item_id#30, i_item_desc#29]
+Arguments: [i_item_sk#27, i_item_id#30, i_item_desc#29], [i_item_sk#27, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#28, 16)) AS i_item_id#30, i_item_desc#29]
(37) CometBroadcastExchange
Input [3]: [i_item_sk#27, i_item_id#30, i_item_desc#29]
@@ -274,12 +274,12 @@ BroadcastExchange (50)
Output [2]: [d_date_sk#19, d_quarter_name#20]
Batched: true
Location [not included in comparison]/{warehouse_dir}/date_dim]
-PushedFilters: [IsNotNull(d_date_sk)]
+PushedFilters: [IsNotNull(d_quarter_name), IsNotNull(d_date_sk)]
ReadSchema: struct
(47) CometFilter
Input [2]: [d_date_sk#19, d_quarter_name#20]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, d_quarter_name#20, 6, true, false, true) = 2001Q1) AND isnotnull(d_date_sk#19))
+Condition : ((isnotnull(d_quarter_name#20) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(d_quarter_name#20, 6)) = 2001Q1)) AND isnotnull(d_date_sk#19))
(48) CometProject
Input [2]: [d_date_sk#19, d_quarter_name#20]
@@ -309,7 +309,7 @@ ReadSchema: struct
(52) CometFilter
Input [2]: [d_date_sk#21, d_quarter_name#22]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, d_quarter_name#22, 6, true, false, true) IN (2001Q1,2001Q2,2001Q3) AND isnotnull(d_date_sk#21))
+Condition : (static_invoke(CharVarcharCodegenUtils.readSidePadding(d_quarter_name#22, 6)) IN (2001Q1,2001Q2,2001Q3) AND isnotnull(d_date_sk#21))
(53) CometProject
Input [2]: [d_date_sk#21, d_quarter_name#22]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q18/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q18/explain.txt
index 336e4af39b..0d2cf9fdc4 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q18/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q18/explain.txt
@@ -61,12 +61,12 @@ Condition : ((isnotnull(cs_bill_cdemo_sk#2) AND isnotnull(cs_bill_customer_sk#1)
Output [4]: [cd_demo_sk#11, cd_gender#12, cd_education_status#13, cd_dep_count#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
-PushedFilters: [IsNotNull(cd_demo_sk)]
+PushedFilters: [IsNotNull(cd_gender), IsNotNull(cd_education_status), IsNotNull(cd_demo_sk)]
ReadSchema: struct
(4) CometFilter
Input [4]: [cd_demo_sk#11, cd_gender#12, cd_education_status#13, cd_dep_count#14]
-Condition : (((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#12, 1, true, false, true) = F) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#13, 20, true, false, true) = Unknown )) AND isnotnull(cd_demo_sk#11))
+Condition : ((((isnotnull(cd_gender#12) AND isnotnull(cd_education_status#13)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#12, 1)) = F)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#13, 20)) = Unknown )) AND isnotnull(cd_demo_sk#11))
(5) CometProject
Input [4]: [cd_demo_sk#11, cd_gender#12, cd_education_status#13, cd_dep_count#14]
@@ -146,11 +146,11 @@ ReadSchema: struct
(21) CometFilter
Input [2]: [ca_address_sk#19, ca_zip#20]
-Condition : (isnotnull(ca_address_sk#19) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#20, 10, true, false, true)))
+Condition : (isnotnull(ca_address_sk#19) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#20, 10))))
(22) CometProject
Input [2]: [ca_address_sk#19, ca_zip#20]
-Arguments: [ca_address_sk#19, ca_zip#21], [ca_address_sk#19, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#20, 10, true, false, true) AS ca_zip#21]
+Arguments: [ca_address_sk#19, ca_zip#21], [ca_address_sk#19, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#20, 10)) AS ca_zip#21]
(23) CometBroadcastExchange
Input [2]: [ca_address_sk#19, ca_zip#21]
@@ -167,11 +167,11 @@ ReadSchema: struct
(27) CometFilter
Input [2]: [s_store_sk#22, s_zip#23]
-Condition : (isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#23, 10, true, false, true)) AND isnotnull(s_store_sk#22))
+Condition : (isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#23, 10))) AND isnotnull(s_store_sk#22))
(28) CometProject
Input [2]: [s_store_sk#22, s_zip#23]
-Arguments: [s_store_sk#22, s_zip#24], [s_store_sk#22, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#23, 10, true, false, true) AS s_zip#24]
+Arguments: [s_store_sk#22, s_zip#24], [s_store_sk#22, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#23, 10)) AS s_zip#24]
(29) CometBroadcastExchange
Input [2]: [s_store_sk#22, s_zip#24]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q2/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q2/explain.txt
index fe2fcbbaca..3352161822 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q2/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q2/explain.txt
@@ -79,7 +79,7 @@ Condition : (isnotnull(d_date_sk#9) AND isnotnull(d_week_seq#10))
(8) CometProject
Input [3]: [d_date_sk#9, d_week_seq#10, d_day_name#11]
-Arguments: [d_date_sk#9, d_week_seq#10, d_day_name#12], [d_date_sk#9, d_week_seq#10, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, d_day_name#11, 9, true, false, true) AS d_day_name#12]
+Arguments: [d_date_sk#9, d_week_seq#10, d_day_name#12], [d_date_sk#9, d_week_seq#10, static_invoke(CharVarcharCodegenUtils.readSidePadding(d_day_name#11, 9)) AS d_day_name#12]
(9) CometBroadcastExchange
Input [3]: [d_date_sk#9, d_week_seq#10, d_day_name#12]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt
index 2f882364a6..7f6589a261 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q20/explain.txt
@@ -47,11 +47,11 @@ ReadSchema: struct= 0.99)) A
(10) CometProject
Input [3]: [i_item_sk#8, i_item_id#9, i_current_price#10]
-Arguments: [i_item_sk#8, i_item_id#11], [i_item_sk#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#9, 16, true, false, true) AS i_item_id#11]
+Arguments: [i_item_sk#8, i_item_id#11], [i_item_sk#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#9, 16)) AS i_item_id#11]
(11) CometBroadcastExchange
Input [2]: [i_item_sk#8, i_item_id#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q22/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q22/explain.txt
index f62bcf1bc4..b34a520bb0 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q22/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q22/explain.txt
@@ -80,7 +80,7 @@ Condition : isnotnull(i_item_sk#8)
(11) CometProject
Input [5]: [i_item_sk#8, i_brand#9, i_class#10, i_category#11, i_product_name#12]
-Arguments: [i_item_sk#8, i_brand#13, i_class#14, i_category#15, i_product_name#16], [i_item_sk#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#9, 50, true, false, true) AS i_brand#13, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#10, 50, true, false, true) AS i_class#14, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#11, 50, true, false, true) AS i_category#15, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_product_name#12, 50, true, false, true) AS i_product_name#16]
+Arguments: [i_item_sk#8, i_brand#13, i_class#14, i_category#15, i_product_name#16], [i_item_sk#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#9, 50)) AS i_brand#13, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#10, 50)) AS i_class#14, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#11, 50)) AS i_category#15, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_product_name#12, 50)) AS i_product_name#16]
(12) CometBroadcastExchange
Input [5]: [i_item_sk#8, i_brand#13, i_class#14, i_category#15, i_product_name#16]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt
index 18541e65d9..f6eaa0e14b 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q23b/explain.txt
@@ -351,7 +351,7 @@ Join type: LeftSemi
Join condition: None
(56) Project [codegen id : 7]
-Output [3]: [c_customer_sk#32, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#33, 20, true, false, true) AS c_first_name#35, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#34, 30, true, false, true) AS c_last_name#36]
+Output [3]: [c_customer_sk#32, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#33, 20)) AS c_first_name#35, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#34, 30)) AS c_last_name#36]
Input [3]: [c_customer_sk#32, c_first_name#33, c_last_name#34]
(57) BroadcastExchange
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt
index 99549dc2ac..56f839112f 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24a/explain.txt
@@ -112,11 +112,11 @@ ReadSchema: struct
(20) CometFilter
Input [6]: [i_item_sk#17, i_current_price#18, i_size#19, i_color#20, i_units#21, i_manager_id#22]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#20, 20, true, false, true) = pale ) AND isnotnull(i_item_sk#17))
+Condition : ((isnotnull(i_color#20) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#20, 20)) = pale )) AND isnotnull(i_item_sk#17))
(21) CometProject
Input [6]: [i_item_sk#17, i_current_price#18, i_size#19, i_color#20, i_units#21, i_manager_id#22]
-Arguments: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22], [i_item_sk#17, i_current_price#18, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_size#19, 20, true, false, true) AS i_size#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#20, 20, true, false, true) AS i_color#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_units#21, 10, true, false, true) AS i_units#25, i_manager_id#22]
+Arguments: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22], [i_item_sk#17, i_current_price#18, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_size#19, 20)) AS i_size#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#20, 20)) AS i_color#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_units#21, 10)) AS i_units#25, i_manager_id#22]
(22) CometBroadcastExchange
Input [6]: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22]
@@ -172,7 +172,7 @@ Condition : (isnotnull(c_customer_sk#26) AND isnotnull(c_birth_country#29))
(27) CometProject
Input [4]: [c_customer_sk#26, c_first_name#27, c_last_name#28, c_birth_country#29]
-Arguments: [c_customer_sk#26, c_first_name#30, c_last_name#31, c_birth_country#29], [c_customer_sk#26, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#27, 20, true, false, true) AS c_first_name#30, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#28, 30, true, false, true) AS c_last_name#31, c_birth_country#29]
+Arguments: [c_customer_sk#26, c_first_name#30, c_last_name#31, c_birth_country#29], [c_customer_sk#26, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#27, 20)) AS c_first_name#30, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#28, 30)) AS c_last_name#31, c_birth_country#29]
(28) CometBroadcastExchange
Input [4]: [c_customer_sk#26, c_first_name#30, c_last_name#31, c_birth_country#29]
@@ -199,11 +199,11 @@ ReadSchema: struct
(33) CometFilter
Input [3]: [ca_state#32, ca_zip#33, ca_country#34]
-Condition : (isnotnull(ca_country#34) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#33, 10, true, false, true)))
+Condition : (isnotnull(ca_country#34) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#33, 10))))
(34) CometProject
Input [3]: [ca_state#32, ca_zip#33, ca_country#34]
-Arguments: [ca_state#35, ca_zip#36, ca_country#34], [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#32, 2, true, false, true) AS ca_state#35, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#33, 10, true, false, true) AS ca_zip#36, ca_country#34]
+Arguments: [ca_state#35, ca_zip#36, ca_country#34], [static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#32, 2)) AS ca_state#35, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#33, 10)) AS ca_zip#36, ca_country#34]
(35) CometColumnarToRow [codegen id : 1]
Input [3]: [ca_state#35, ca_zip#36, ca_country#34]
@@ -351,7 +351,7 @@ Condition : isnotnull(i_item_sk#60)
(59) CometProject
Input [6]: [i_item_sk#60, i_current_price#61, i_size#62, i_color#63, i_units#64, i_manager_id#65]
-Arguments: [i_item_sk#60, i_current_price#61, i_size#66, i_color#67, i_units#68, i_manager_id#65], [i_item_sk#60, i_current_price#61, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_size#62, 20, true, false, true) AS i_size#66, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#63, 20, true, false, true) AS i_color#67, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_units#64, 10, true, false, true) AS i_units#68, i_manager_id#65]
+Arguments: [i_item_sk#60, i_current_price#61, i_size#66, i_color#67, i_units#68, i_manager_id#65], [i_item_sk#60, i_current_price#61, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_size#62, 20)) AS i_size#66, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#63, 20)) AS i_color#67, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_units#64, 10)) AS i_units#68, i_manager_id#65]
(60) CometBroadcastExchange
Input [6]: [i_item_sk#60, i_current_price#61, i_size#66, i_color#67, i_units#68, i_manager_id#65]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt
index 55a94e03a4..2b9944725c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q24b/explain.txt
@@ -112,11 +112,11 @@ ReadSchema: struct
(20) CometFilter
Input [6]: [i_item_sk#17, i_current_price#18, i_size#19, i_color#20, i_units#21, i_manager_id#22]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#20, 20, true, false, true) = chiffon ) AND isnotnull(i_item_sk#17))
+Condition : ((isnotnull(i_color#20) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#20, 20)) = chiffon )) AND isnotnull(i_item_sk#17))
(21) CometProject
Input [6]: [i_item_sk#17, i_current_price#18, i_size#19, i_color#20, i_units#21, i_manager_id#22]
-Arguments: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22], [i_item_sk#17, i_current_price#18, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_size#19, 20, true, false, true) AS i_size#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#20, 20, true, false, true) AS i_color#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_units#21, 10, true, false, true) AS i_units#25, i_manager_id#22]
+Arguments: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22], [i_item_sk#17, i_current_price#18, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_size#19, 20)) AS i_size#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#20, 20)) AS i_color#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_units#21, 10)) AS i_units#25, i_manager_id#22]
(22) CometBroadcastExchange
Input [6]: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22]
@@ -172,7 +172,7 @@ Condition : (isnotnull(c_customer_sk#26) AND isnotnull(c_birth_country#29))
(27) CometProject
Input [4]: [c_customer_sk#26, c_first_name#27, c_last_name#28, c_birth_country#29]
-Arguments: [c_customer_sk#26, c_first_name#30, c_last_name#31, c_birth_country#29], [c_customer_sk#26, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#27, 20, true, false, true) AS c_first_name#30, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#28, 30, true, false, true) AS c_last_name#31, c_birth_country#29]
+Arguments: [c_customer_sk#26, c_first_name#30, c_last_name#31, c_birth_country#29], [c_customer_sk#26, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#27, 20)) AS c_first_name#30, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#28, 30)) AS c_last_name#31, c_birth_country#29]
(28) CometBroadcastExchange
Input [4]: [c_customer_sk#26, c_first_name#30, c_last_name#31, c_birth_country#29]
@@ -199,11 +199,11 @@ ReadSchema: struct
(33) CometFilter
Input [3]: [ca_state#32, ca_zip#33, ca_country#34]
-Condition : (isnotnull(ca_country#34) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#33, 10, true, false, true)))
+Condition : (isnotnull(ca_country#34) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#33, 10))))
(34) CometProject
Input [3]: [ca_state#32, ca_zip#33, ca_country#34]
-Arguments: [ca_state#35, ca_zip#36, ca_country#34], [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#32, 2, true, false, true) AS ca_state#35, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#33, 10, true, false, true) AS ca_zip#36, ca_country#34]
+Arguments: [ca_state#35, ca_zip#36, ca_country#34], [static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#32, 2)) AS ca_state#35, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#33, 10)) AS ca_zip#36, ca_country#34]
(35) CometColumnarToRow [codegen id : 1]
Input [3]: [ca_state#35, ca_zip#36, ca_country#34]
@@ -351,7 +351,7 @@ Condition : isnotnull(i_item_sk#60)
(59) CometProject
Input [6]: [i_item_sk#60, i_current_price#61, i_size#62, i_color#63, i_units#64, i_manager_id#65]
-Arguments: [i_item_sk#60, i_current_price#61, i_size#66, i_color#67, i_units#68, i_manager_id#65], [i_item_sk#60, i_current_price#61, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_size#62, 20, true, false, true) AS i_size#66, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#63, 20, true, false, true) AS i_color#67, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_units#64, 10, true, false, true) AS i_units#68, i_manager_id#65]
+Arguments: [i_item_sk#60, i_current_price#61, i_size#66, i_color#67, i_units#68, i_manager_id#65], [i_item_sk#60, i_current_price#61, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_size#62, 20)) AS i_size#66, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#63, 20)) AS i_color#67, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_units#64, 10)) AS i_units#68, i_manager_id#65]
(60) CometBroadcastExchange
Input [6]: [i_item_sk#60, i_current_price#61, i_size#66, i_color#67, i_units#68, i_manager_id#65]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt
index dfeb1fee37..a5202da7fd 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q25/explain.txt
@@ -189,7 +189,7 @@ Condition : isnotnull(s_store_sk#26)
(30) CometProject
Input [3]: [s_store_sk#26, s_store_id#27, s_store_name#28]
-Arguments: [s_store_sk#26, s_store_id#29, s_store_name#28], [s_store_sk#26, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#27, 16, true, false, true) AS s_store_id#29, s_store_name#28]
+Arguments: [s_store_sk#26, s_store_id#29, s_store_name#28], [s_store_sk#26, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#27, 16)) AS s_store_id#29, s_store_name#28]
(31) CometBroadcastExchange
Input [3]: [s_store_sk#26, s_store_id#29, s_store_name#28]
@@ -217,7 +217,7 @@ Condition : isnotnull(i_item_sk#30)
(36) CometProject
Input [3]: [i_item_sk#30, i_item_id#31, i_item_desc#32]
-Arguments: [i_item_sk#30, i_item_id#33, i_item_desc#32], [i_item_sk#30, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#31, 16, true, false, true) AS i_item_id#33, i_item_desc#32]
+Arguments: [i_item_sk#30, i_item_id#33, i_item_desc#32], [i_item_sk#30, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#31, 16)) AS i_item_id#33, i_item_desc#32]
(37) CometBroadcastExchange
Input [3]: [i_item_sk#30, i_item_id#33, i_item_desc#32]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q26/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q26/explain.txt
index cd6239a955..040bf15468 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q26/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q26/explain.txt
@@ -49,12 +49,12 @@ Condition : ((isnotnull(cs_bill_cdemo_sk#1) AND isnotnull(cs_item_sk#2)) AND isn
Output [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
-PushedFilters: [IsNotNull(cd_demo_sk)]
+PushedFilters: [IsNotNull(cd_gender), IsNotNull(cd_marital_status), IsNotNull(cd_education_status), IsNotNull(cd_demo_sk)]
ReadSchema: struct
(4) CometFilter
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
-Condition : ((((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#11, 1, true, false, true) = M) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#12, 1, true, false, true) = S)) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#13, 20, true, false, true) = College )) AND isnotnull(cd_demo_sk#10))
+Condition : ((((((isnotnull(cd_gender#11) AND isnotnull(cd_marital_status#12)) AND isnotnull(cd_education_status#13)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#11, 1)) = M)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#12, 1)) = S)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#13, 20)) = College )) AND isnotnull(cd_demo_sk#10))
(5) CometProject
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
@@ -114,7 +114,7 @@ Condition : isnotnull(i_item_sk#16)
(17) CometProject
Input [2]: [i_item_sk#16, i_item_id#17]
-Arguments: [i_item_sk#16, i_item_id#18], [i_item_sk#16, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#17, 16, true, false, true) AS i_item_id#18]
+Arguments: [i_item_sk#16, i_item_id#18], [i_item_sk#16, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#17, 16)) AS i_item_id#18]
(18) CometBroadcastExchange
Input [2]: [i_item_sk#16, i_item_id#18]
@@ -138,7 +138,7 @@ ReadSchema: struct
(22) CometFilter
Input [3]: [p_promo_sk#19, p_channel_email#20, p_channel_event#21]
-Condition : (((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, p_channel_email#20, 1, true, false, true) = N) OR (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, p_channel_event#21, 1, true, false, true) = N)) AND isnotnull(p_promo_sk#19))
+Condition : (((static_invoke(CharVarcharCodegenUtils.readSidePadding(p_channel_email#20, 1)) = N) OR (static_invoke(CharVarcharCodegenUtils.readSidePadding(p_channel_event#21, 1)) = N)) AND isnotnull(p_promo_sk#19))
(23) CometProject
Input [3]: [p_promo_sk#19, p_channel_email#20, p_channel_event#21]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q27/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q27/explain.txt
index 3c9a97d54e..36482430cd 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q27/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q27/explain.txt
@@ -50,12 +50,12 @@ Condition : ((isnotnull(ss_cdemo_sk#2) AND isnotnull(ss_store_sk#3)) AND isnotnu
Output [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
-PushedFilters: [IsNotNull(cd_demo_sk)]
+PushedFilters: [IsNotNull(cd_gender), IsNotNull(cd_marital_status), IsNotNull(cd_education_status), IsNotNull(cd_demo_sk)]
ReadSchema: struct
(4) CometFilter
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
-Condition : ((((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#11, 1, true, false, true) = M) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#12, 1, true, false, true) = S)) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#13, 20, true, false, true) = College )) AND isnotnull(cd_demo_sk#10))
+Condition : ((((((isnotnull(cd_gender#11) AND isnotnull(cd_marital_status#12)) AND isnotnull(cd_education_status#13)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#11, 1)) = M)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#12, 1)) = S)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#13, 20)) = College )) AND isnotnull(cd_demo_sk#10))
(5) CometProject
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
@@ -106,16 +106,16 @@ Arguments: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sale
Output [2]: [s_store_sk#16, s_state#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk)]
+PushedFilters: [IsNotNull(s_state), IsNotNull(s_store_sk)]
ReadSchema: struct
(16) CometFilter
Input [2]: [s_store_sk#16, s_state#17]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#17, 2, true, false, true) = TN) AND isnotnull(s_store_sk#16))
+Condition : ((isnotnull(s_state#17) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#17, 2)) = TN)) AND isnotnull(s_store_sk#16))
(17) CometProject
Input [2]: [s_store_sk#16, s_state#17]
-Arguments: [s_store_sk#16, s_state#18], [s_store_sk#16, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#17, 2, true, false, true) AS s_state#18]
+Arguments: [s_store_sk#16, s_state#18], [s_store_sk#16, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#17, 2)) AS s_state#18]
(18) CometBroadcastExchange
Input [2]: [s_store_sk#16, s_state#18]
@@ -143,7 +143,7 @@ Condition : isnotnull(i_item_sk#19)
(23) CometProject
Input [2]: [i_item_sk#19, i_item_id#20]
-Arguments: [i_item_sk#19, i_item_id#21], [i_item_sk#19, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#20, 16, true, false, true) AS i_item_id#21]
+Arguments: [i_item_sk#19, i_item_id#21], [i_item_sk#19, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#20, 16)) AS i_item_id#21]
(24) CometBroadcastExchange
Input [2]: [i_item_sk#19, i_item_id#21]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q29/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q29/explain.txt
index 4c9735860e..bc2bc01bc1 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q29/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q29/explain.txt
@@ -208,7 +208,7 @@ Condition : isnotnull(s_store_sk#27)
(33) CometProject
Input [3]: [s_store_sk#27, s_store_id#28, s_store_name#29]
-Arguments: [s_store_sk#27, s_store_id#30, s_store_name#29], [s_store_sk#27, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#28, 16, true, false, true) AS s_store_id#30, s_store_name#29]
+Arguments: [s_store_sk#27, s_store_id#30, s_store_name#29], [s_store_sk#27, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#28, 16)) AS s_store_id#30, s_store_name#29]
(34) CometBroadcastExchange
Input [3]: [s_store_sk#27, s_store_id#30, s_store_name#29]
@@ -236,7 +236,7 @@ Condition : isnotnull(i_item_sk#31)
(39) CometProject
Input [3]: [i_item_sk#31, i_item_id#32, i_item_desc#33]
-Arguments: [i_item_sk#31, i_item_id#34, i_item_desc#33], [i_item_sk#31, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#32, 16, true, false, true) AS i_item_id#34, i_item_desc#33]
+Arguments: [i_item_sk#31, i_item_id#34, i_item_desc#33], [i_item_sk#31, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#32, 16)) AS i_item_id#34, i_item_desc#33]
(40) CometBroadcastExchange
Input [3]: [i_item_sk#31, i_item_id#34, i_item_desc#33]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q3/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q3/explain.txt
index df6779ceb1..f540f6fcd2 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q3/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q3/explain.txt
@@ -74,7 +74,7 @@ Condition : ((isnotnull(i_manufact_id#10) AND (i_manufact_id#10 = 128)) AND isno
(11) CometProject
Input [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manufact_id#10]
-Arguments: [i_item_sk#7, i_brand_id#8, i_brand#11], [i_item_sk#7, i_brand_id#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#9, 50, true, false, true) AS i_brand#11]
+Arguments: [i_item_sk#7, i_brand_id#8, i_brand#11], [i_item_sk#7, i_brand_id#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#9, 50)) AS i_brand#11]
(12) CometBroadcastExchange
Input [3]: [i_item_sk#7, i_brand_id#8, i_brand#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q30/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q30/explain.txt
index 122cdd61d0..0b191d0079 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q30/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q30/explain.txt
@@ -106,11 +106,11 @@ ReadSchema: struct
(10) CometFilter
Input [2]: [ca_address_sk#8, ca_state#9]
-Condition : (isnotnull(ca_address_sk#8) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#9, 2, true, false, true)))
+Condition : (isnotnull(ca_address_sk#8) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#9, 2))))
(11) CometProject
Input [2]: [ca_address_sk#8, ca_state#9]
-Arguments: [ca_address_sk#8, ca_state#10], [ca_address_sk#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#9, 2, true, false, true) AS ca_state#10]
+Arguments: [ca_address_sk#8, ca_state#10], [ca_address_sk#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#9, 2)) AS ca_state#10]
(12) CometBroadcastExchange
Input [2]: [ca_address_sk#8, ca_state#10]
@@ -265,7 +265,7 @@ Condition : (isnotnull(c_customer_sk#35) AND isnotnull(c_current_addr_sk#37))
(44) CometProject
Input [14]: [c_customer_sk#35, c_customer_id#36, c_current_addr_sk#37, c_salutation#38, c_first_name#39, c_last_name#40, c_preferred_cust_flag#41, c_birth_day#42, c_birth_month#43, c_birth_year#44, c_birth_country#45, c_login#46, c_email_address#47, c_last_review_date#48]
-Arguments: [c_customer_sk#35, c_customer_id#49, c_current_addr_sk#37, c_salutation#50, c_first_name#51, c_last_name#52, c_preferred_cust_flag#53, c_birth_day#42, c_birth_month#43, c_birth_year#44, c_birth_country#45, c_login#54, c_email_address#55, c_last_review_date#48], [c_customer_sk#35, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_customer_id#36, 16, true, false, true) AS c_customer_id#49, c_current_addr_sk#37, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_salutation#38, 10, true, false, true) AS c_salutation#50, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#39, 20, true, false, true) AS c_first_name#51, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#40, 30, true, false, true) AS c_last_name#52, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_preferred_cust_flag#41, 1, true, false, true) AS c_preferred_cust_flag#53, c_birth_day#42, c_birth_month#43, c_birth_year#44, c_birth_country#45, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_login#46, 13, true, false, true) AS c_login#54, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_email_address#47, 50, true, false, true) AS c_email_address#55, c_last_review_date#48]
+Arguments: [c_customer_sk#35, c_customer_id#49, c_current_addr_sk#37, c_salutation#50, c_first_name#51, c_last_name#52, c_preferred_cust_flag#53, c_birth_day#42, c_birth_month#43, c_birth_year#44, c_birth_country#45, c_login#54, c_email_address#55, c_last_review_date#48], [c_customer_sk#35, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_customer_id#36, 16)) AS c_customer_id#49, c_current_addr_sk#37, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_salutation#38, 10)) AS c_salutation#50, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#39, 20)) AS c_first_name#51, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#40, 30)) AS c_last_name#52, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_preferred_cust_flag#41, 1)) AS c_preferred_cust_flag#53, c_birth_day#42, c_birth_month#43, c_birth_year#44, c_birth_country#45, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_login#46, 13)) AS c_login#54, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_email_address#47, 50)) AS c_email_address#55, c_last_review_date#48]
(45) CometColumnarToRow [codegen id : 5]
Input [14]: [c_customer_sk#35, c_customer_id#49, c_current_addr_sk#37, c_salutation#50, c_first_name#51, c_last_name#52, c_preferred_cust_flag#53, c_birth_day#42, c_birth_month#43, c_birth_year#44, c_birth_country#45, c_login#54, c_email_address#55, c_last_review_date#48]
@@ -288,12 +288,12 @@ Input [16]: [ctr_customer_sk#14, ctr_total_return#16, c_customer_sk#35, c_custom
Output [2]: [ca_address_sk#56, ca_state#57]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
-PushedFilters: [IsNotNull(ca_address_sk)]
+PushedFilters: [IsNotNull(ca_state), IsNotNull(ca_address_sk)]
ReadSchema: struct
(50) CometFilter
Input [2]: [ca_address_sk#56, ca_state#57]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#57, 2, true, false, true) = GA) AND isnotnull(ca_address_sk#56))
+Condition : ((isnotnull(ca_state#57) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#57, 2)) = GA)) AND isnotnull(ca_address_sk#56))
(51) CometProject
Input [2]: [ca_address_sk#56, ca_state#57]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt
index ef4f99ceae..9f72b04119 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q33/explain.txt
@@ -151,11 +151,12 @@ Condition : isnotnull(i_item_sk#11)
Output [2]: [i_category#13, i_manufact_id#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
+PushedFilters: [IsNotNull(i_category)]
ReadSchema: struct
(18) CometFilter
Input [2]: [i_category#13, i_manufact_id#14]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#13, 50, true, false, true) = Electronics )
+Condition : (isnotnull(i_category#13) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#13, 50)) = Electronics ))
(19) CometProject
Input [2]: [i_category#13, i_manufact_id#14]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt
index 13934b6c66..933488f2b7 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q34/explain.txt
@@ -111,7 +111,7 @@ ReadSchema: struct10000 ) OR (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, hd_buy_potential#13, 15, true, false, true) = unknown ))) AND (hd_vehicle_count#15 > 0)) AND CASE WHEN (hd_vehicle_count#15 > 0) THEN (knownfloatingpointnormalized(normalizenanandzero((cast(hd_dep_count#14 as double) / knownfloatingpointnormalized(normalizenanandzero(cast(hd_vehicle_count#15 as double)))))) > 1.2) END) AND isnotnull(hd_demo_sk#12))
+Condition : ((((isnotnull(hd_vehicle_count#15) AND ((static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#13, 15)) = >10000 ) OR (static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#13, 15)) = unknown ))) AND (hd_vehicle_count#15 > 0)) AND CASE WHEN (hd_vehicle_count#15 > 0) THEN (knownfloatingpointnormalized(normalizenanandzero((cast(hd_dep_count#14 as double) / knownfloatingpointnormalized(normalizenanandzero(cast(hd_vehicle_count#15 as double)))))) > 1.2) END) AND isnotnull(hd_demo_sk#12))
(17) CometProject
Input [4]: [hd_demo_sk#12, hd_buy_potential#13, hd_dep_count#14, hd_vehicle_count#15]
@@ -161,7 +161,7 @@ Condition : isnotnull(c_customer_sk#18)
(27) CometProject
Input [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22]
-Arguments: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26], [c_customer_sk#18, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_salutation#19, 10, true, false, true) AS c_salutation#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#20, 20, true, false, true) AS c_first_name#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#21, 30, true, false, true) AS c_last_name#25, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_preferred_cust_flag#22, 1, true, false, true) AS c_preferred_cust_flag#26]
+Arguments: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26], [c_customer_sk#18, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_salutation#19, 10)) AS c_salutation#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#20, 20)) AS c_first_name#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#21, 30)) AS c_last_name#25, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_preferred_cust_flag#22, 1)) AS c_preferred_cust_flag#26]
(28) CometBroadcastExchange
Input [5]: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt
index 0587acad05..d74ec55bc0 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q35/explain.txt
@@ -191,7 +191,7 @@ Condition : isnotnull(ca_address_sk#20)
(31) CometProject
Input [2]: [ca_address_sk#20, ca_state#21]
-Arguments: [ca_address_sk#20, ca_state#22], [ca_address_sk#20, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#21, 2, true, false, true) AS ca_state#22]
+Arguments: [ca_address_sk#20, ca_state#22], [ca_address_sk#20, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#21, 2)) AS ca_state#22]
(32) CometColumnarToRow [codegen id : 3]
Input [2]: [ca_address_sk#20, ca_state#22]
@@ -223,7 +223,7 @@ Condition : isnotnull(cd_demo_sk#23)
(38) CometProject
Input [6]: [cd_demo_sk#23, cd_gender#24, cd_marital_status#25, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
-Arguments: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28], [cd_demo_sk#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#24, 1, true, false, true) AS cd_gender#29, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#25, 1, true, false, true) AS cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
+Arguments: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28], [cd_demo_sk#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#24, 1)) AS cd_gender#29, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#25, 1)) AS cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
(39) CometColumnarToRow [codegen id : 4]
Input [6]: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt
index c29eb227ce..880f2c745d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q36/explain.txt
@@ -86,7 +86,7 @@ Condition : isnotnull(i_item_sk#9)
(11) CometProject
Input [3]: [i_item_sk#9, i_class#10, i_category#11]
-Arguments: [i_item_sk#9, i_class#12, i_category#13], [i_item_sk#9, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#10, 50, true, false, true) AS i_class#12, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#11, 50, true, false, true) AS i_category#13]
+Arguments: [i_item_sk#9, i_class#12, i_category#13], [i_item_sk#9, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#10, 50)) AS i_class#12, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#11, 50)) AS i_category#13]
(12) CometBroadcastExchange
Input [3]: [i_item_sk#9, i_class#12, i_category#13]
@@ -105,12 +105,12 @@ Arguments: [ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, i_class#12, i_
Output [2]: [s_store_sk#14, s_state#15]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk)]
+PushedFilters: [IsNotNull(s_state), IsNotNull(s_store_sk)]
ReadSchema: struct
(16) CometFilter
Input [2]: [s_store_sk#14, s_state#15]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#15, 2, true, false, true) = TN) AND isnotnull(s_store_sk#14))
+Condition : ((isnotnull(s_state#15) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#15, 2)) = TN)) AND isnotnull(s_store_sk#14))
(17) CometProject
Input [2]: [s_store_sk#14, s_state#15]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q37/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q37/explain.txt
index 3471befeca..3b5a2111e3 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q37/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q37/explain.txt
@@ -40,7 +40,7 @@ Condition : ((((isnotnull(i_current_price#4) AND (i_current_price#4 >= 68.00)) A
(3) CometProject
Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5]
-Arguments: [i_item_sk#1, i_item_id#6, i_item_desc#3, i_current_price#4], [i_item_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#2, 16, true, false, true) AS i_item_id#6, i_item_desc#3, i_current_price#4]
+Arguments: [i_item_sk#1, i_item_id#6, i_item_desc#3, i_current_price#4], [i_item_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#2, 16)) AS i_item_id#6, i_item_desc#3, i_current_price#4]
(4) CometScan parquet spark_catalog.default.inventory
Output [3]: [inv_item_sk#7, inv_quantity_on_hand#8, inv_date_sk#9]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt
index a6a7d781fb..79fbdd589e 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q38/explain.txt
@@ -102,7 +102,7 @@ Condition : isnotnull(c_customer_sk#7)
(11) CometProject
Input [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9]
-Arguments: [c_customer_sk#7, c_first_name#10, c_last_name#11], [c_customer_sk#7, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#8, 20, true, false, true) AS c_first_name#10, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#9, 30, true, false, true) AS c_last_name#11]
+Arguments: [c_customer_sk#7, c_first_name#10, c_last_name#11], [c_customer_sk#7, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#8, 20)) AS c_first_name#10, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#9, 30)) AS c_last_name#11]
(12) CometBroadcastExchange
Input [3]: [c_customer_sk#7, c_first_name#10, c_last_name#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q4/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q4/explain.txt
index c5fcf23458..bbe2c4f231 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q4/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q4/explain.txt
@@ -128,11 +128,11 @@ ReadSchema: struct= 0.99)) A
(20) CometProject
Input [3]: [i_item_sk#14, i_item_id#15, i_current_price#16]
-Arguments: [i_item_sk#14, i_item_id#17], [i_item_sk#14, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#15, 16, true, false, true) AS i_item_id#17]
+Arguments: [i_item_sk#14, i_item_id#17], [i_item_sk#14, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#15, 16)) AS i_item_id#17]
(21) CometBroadcastExchange
Input [2]: [i_item_sk#14, i_item_id#17]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q41/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q41/explain.txt
index 1c46adf592..022c10e9b5 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q41/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q41/explain.txt
@@ -24,12 +24,12 @@
Output [3]: [i_manufact_id#1, i_manufact#2, i_product_name#3]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
-PushedFilters: [IsNotNull(i_manufact_id), GreaterThanOrEqual(i_manufact_id,738), LessThanOrEqual(i_manufact_id,778)]
+PushedFilters: [IsNotNull(i_manufact_id), GreaterThanOrEqual(i_manufact_id,738), LessThanOrEqual(i_manufact_id,778), IsNotNull(i_manufact)]
ReadSchema: struct
(2) CometFilter
Input [3]: [i_manufact_id#1, i_manufact#2, i_product_name#3]
-Condition : ((isnotnull(i_manufact_id#1) AND (i_manufact_id#1 >= 738)) AND (i_manufact_id#1 <= 778))
+Condition : (((isnotnull(i_manufact_id#1) AND (i_manufact_id#1 >= 738)) AND (i_manufact_id#1 <= 778)) AND isnotnull(i_manufact#2))
(3) CometProject
Input [3]: [i_manufact_id#1, i_manufact#2, i_product_name#3]
@@ -43,11 +43,11 @@ ReadSchema: struct
(2) CometFilter
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Condition : ((isnotnull(i_item_sk#1) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true))) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true)))
+Condition : ((isnotnull(i_item_sk#1) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)))) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50))))
(3) CometProject
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true) AS i_brand#4, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true) AS i_category#5]
+Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50)) AS i_brand#4, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)) AS i_category#5]
(4) CometScan parquet spark_catalog.default.store_sales
Output [4]: [ss_item_sk#6, ss_store_sk#7, ss_sales_price#8, ss_sold_date_sk#9]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt
index 8513867b94..e6ad677a93 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q48/explain.txt
@@ -76,11 +76,11 @@ ReadSchema: struct
(15) CometFilter
Input [3]: [ca_address_sk#15, ca_state#16, ca_country#17]
-Condition : (((isnotnull(ca_country#17) AND (ca_country#17 = United States)) AND isnotnull(ca_address_sk#15)) AND ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#16, 2, true, false, true) IN (CO,OH,TX) OR staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#16, 2, true, false, true) IN (OR,MN,KY)) OR staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#16, 2, true, false, true) IN (VA,CA,MS)))
+Condition : (((isnotnull(ca_country#17) AND (ca_country#17 = United States)) AND isnotnull(ca_address_sk#15)) AND ((static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#16, 2)) IN (CO,OH,TX) OR static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#16, 2)) IN (OR,MN,KY)) OR static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#16, 2)) IN (VA,CA,MS)))
(16) CometProject
Input [3]: [ca_address_sk#15, ca_state#16, ca_country#17]
-Arguments: [ca_address_sk#15, ca_state#18], [ca_address_sk#15, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#16, 2, true, false, true) AS ca_state#18]
+Arguments: [ca_address_sk#15, ca_state#18], [ca_address_sk#15, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#16, 2)) AS ca_state#18]
(17) CometBroadcastExchange
Input [2]: [ca_address_sk#15, ca_state#18]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt
index 9422a82484..202e2607bc 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q5/explain.txt
@@ -155,7 +155,7 @@ Condition : isnotnull(s_store_sk#24)
(16) CometProject
Input [2]: [s_store_sk#24, s_store_id#25]
-Arguments: [s_store_sk#24, s_store_id#26], [s_store_sk#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#25, 16, true, false, true) AS s_store_id#26]
+Arguments: [s_store_sk#24, s_store_id#26], [s_store_sk#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#25, 16)) AS s_store_id#26]
(17) CometBroadcastExchange
Input [2]: [s_store_sk#24, s_store_id#26]
@@ -255,7 +255,7 @@ Condition : isnotnull(cp_catalog_page_sk#66)
(37) CometProject
Input [2]: [cp_catalog_page_sk#66, cp_catalog_page_id#67]
-Arguments: [cp_catalog_page_sk#66, cp_catalog_page_id#68], [cp_catalog_page_sk#66, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cp_catalog_page_id#67, 16, true, false, true) AS cp_catalog_page_id#68]
+Arguments: [cp_catalog_page_sk#66, cp_catalog_page_id#68], [cp_catalog_page_sk#66, static_invoke(CharVarcharCodegenUtils.readSidePadding(cp_catalog_page_id#67, 16)) AS cp_catalog_page_id#68]
(38) CometBroadcastExchange
Input [2]: [cp_catalog_page_sk#66, cp_catalog_page_id#68]
@@ -374,7 +374,7 @@ Condition : isnotnull(web_site_sk#113)
(62) CometProject
Input [2]: [web_site_sk#113, web_site_id#114]
-Arguments: [web_site_sk#113, web_site_id#115], [web_site_sk#113, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, web_site_id#114, 16, true, false, true) AS web_site_id#115]
+Arguments: [web_site_sk#113, web_site_id#115], [web_site_sk#113, static_invoke(CharVarcharCodegenUtils.readSidePadding(web_site_id#114, 16)) AS web_site_id#115]
(63) CometBroadcastExchange
Input [2]: [web_site_sk#113, web_site_id#115]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q50/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q50/explain.txt
index 3d4384afec..6c360cb79d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q50/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q50/explain.txt
@@ -81,7 +81,7 @@ Condition : isnotnull(s_store_sk#11)
(10) CometProject
Input [11]: [s_store_sk#11, s_store_name#12, s_company_id#13, s_street_number#14, s_street_name#15, s_street_type#16, s_suite_number#17, s_city#18, s_county#19, s_state#20, s_zip#21]
-Arguments: [s_store_sk#11, s_store_name#12, s_company_id#13, s_street_number#14, s_street_name#15, s_street_type#22, s_suite_number#23, s_city#18, s_county#19, s_state#24, s_zip#25], [s_store_sk#11, s_store_name#12, s_company_id#13, s_street_number#14, s_street_name#15, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_street_type#16, 15, true, false, true) AS s_street_type#22, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_suite_number#17, 10, true, false, true) AS s_suite_number#23, s_city#18, s_county#19, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#20, 2, true, false, true) AS s_state#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#21, 10, true, false, true) AS s_zip#25]
+Arguments: [s_store_sk#11, s_store_name#12, s_company_id#13, s_street_number#14, s_street_name#15, s_street_type#22, s_suite_number#23, s_city#18, s_county#19, s_state#24, s_zip#25], [s_store_sk#11, s_store_name#12, s_company_id#13, s_street_number#14, s_street_name#15, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_street_type#16, 15)) AS s_street_type#22, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_suite_number#17, 10)) AS s_suite_number#23, s_city#18, s_county#19, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#20, 2)) AS s_state#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#21, 10)) AS s_zip#25]
(11) CometBroadcastExchange
Input [11]: [s_store_sk#11, s_store_name#12, s_company_id#13, s_street_number#14, s_street_name#15, s_street_type#22, s_suite_number#23, s_city#18, s_county#19, s_state#24, s_zip#25]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt
index 8bbdc11695..63e0fe540d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q52/explain.txt
@@ -74,7 +74,7 @@ Condition : ((isnotnull(i_manager_id#10) AND (i_manager_id#10 = 1)) AND isnotnul
(11) CometProject
Input [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10]
-Arguments: [i_item_sk#7, i_brand_id#8, i_brand#11], [i_item_sk#7, i_brand_id#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#9, 50, true, false, true) AS i_brand#11]
+Arguments: [i_item_sk#7, i_brand_id#8, i_brand#11], [i_item_sk#7, i_brand_id#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#9, 50)) AS i_brand#11]
(12) CometBroadcastExchange
Input [3]: [i_item_sk#7, i_brand_id#8, i_brand#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt
index 3288c875e3..50312222e5 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q53/explain.txt
@@ -41,7 +41,7 @@ ReadSchema: struct
(9) CometFilter
Input [3]: [i_item_sk#14, i_class#15, i_category#16]
-Condition : (((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#16, 50, true, false, true) = Women ) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#15, 50, true, false, true) = maternity )) AND isnotnull(i_item_sk#14))
+Condition : ((((isnotnull(i_category#16) AND isnotnull(i_class#15)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#16, 50)) = Women )) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#15, 50)) = maternity )) AND isnotnull(i_item_sk#14))
(10) CometProject
Input [3]: [i_item_sk#14, i_class#15, i_category#16]
@@ -225,11 +225,11 @@ ReadSchema: struct
(34) CometFilter
Input [3]: [ca_address_sk#26, ca_county#27, ca_state#28]
-Condition : ((isnotnull(ca_address_sk#26) AND isnotnull(ca_county#27)) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#28, 2, true, false, true)))
+Condition : ((isnotnull(ca_address_sk#26) AND isnotnull(ca_county#27)) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#28, 2))))
(35) CometProject
Input [3]: [ca_address_sk#26, ca_county#27, ca_state#28]
-Arguments: [ca_address_sk#26, ca_county#27, ca_state#29], [ca_address_sk#26, ca_county#27, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#28, 2, true, false, true) AS ca_state#29]
+Arguments: [ca_address_sk#26, ca_county#27, ca_state#29], [ca_address_sk#26, ca_county#27, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#28, 2)) AS ca_state#29]
(36) CometBroadcastExchange
Input [3]: [ca_address_sk#26, ca_county#27, ca_state#29]
@@ -253,11 +253,11 @@ ReadSchema: struct
(40) CometFilter
Input [2]: [s_county#30, s_state#31]
-Condition : (isnotnull(s_county#30) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#31, 2, true, false, true)))
+Condition : (isnotnull(s_county#30) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#31, 2))))
(41) CometProject
Input [2]: [s_county#30, s_state#31]
-Arguments: [s_county#30, s_state#32], [s_county#30, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#31, 2, true, false, true) AS s_state#32]
+Arguments: [s_county#30, s_state#32], [s_county#30, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#31, 2)) AS s_state#32]
(42) CometBroadcastExchange
Input [2]: [s_county#30, s_state#32]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt
index 49415c4c0d..cd20f7ea48 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q55/explain.txt
@@ -74,7 +74,7 @@ Condition : ((isnotnull(i_manager_id#10) AND (i_manager_id#10 = 28)) AND isnotnu
(11) CometProject
Input [4]: [i_item_sk#7, i_brand_id#8, i_brand#9, i_manager_id#10]
-Arguments: [i_item_sk#7, i_brand_id#8, i_brand#11], [i_item_sk#7, i_brand_id#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#9, 50, true, false, true) AS i_brand#11]
+Arguments: [i_item_sk#7, i_brand_id#8, i_brand#11], [i_item_sk#7, i_brand_id#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#9, 50)) AS i_brand#11]
(12) CometBroadcastExchange
Input [3]: [i_item_sk#7, i_brand_id#8, i_brand#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt
index 68f69d6f08..4d1c083b57 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q56/explain.txt
@@ -156,11 +156,11 @@ ReadSchema: struct
(18) CometFilter
Input [2]: [i_item_id#13, i_color#14]
-Condition : staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#14, 20, true, false, true) IN (slate ,blanched ,burnished )
+Condition : static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#14, 20)) IN (slate ,blanched ,burnished )
(19) CometProject
Input [2]: [i_item_id#13, i_color#14]
-Arguments: [i_item_id#15], [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#13, 16, true, false, true) AS i_item_id#15]
+Arguments: [i_item_id#15], [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#13, 16)) AS i_item_id#15]
(20) CometBroadcastExchange
Input [1]: [i_item_id#15]
@@ -169,11 +169,11 @@ Arguments: [i_item_id#15]
(21) CometBroadcastHashJoin
Left output [2]: [i_item_sk#11, i_item_id#12]
Right output [1]: [i_item_id#15]
-Arguments: [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#12, 16, true, false, true)], [i_item_id#15], LeftSemi, BuildRight
+Arguments: [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#12, 16))], [i_item_id#15], LeftSemi, BuildRight
(22) CometProject
Input [2]: [i_item_sk#11, i_item_id#12]
-Arguments: [i_item_sk#11, i_item_id#16], [i_item_sk#11, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#12, 16, true, false, true) AS i_item_id#16]
+Arguments: [i_item_sk#11, i_item_id#16], [i_item_sk#11, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#12, 16)) AS i_item_id#16]
(23) CometBroadcastExchange
Input [2]: [i_item_sk#11, i_item_id#16]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt
index 7f9fc803e0..4a09e26857 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q57/explain.txt
@@ -61,11 +61,11 @@ ReadSchema: struct
(2) CometFilter
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Condition : ((isnotnull(i_item_sk#1) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true))) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true)))
+Condition : ((isnotnull(i_item_sk#1) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)))) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50))))
(3) CometProject
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true) AS i_brand#4, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true) AS i_category#5]
+Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50)) AS i_brand#4, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)) AS i_category#5]
(4) CometScan parquet spark_catalog.default.catalog_sales
Output [4]: [cs_call_center_sk#6, cs_item_sk#7, cs_sales_price#8, cs_sold_date_sk#9]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt
index 5778b8e76b..7409c3e41d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q58/explain.txt
@@ -79,11 +79,11 @@ ReadSchema: struct
(4) CometFilter
Input [2]: [i_item_sk#5, i_item_id#6]
-Condition : (isnotnull(i_item_sk#5) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#6, 16, true, false, true)))
+Condition : (isnotnull(i_item_sk#5) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#6, 16))))
(5) CometProject
Input [2]: [i_item_sk#5, i_item_id#6]
-Arguments: [i_item_sk#5, i_item_id#7], [i_item_sk#5, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#6, 16, true, false, true) AS i_item_id#7]
+Arguments: [i_item_sk#5, i_item_id#7], [i_item_sk#5, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#6, 16)) AS i_item_id#7]
(6) CometBroadcastExchange
Input [2]: [i_item_sk#5, i_item_id#7]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt
index 6a7a5b4c3a..1701c20fcd 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q59/explain.txt
@@ -74,7 +74,7 @@ Condition : (isnotnull(d_date_sk#4) AND isnotnull(d_week_seq#5))
(5) CometProject
Input [3]: [d_date_sk#4, d_week_seq#5, d_day_name#6]
-Arguments: [d_date_sk#4, d_week_seq#5, d_day_name#7], [d_date_sk#4, d_week_seq#5, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, d_day_name#6, 9, true, false, true) AS d_day_name#7]
+Arguments: [d_date_sk#4, d_week_seq#5, d_day_name#7], [d_date_sk#4, d_week_seq#5, static_invoke(CharVarcharCodegenUtils.readSidePadding(d_day_name#6, 9)) AS d_day_name#7]
(6) CometBroadcastExchange
Input [3]: [d_date_sk#4, d_week_seq#5, d_day_name#7]
@@ -122,11 +122,11 @@ ReadSchema: struct
(15) CometFilter
Input [3]: [s_store_sk#36, s_store_id#37, s_store_name#38]
-Condition : (isnotnull(s_store_sk#36) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#37, 16, true, false, true)))
+Condition : (isnotnull(s_store_sk#36) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#37, 16))))
(16) CometProject
Input [3]: [s_store_sk#36, s_store_id#37, s_store_name#38]
-Arguments: [s_store_sk#36, s_store_id#39, s_store_name#38], [s_store_sk#36, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#37, 16, true, false, true) AS s_store_id#39, s_store_name#38]
+Arguments: [s_store_sk#36, s_store_id#39, s_store_name#38], [s_store_sk#36, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#37, 16)) AS s_store_id#39, s_store_name#38]
(17) CometColumnarToRow [codegen id : 2]
Input [3]: [s_store_sk#36, s_store_id#39, s_store_name#38]
@@ -199,11 +199,11 @@ ReadSchema: struct
(32) CometFilter
Input [2]: [s_store_sk#70, s_store_id#71]
-Condition : (isnotnull(s_store_sk#70) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#71, 16, true, false, true)))
+Condition : (isnotnull(s_store_sk#70) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#71, 16))))
(33) CometProject
Input [2]: [s_store_sk#70, s_store_id#71]
-Arguments: [s_store_sk#70, s_store_id#72], [s_store_sk#70, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#71, 16, true, false, true) AS s_store_id#72]
+Arguments: [s_store_sk#70, s_store_id#72], [s_store_sk#70, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#71, 16)) AS s_store_id#72]
(34) CometColumnarToRow [codegen id : 5]
Input [2]: [s_store_sk#70, s_store_id#72]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt
index 00bf642be4..c44cf5411b 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q6/explain.txt
@@ -58,7 +58,7 @@ Condition : isnotnull(ca_address_sk#1)
(3) CometProject
Input [2]: [ca_address_sk#1, ca_state#2]
-Arguments: [ca_address_sk#1, ca_state#3], [ca_address_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#2, 2, true, false, true) AS ca_state#3]
+Arguments: [ca_address_sk#1, ca_state#3], [ca_address_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#2, 2)) AS ca_state#3]
(4) CometScan parquet spark_catalog.default.customer
Output [2]: [c_customer_sk#4, c_current_addr_sk#5]
@@ -144,12 +144,12 @@ Input [2]: [ca_state#3, ss_item_sk#6]
Output [3]: [i_item_sk#14, i_current_price#15, i_category#16]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
-PushedFilters: [IsNotNull(i_current_price), IsNotNull(i_item_sk)]
+PushedFilters: [IsNotNull(i_current_price), IsNotNull(i_category), IsNotNull(i_item_sk)]
ReadSchema: struct
(22) CometFilter
Input [3]: [i_item_sk#14, i_current_price#15, i_category#16]
-Condition : (isnotnull(i_current_price#15) AND isnotnull(i_item_sk#14))
+Condition : ((isnotnull(i_current_price#15) AND isnotnull(i_category#16)) AND isnotnull(i_item_sk#14))
(23) CometColumnarToRow [codegen id : 3]
Input [3]: [i_item_sk#14, i_current_price#15, i_category#16]
@@ -162,11 +162,11 @@ ReadSchema: struct
(25) CometFilter
Input [2]: [i_current_price#17, i_category#18]
-Condition : isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#18, 50, true, false, true))
+Condition : isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#18, 50)))
(26) CometProject
Input [2]: [i_current_price#17, i_category#18]
-Arguments: [i_category#19, i_current_price#17], [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#18, 50, true, false, true) AS i_category#19, i_current_price#17]
+Arguments: [i_category#19, i_current_price#17], [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#18, 50)) AS i_category#19, i_current_price#17]
(27) CometColumnarToRow [codegen id : 1]
Input [2]: [i_category#19, i_current_price#17]
@@ -201,7 +201,7 @@ Input [2]: [avg(i_current_price)#25, i_category#19]
Arguments: HashedRelationBroadcastMode(List(input[1, string, true]),false), [plan_id=2]
(34) BroadcastHashJoin [codegen id : 3]
-Left keys [1]: [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#16, 50, true, false, true)]
+Left keys [1]: [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#16, 50))]
Right keys [1]: [i_category#19]
Join type: Inner
Join condition: (cast(i_current_price#15 as decimal(14,7)) > (1.2 * avg(i_current_price)#25))
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt
index 059e3c12cb..cbeaba426d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q60/explain.txt
@@ -152,15 +152,16 @@ Condition : isnotnull(i_item_sk#11)
Output [2]: [i_item_id#13, i_category#14]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
+PushedFilters: [IsNotNull(i_category)]
ReadSchema: struct
(18) CometFilter
Input [2]: [i_item_id#13, i_category#14]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#14, 50, true, false, true) = Music )
+Condition : (isnotnull(i_category#14) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#14, 50)) = Music ))
(19) CometProject
Input [2]: [i_item_id#13, i_category#14]
-Arguments: [i_item_id#15], [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#13, 16, true, false, true) AS i_item_id#15]
+Arguments: [i_item_id#15], [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#13, 16)) AS i_item_id#15]
(20) CometBroadcastExchange
Input [1]: [i_item_id#15]
@@ -169,11 +170,11 @@ Arguments: [i_item_id#15]
(21) CometBroadcastHashJoin
Left output [2]: [i_item_sk#11, i_item_id#12]
Right output [1]: [i_item_id#15]
-Arguments: [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#12, 16, true, false, true)], [i_item_id#15], LeftSemi, BuildRight
+Arguments: [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#12, 16))], [i_item_id#15], LeftSemi, BuildRight
(22) CometProject
Input [2]: [i_item_sk#11, i_item_id#12]
-Arguments: [i_item_sk#11, i_item_id#16], [i_item_sk#11, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#12, 16, true, false, true) AS i_item_id#16]
+Arguments: [i_item_sk#11, i_item_id#16], [i_item_sk#11, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#12, 16)) AS i_item_id#16]
(23) CometBroadcastExchange
Input [2]: [i_item_sk#11, i_item_id#16]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt
index d1918a1e67..f733b30d86 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q61/explain.txt
@@ -117,7 +117,7 @@ ReadSchema: struct
(33) CometFilter
Input [2]: [i_item_sk#21, i_category#22]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#22, 50, true, false, true) = Jewelry ) AND isnotnull(i_item_sk#21))
+Condition : ((isnotnull(i_category#22) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#22, 50)) = Jewelry )) AND isnotnull(i_item_sk#21))
(34) CometProject
Input [2]: [i_item_sk#21, i_category#22]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q62/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q62/explain.txt
index 7071a3293a..01e61914cb 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q62/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q62/explain.txt
@@ -79,7 +79,7 @@ Condition : isnotnull(sm_ship_mode_sk#8)
(10) CometProject
Input [2]: [sm_ship_mode_sk#8, sm_type#9]
-Arguments: [sm_ship_mode_sk#8, sm_type#10], [sm_ship_mode_sk#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, sm_type#9, 30, true, false, true) AS sm_type#10]
+Arguments: [sm_ship_mode_sk#8, sm_type#10], [sm_ship_mode_sk#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(sm_type#9, 30)) AS sm_type#10]
(11) CometBroadcastExchange
Input [2]: [sm_ship_mode_sk#8, sm_type#10]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q63/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q63/explain.txt
index 6236eba4b3..771e7a9fe3 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q63/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q63/explain.txt
@@ -41,7 +41,7 @@ ReadSchema: struct
(38) CometFilter
Input [3]: [s_store_sk#39, s_store_name#40, s_zip#41]
-Condition : ((isnotnull(s_store_sk#39) AND isnotnull(s_store_name#40)) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#41, 10, true, false, true)))
+Condition : ((isnotnull(s_store_sk#39) AND isnotnull(s_store_name#40)) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#41, 10))))
(39) CometProject
Input [3]: [s_store_sk#39, s_store_name#40, s_zip#41]
-Arguments: [s_store_sk#39, s_store_name#40, s_zip#42], [s_store_sk#39, s_store_name#40, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#41, 10, true, false, true) AS s_zip#42]
+Arguments: [s_store_sk#39, s_store_name#40, s_zip#42], [s_store_sk#39, s_store_name#40, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#41, 10)) AS s_zip#42]
(40) CometColumnarToRow [codegen id : 5]
Input [3]: [s_store_sk#39, s_store_name#40, s_zip#42]
@@ -462,11 +462,11 @@ ReadSchema: struct
(60) CometFilter
Input [2]: [cd_demo_sk#53, cd_marital_status#54]
-Condition : (isnotnull(cd_demo_sk#53) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#54, 1, true, false, true)))
+Condition : (isnotnull(cd_demo_sk#53) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#54, 1))))
(61) CometProject
Input [2]: [cd_demo_sk#53, cd_marital_status#54]
-Arguments: [cd_demo_sk#53, cd_marital_status#55], [cd_demo_sk#53, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#54, 1, true, false, true) AS cd_marital_status#55]
+Arguments: [cd_demo_sk#53, cd_marital_status#55], [cd_demo_sk#53, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#54, 1)) AS cd_marital_status#55]
(62) CometColumnarToRow [codegen id : 9]
Input [2]: [cd_demo_sk#53, cd_marital_status#55]
@@ -580,7 +580,7 @@ Condition : isnotnull(ca_address_sk#63)
(86) CometProject
Input [5]: [ca_address_sk#63, ca_street_number#64, ca_street_name#65, ca_city#66, ca_zip#67]
-Arguments: [ca_address_sk#63, ca_street_number#68, ca_street_name#65, ca_city#66, ca_zip#69], [ca_address_sk#63, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_street_number#64, 10, true, false, true) AS ca_street_number#68, ca_street_name#65, ca_city#66, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#67, 10, true, false, true) AS ca_zip#69]
+Arguments: [ca_address_sk#63, ca_street_number#68, ca_street_name#65, ca_city#66, ca_zip#69], [ca_address_sk#63, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_street_number#64, 10)) AS ca_street_number#68, ca_street_name#65, ca_city#66, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#67, 10)) AS ca_zip#69]
(87) CometColumnarToRow [codegen id : 14]
Input [5]: [ca_address_sk#63, ca_street_number#68, ca_street_name#65, ca_city#66, ca_zip#69]
@@ -662,11 +662,11 @@ ReadSchema: struct= 64.00)) AND (i_current_price#78 <= 74.00)) AND (i_current_price#78 >= 65.00)) AND (i_current_price#78 <= 79.00)) AND isnotnull(i_item_sk#77))
+Condition : ((((((isnotnull(i_current_price#78) AND static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#79, 20)) IN (purple ,burlywood ,indian ,spring ,floral ,medium )) AND (i_current_price#78 >= 64.00)) AND (i_current_price#78 <= 74.00)) AND (i_current_price#78 >= 65.00)) AND (i_current_price#78 <= 79.00)) AND isnotnull(i_item_sk#77))
(105) CometProject
Input [4]: [i_item_sk#77, i_current_price#78, i_color#79, i_product_name#80]
-Arguments: [i_item_sk#77, i_product_name#81], [i_item_sk#77, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_product_name#80, 50, true, false, true) AS i_product_name#81]
+Arguments: [i_item_sk#77, i_product_name#81], [i_item_sk#77, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_product_name#80, 50)) AS i_product_name#81]
(106) CometColumnarToRow [codegen id : 18]
Input [2]: [i_item_sk#77, i_product_name#81]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q65/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q65/explain.txt
index fc158da82e..62cc77ce59 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q65/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q65/explain.txt
@@ -156,7 +156,7 @@ Condition : isnotnull(i_item_sk#14)
(23) CometProject
Input [5]: [i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, i_brand#18]
-Arguments: [i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, i_brand#19], [i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#18, 50, true, false, true) AS i_brand#19]
+Arguments: [i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, i_brand#19], [i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#18, 50)) AS i_brand#19]
(24) CometColumnarToRow [codegen id : 3]
Input [5]: [i_item_sk#14, i_item_desc#15, i_current_price#16, i_wholesale_cost#17, i_brand#19]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt
index deaa8b2f1c..85f7898700 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q66/explain.txt
@@ -81,7 +81,7 @@ Condition : isnotnull(w_warehouse_sk#9)
(5) CometProject
Input [7]: [w_warehouse_sk#9, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#14, w_country#15]
-Arguments: [w_warehouse_sk#9, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#16, w_country#15], [w_warehouse_sk#9, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, w_state#14, 2, true, false, true) AS w_state#16, w_country#15]
+Arguments: [w_warehouse_sk#9, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#16, w_country#15], [w_warehouse_sk#9, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, static_invoke(CharVarcharCodegenUtils.readSidePadding(w_state#14, 2)) AS w_state#16, w_country#15]
(6) CometBroadcastExchange
Input [7]: [w_warehouse_sk#9, w_warehouse_name#10, w_warehouse_sq_ft#11, w_city#12, w_county#13, w_state#16, w_country#15]
@@ -157,7 +157,7 @@ ReadSchema: struct
(21) CometFilter
Input [2]: [sm_ship_mode_sk#22, sm_carrier#23]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, sm_carrier#23, 20, true, false, true) IN (DHL ,BARIAN ) AND isnotnull(sm_ship_mode_sk#22))
+Condition : (static_invoke(CharVarcharCodegenUtils.readSidePadding(sm_carrier#23, 20)) IN (DHL ,BARIAN ) AND isnotnull(sm_ship_mode_sk#22))
(22) CometProject
Input [2]: [sm_ship_mode_sk#22, sm_carrier#23]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q67/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q67/explain.txt
index bbd8a6044d..5a1bf3a196 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q67/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q67/explain.txt
@@ -89,7 +89,7 @@ Condition : isnotnull(s_store_sk#12)
(11) CometProject
Input [2]: [s_store_sk#12, s_store_id#13]
-Arguments: [s_store_sk#12, s_store_id#14], [s_store_sk#12, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#13, 16, true, false, true) AS s_store_id#14]
+Arguments: [s_store_sk#12, s_store_id#14], [s_store_sk#12, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#13, 16)) AS s_store_id#14]
(12) CometBroadcastExchange
Input [2]: [s_store_sk#12, s_store_id#14]
@@ -117,7 +117,7 @@ Condition : isnotnull(i_item_sk#15)
(17) CometProject
Input [5]: [i_item_sk#15, i_brand#16, i_class#17, i_category#18, i_product_name#19]
-Arguments: [i_item_sk#15, i_brand#20, i_class#21, i_category#22, i_product_name#23], [i_item_sk#15, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#16, 50, true, false, true) AS i_brand#20, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#17, 50, true, false, true) AS i_class#21, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#18, 50, true, false, true) AS i_category#22, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_product_name#19, 50, true, false, true) AS i_product_name#23]
+Arguments: [i_item_sk#15, i_brand#20, i_class#21, i_category#22, i_product_name#23], [i_item_sk#15, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#16, 50)) AS i_brand#20, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#17, 50)) AS i_class#21, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#18, 50)) AS i_category#22, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_product_name#19, 50)) AS i_product_name#23]
(18) CometBroadcastExchange
Input [5]: [i_item_sk#15, i_brand#20, i_class#21, i_category#22, i_product_name#23]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q68/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q68/explain.txt
index e1b16f88df..5f9d675096 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q68/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q68/explain.txt
@@ -202,7 +202,7 @@ Condition : (isnotnull(c_customer_sk#34) AND isnotnull(c_current_addr_sk#35))
(33) CometProject
Input [4]: [c_customer_sk#34, c_current_addr_sk#35, c_first_name#36, c_last_name#37]
-Arguments: [c_customer_sk#34, c_current_addr_sk#35, c_first_name#38, c_last_name#39], [c_customer_sk#34, c_current_addr_sk#35, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#36, 20, true, false, true) AS c_first_name#38, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#37, 30, true, false, true) AS c_last_name#39]
+Arguments: [c_customer_sk#34, c_current_addr_sk#35, c_first_name#38, c_last_name#39], [c_customer_sk#34, c_current_addr_sk#35, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#36, 20)) AS c_first_name#38, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#37, 30)) AS c_last_name#39]
(34) CometColumnarToRow [codegen id : 2]
Input [4]: [c_customer_sk#34, c_current_addr_sk#35, c_first_name#38, c_last_name#39]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q69/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q69/explain.txt
index b983c94a2d..1ced787ef1 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q69/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q69/explain.txt
@@ -182,7 +182,7 @@ ReadSchema: struct
(29) CometFilter
Input [2]: [ca_address_sk#18, ca_state#19]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#19, 2, true, false, true) IN (KY,GA,NM) AND isnotnull(ca_address_sk#18))
+Condition : (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#19, 2)) IN (KY,GA,NM) AND isnotnull(ca_address_sk#18))
(30) CometProject
Input [2]: [ca_address_sk#18, ca_state#19]
@@ -218,7 +218,7 @@ Condition : isnotnull(cd_demo_sk#20)
(37) CometProject
Input [6]: [cd_demo_sk#20, cd_gender#21, cd_marital_status#22, cd_education_status#23, cd_purchase_estimate#24, cd_credit_rating#25]
-Arguments: [cd_demo_sk#20, cd_gender#26, cd_marital_status#27, cd_education_status#28, cd_purchase_estimate#24, cd_credit_rating#29], [cd_demo_sk#20, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#21, 1, true, false, true) AS cd_gender#26, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#22, 1, true, false, true) AS cd_marital_status#27, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#23, 20, true, false, true) AS cd_education_status#28, cd_purchase_estimate#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_credit_rating#25, 10, true, false, true) AS cd_credit_rating#29]
+Arguments: [cd_demo_sk#20, cd_gender#26, cd_marital_status#27, cd_education_status#28, cd_purchase_estimate#24, cd_credit_rating#29], [cd_demo_sk#20, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#21, 1)) AS cd_gender#26, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#22, 1)) AS cd_marital_status#27, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#23, 20)) AS cd_education_status#28, cd_purchase_estimate#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_credit_rating#25, 10)) AS cd_credit_rating#29]
(38) CometColumnarToRow [codegen id : 4]
Input [6]: [cd_demo_sk#20, cd_gender#26, cd_marital_status#27, cd_education_status#28, cd_purchase_estimate#24, cd_credit_rating#29]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt
index 2aabd42361..cda70b8675 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q7/explain.txt
@@ -49,12 +49,12 @@ Condition : ((isnotnull(ss_cdemo_sk#2) AND isnotnull(ss_item_sk#1)) AND isnotnul
Output [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
-PushedFilters: [IsNotNull(cd_demo_sk)]
+PushedFilters: [IsNotNull(cd_gender), IsNotNull(cd_marital_status), IsNotNull(cd_education_status), IsNotNull(cd_demo_sk)]
ReadSchema: struct
(4) CometFilter
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
-Condition : ((((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#11, 1, true, false, true) = M) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#12, 1, true, false, true) = S)) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#13, 20, true, false, true) = College )) AND isnotnull(cd_demo_sk#10))
+Condition : ((((((isnotnull(cd_gender#11) AND isnotnull(cd_marital_status#12)) AND isnotnull(cd_education_status#13)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#11, 1)) = M)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#12, 1)) = S)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#13, 20)) = College )) AND isnotnull(cd_demo_sk#10))
(5) CometProject
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
@@ -114,7 +114,7 @@ Condition : isnotnull(i_item_sk#16)
(17) CometProject
Input [2]: [i_item_sk#16, i_item_id#17]
-Arguments: [i_item_sk#16, i_item_id#18], [i_item_sk#16, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#17, 16, true, false, true) AS i_item_id#18]
+Arguments: [i_item_sk#16, i_item_id#18], [i_item_sk#16, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#17, 16)) AS i_item_id#18]
(18) CometBroadcastExchange
Input [2]: [i_item_sk#16, i_item_id#18]
@@ -138,7 +138,7 @@ ReadSchema: struct
(22) CometFilter
Input [3]: [p_promo_sk#19, p_channel_email#20, p_channel_event#21]
-Condition : (((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, p_channel_email#20, 1, true, false, true) = N) OR (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, p_channel_event#21, 1, true, false, true) = N)) AND isnotnull(p_promo_sk#19))
+Condition : (((static_invoke(CharVarcharCodegenUtils.readSidePadding(p_channel_email#20, 1)) = N) OR (static_invoke(CharVarcharCodegenUtils.readSidePadding(p_channel_event#21, 1)) = N)) AND isnotnull(p_promo_sk#19))
(23) CometProject
Input [3]: [p_promo_sk#19, p_channel_email#20, p_channel_event#21]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q70/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q70/explain.txt
index 547c865426..e8c3811a9b 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q70/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q70/explain.txt
@@ -133,7 +133,7 @@ Condition : isnotnull(s_store_sk#14)
(17) CometProject
Input [2]: [s_store_sk#14, s_state#15]
-Arguments: [s_store_sk#14, s_state#16], [s_store_sk#14, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#15, 2, true, false, true) AS s_state#16]
+Arguments: [s_store_sk#14, s_state#16], [s_store_sk#14, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#15, 2)) AS s_state#16]
(18) CometBroadcastExchange
Input [2]: [s_store_sk#14, s_state#16]
@@ -209,13 +209,13 @@ Input [1]: [s_state#16]
Arguments: HashedRelationBroadcastMode(List(input[0, string, true]),false), [plan_id=2]
(35) BroadcastHashJoin [codegen id : 4]
-Left keys [1]: [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#9, 2, true, false, true)]
+Left keys [1]: [static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#9, 2))]
Right keys [1]: [s_state#16]
Join type: LeftSemi
Join condition: None
(36) Project [codegen id : 4]
-Output [3]: [s_store_sk#7, s_county#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#9, 2, true, false, true) AS s_state#23]
+Output [3]: [s_store_sk#7, s_county#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#9, 2)) AS s_state#23]
Input [3]: [s_store_sk#7, s_county#8, s_state#9]
(37) BroadcastExchange
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt
index f6d8a457a3..37be052ba3 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q71/explain.txt
@@ -53,7 +53,7 @@ Condition : ((isnotnull(i_manager_id#4) AND (i_manager_id#4 = 1)) AND isnotnull(
(3) CometProject
Input [4]: [i_item_sk#1, i_brand_id#2, i_brand#3, i_manager_id#4]
-Arguments: [i_item_sk#1, i_brand_id#2, i_brand#5], [i_item_sk#1, i_brand_id#2, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#3, 50, true, false, true) AS i_brand#5]
+Arguments: [i_item_sk#1, i_brand_id#2, i_brand#5], [i_item_sk#1, i_brand_id#2, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#3, 50)) AS i_brand#5]
(4) CometBroadcastExchange
Input [3]: [i_item_sk#1, i_brand_id#2, i_brand#5]
@@ -170,7 +170,7 @@ ReadSchema: struct
(27) CometFilter
Input [4]: [t_time_sk#35, t_hour#36, t_minute#37, t_meal_time#38]
-Condition : (((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, t_meal_time#38, 20, true, false, true) = breakfast ) OR (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, t_meal_time#38, 20, true, false, true) = dinner )) AND isnotnull(t_time_sk#35))
+Condition : (((static_invoke(CharVarcharCodegenUtils.readSidePadding(t_meal_time#38, 20)) = breakfast ) OR (static_invoke(CharVarcharCodegenUtils.readSidePadding(t_meal_time#38, 20)) = dinner )) AND isnotnull(t_time_sk#35))
(28) CometProject
Input [4]: [t_time_sk#35, t_hour#36, t_minute#37, t_meal_time#38]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q72/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q72/explain.txt
index 1aa31ec604..f7cfd53f2a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q72/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q72/explain.txt
@@ -154,12 +154,12 @@ Arguments: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_s
Output [2]: [cd_demo_sk#18, cd_marital_status#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
-PushedFilters: [IsNotNull(cd_demo_sk)]
+PushedFilters: [IsNotNull(cd_marital_status), IsNotNull(cd_demo_sk)]
ReadSchema: struct
(19) CometFilter
Input [2]: [cd_demo_sk#18, cd_marital_status#19]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#19, 1, true, false, true) = D) AND isnotnull(cd_demo_sk#18))
+Condition : ((isnotnull(cd_marital_status#19) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#19, 1)) = D)) AND isnotnull(cd_demo_sk#18))
(20) CometProject
Input [2]: [cd_demo_sk#18, cd_marital_status#19]
@@ -182,12 +182,12 @@ Arguments: [cs_ship_date_sk#1, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5,
Output [2]: [hd_demo_sk#20, hd_buy_potential#21]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
-PushedFilters: [IsNotNull(hd_demo_sk)]
+PushedFilters: [IsNotNull(hd_buy_potential), IsNotNull(hd_demo_sk)]
ReadSchema: struct
(25) CometFilter
Input [2]: [hd_demo_sk#20, hd_buy_potential#21]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, hd_buy_potential#21, 15, true, false, true) = >10000 ) AND isnotnull(hd_demo_sk#20))
+Condition : ((isnotnull(hd_buy_potential#21) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#21, 15)) = >10000 )) AND isnotnull(hd_demo_sk#20))
(26) CometProject
Input [2]: [hd_demo_sk#20, hd_buy_potential#21]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt
index 77957c5099..5ebbecfd4a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q73/explain.txt
@@ -111,7 +111,7 @@ ReadSchema: struct10000 ) OR (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, hd_buy_potential#13, 15, true, false, true) = unknown ))) AND (hd_vehicle_count#15 > 0)) AND CASE WHEN (hd_vehicle_count#15 > 0) THEN (knownfloatingpointnormalized(normalizenanandzero((cast(hd_dep_count#14 as double) / knownfloatingpointnormalized(normalizenanandzero(cast(hd_vehicle_count#15 as double)))))) > 1.0) END) AND isnotnull(hd_demo_sk#12))
+Condition : ((((isnotnull(hd_vehicle_count#15) AND ((static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#13, 15)) = >10000 ) OR (static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#13, 15)) = unknown ))) AND (hd_vehicle_count#15 > 0)) AND CASE WHEN (hd_vehicle_count#15 > 0) THEN (knownfloatingpointnormalized(normalizenanandzero((cast(hd_dep_count#14 as double) / knownfloatingpointnormalized(normalizenanandzero(cast(hd_vehicle_count#15 as double)))))) > 1.0) END) AND isnotnull(hd_demo_sk#12))
(17) CometProject
Input [4]: [hd_demo_sk#12, hd_buy_potential#13, hd_dep_count#14, hd_vehicle_count#15]
@@ -161,7 +161,7 @@ Condition : isnotnull(c_customer_sk#18)
(27) CometProject
Input [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22]
-Arguments: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26], [c_customer_sk#18, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_salutation#19, 10, true, false, true) AS c_salutation#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#20, 20, true, false, true) AS c_first_name#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#21, 30, true, false, true) AS c_last_name#25, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_preferred_cust_flag#22, 1, true, false, true) AS c_preferred_cust_flag#26]
+Arguments: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26], [c_customer_sk#18, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_salutation#19, 10)) AS c_salutation#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#20, 20)) AS c_first_name#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#21, 30)) AS c_last_name#25, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_preferred_cust_flag#22, 1)) AS c_preferred_cust_flag#26]
(28) CometBroadcastExchange
Input [5]: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt
index 369422f048..2319ef342b 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q74/explain.txt
@@ -89,11 +89,11 @@ ReadSchema: struct
(4) CometFilter
Input [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]
-Condition : ((((((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#11, 50, true, false, true) = Books ) AND isnotnull(i_item_sk#7)) AND isnotnull(i_brand_id#8)) AND isnotnull(i_class_id#9)) AND isnotnull(i_category_id#10)) AND isnotnull(i_manufact_id#12))
+Condition : ((((((isnotnull(i_category#11) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#11, 50)) = Books )) AND isnotnull(i_item_sk#7)) AND isnotnull(i_brand_id#8)) AND isnotnull(i_class_id#9)) AND isnotnull(i_category_id#10)) AND isnotnull(i_manufact_id#12))
(5) CometProject
Input [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q76/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q76/explain.txt
index 19589995a4..d22f387aea 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q76/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q76/explain.txt
@@ -62,7 +62,7 @@ Condition : isnotnull(i_item_sk#5)
(5) CometProject
Input [2]: [i_item_sk#5, i_category#6]
-Arguments: [i_item_sk#5, i_category#7], [i_item_sk#5, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#6, 50, true, false, true) AS i_category#7]
+Arguments: [i_item_sk#5, i_category#7], [i_item_sk#5, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#6, 50)) AS i_category#7]
(6) CometBroadcastExchange
Input [2]: [i_item_sk#5, i_category#7]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q79/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q79/explain.txt
index ae60c0a4d6..2474673e7b 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q79/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q79/explain.txt
@@ -167,7 +167,7 @@ Condition : isnotnull(c_customer_sk#27)
(28) CometProject
Input [3]: [c_customer_sk#27, c_first_name#28, c_last_name#29]
-Arguments: [c_customer_sk#27, c_first_name#30, c_last_name#31], [c_customer_sk#27, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#28, 20, true, false, true) AS c_first_name#30, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#29, 30, true, false, true) AS c_last_name#31]
+Arguments: [c_customer_sk#27, c_first_name#30, c_last_name#31], [c_customer_sk#27, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#28, 20)) AS c_first_name#30, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#29, 30)) AS c_last_name#31]
(29) CometColumnarToRow [codegen id : 2]
Input [3]: [c_customer_sk#27, c_first_name#30, c_last_name#31]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q8/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q8/explain.txt
index c65c920712..f8603b301a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q8/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q8/explain.txt
@@ -95,11 +95,11 @@ ReadSchema: struct
(10) CometFilter
Input [3]: [s_store_sk#8, s_store_name#9, s_zip#10]
-Condition : (isnotnull(s_store_sk#8) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#10, 10, true, false, true)))
+Condition : (isnotnull(s_store_sk#8) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#10, 10))))
(11) CometProject
Input [3]: [s_store_sk#8, s_store_name#9, s_zip#10]
-Arguments: [s_store_sk#8, s_store_name#9, s_zip#11], [s_store_sk#8, s_store_name#9, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#10, 10, true, false, true) AS s_zip#11]
+Arguments: [s_store_sk#8, s_store_name#9, s_zip#11], [s_store_sk#8, s_store_name#9, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#10, 10)) AS s_zip#11]
(12) CometBroadcastExchange
Input [3]: [s_store_sk#8, s_store_name#9, s_zip#11]
@@ -122,11 +122,11 @@ ReadSchema: struct
(16) CometFilter
Input [1]: [ca_zip#12]
-Condition : (substr(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#12, 10, true, false, true), 1, 5) INSET 10144, 10336, 10390, 10445, 10516, 10567, 11101, 11356, 11376, 11489, 11634, 11928, 12305, 13354, 13375, 13376, 13394, 13595, 13695, 13955, 14060, 14089, 14171, 14328, 14663, 14867, 14922, 15126, 15146, 15371, 15455, 15559, 15723, 15734, 15765, 15798, 15882, 16021, 16725, 16807, 17043, 17183, 17871, 17879, 17920, 18119, 18270, 18376, 18383, 18426, 18652, 18767, 18799, 18840, 18842, 18845, 18906, 19430, 19505, 19512, 19515, 19736, 19769, 19849, 20004, 20260, 20548, 21076, 21195, 21286, 21309, 21337, 21756, 22152, 22245, 22246, 22351, 22437, 22461, 22685, 22744, 22752, 22927, 23006, 23470, 23932, 23968, 24128, 24206, 24317, 24610, 24671, 24676, 24996, 25003, 25103, 25280, 25486, 25631, 25733, 25782, 25858, 25989, 26065, 26105, 26231, 26233, 26653, 26689, 26859, 27068, 27156, 27385, 27700, 28286, 28488, 28545, 28577, 28587, 28709, 28810, 28898, 28915, 29178, 29741, 29839, 30010, 30122, 30431, 30450, 30469, 30625, 30903, 31016, 31029, 31387, 31671, 31880, 32213, 32754, 33123, 33282, 33515, 33786, 34102, 34322, 34425, 35258, 35458, 35474, 35576, 35850, 35942, 36233, 36420, 36446, 36495, 36634, 37125, 37126, 37930, 38122, 38193, 38415, 38607, 38935, 39127, 39192, 39371, 39516, 39736, 39861, 39972, 40081, 40162, 40558, 40604, 41248, 41367, 41368, 41766, 41918, 42029, 42666, 42961, 43285, 43848, 43933, 44165, 44438, 45200, 45266, 45375, 45549, 45692, 45721, 45748, 46081, 46136, 46820, 47305, 47537, 47770, 48033, 48425, 48583, 49130, 49156, 49448, 50016, 50298, 50308, 50412, 51061, 51103, 51200, 51211, 51622, 51649, 51650, 51798, 51949, 52867, 53179, 53268, 53535, 53672, 54364, 54601, 54917, 55253, 55307, 55565, 56240, 56458, 56529, 56571, 56575, 56616, 56691, 56910, 57047, 57647, 57665, 57834, 57855, 58048, 58058, 58078, 58263, 58470, 58943, 59166, 59402, 60099, 60279, 60576, 61265, 61547, 61810, 
61860, 62377, 62496, 62878, 62971, 63089, 63193, 63435, 63792, 63837, 63981, 64034, 64147, 64457, 64528, 64544, 65084, 65164, 66162, 66708, 66864, 67030, 67301, 67467, 67473, 67853, 67875, 67897, 68014, 68100, 68101, 68309, 68341, 68621, 68786, 68806, 68880, 68893, 68908, 69035, 69399, 69913, 69952, 70372, 70466, 70738, 71256, 71286, 71791, 71954, 72013, 72151, 72175, 72305, 72325, 72425, 72550, 72823, 73134, 73171, 73241, 73273, 73520, 73650, 74351, 75691, 76107, 76231, 76232, 76614, 76638, 76698, 77191, 77556, 77610, 77721, 78451, 78567, 78668, 78890, 79077, 79777, 79994, 81019, 81096, 81312, 81426, 82136, 82276, 82636, 83041, 83144, 83444, 83849, 83921, 83926, 83933, 84093, 84935, 85816, 86057, 86198, 86284, 86379, 87343, 87501, 87816, 88086, 88190, 88424, 88885, 89091, 89360, 90225, 90257, 90578, 91068, 91110, 91137, 91393, 92712, 94167, 94627, 94898, 94945, 94983, 96451, 96576, 96765, 96888, 96976, 97189, 97789, 98025, 98235, 98294, 98359, 98569, 99076, 99543 AND isnotnull(substr(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#12, 10, true, false, true), 1, 5)))
+Condition : (substr(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#12, 10)), 1, 5) INSET 10144, 10336, 10390, 10445, 10516, 10567, 11101, 11356, 11376, 11489, 11634, 11928, 12305, 13354, 13375, 13376, 13394, 13595, 13695, 13955, 14060, 14089, 14171, 14328, 14663, 14867, 14922, 15126, 15146, 15371, 15455, 15559, 15723, 15734, 15765, 15798, 15882, 16021, 16725, 16807, 17043, 17183, 17871, 17879, 17920, 18119, 18270, 18376, 18383, 18426, 18652, 18767, 18799, 18840, 18842, 18845, 18906, 19430, 19505, 19512, 19515, 19736, 19769, 19849, 20004, 20260, 20548, 21076, 21195, 21286, 21309, 21337, 21756, 22152, 22245, 22246, 22351, 22437, 22461, 22685, 22744, 22752, 22927, 23006, 23470, 23932, 23968, 24128, 24206, 24317, 24610, 24671, 24676, 24996, 25003, 25103, 25280, 25486, 25631, 25733, 25782, 25858, 25989, 26065, 26105, 26231, 26233, 26653, 26689, 26859, 27068, 27156, 27385, 27700, 28286, 28488, 28545, 28577, 28587, 28709, 28810, 28898, 28915, 29178, 29741, 29839, 30010, 30122, 30431, 30450, 30469, 30625, 30903, 31016, 31029, 31387, 31671, 31880, 32213, 32754, 33123, 33282, 33515, 33786, 34102, 34322, 34425, 35258, 35458, 35474, 35576, 35850, 35942, 36233, 36420, 36446, 36495, 36634, 37125, 37126, 37930, 38122, 38193, 38415, 38607, 38935, 39127, 39192, 39371, 39516, 39736, 39861, 39972, 40081, 40162, 40558, 40604, 41248, 41367, 41368, 41766, 41918, 42029, 42666, 42961, 43285, 43848, 43933, 44165, 44438, 45200, 45266, 45375, 45549, 45692, 45721, 45748, 46081, 46136, 46820, 47305, 47537, 47770, 48033, 48425, 48583, 49130, 49156, 49448, 50016, 50298, 50308, 50412, 51061, 51103, 51200, 51211, 51622, 51649, 51650, 51798, 51949, 52867, 53179, 53268, 53535, 53672, 54364, 54601, 54917, 55253, 55307, 55565, 56240, 56458, 56529, 56571, 56575, 56616, 56691, 56910, 57047, 57647, 57665, 57834, 57855, 58048, 58058, 58078, 58263, 58470, 58943, 59166, 59402, 60099, 60279, 60576, 61265, 61547, 61810, 61860, 62377, 62496, 62878, 62971, 63089, 63193, 63435, 63792, 63837, 
63981, 64034, 64147, 64457, 64528, 64544, 65084, 65164, 66162, 66708, 66864, 67030, 67301, 67467, 67473, 67853, 67875, 67897, 68014, 68100, 68101, 68309, 68341, 68621, 68786, 68806, 68880, 68893, 68908, 69035, 69399, 69913, 69952, 70372, 70466, 70738, 71256, 71286, 71791, 71954, 72013, 72151, 72175, 72305, 72325, 72425, 72550, 72823, 73134, 73171, 73241, 73273, 73520, 73650, 74351, 75691, 76107, 76231, 76232, 76614, 76638, 76698, 77191, 77556, 77610, 77721, 78451, 78567, 78668, 78890, 79077, 79777, 79994, 81019, 81096, 81312, 81426, 82136, 82276, 82636, 83041, 83144, 83444, 83849, 83921, 83926, 83933, 84093, 84935, 85816, 86057, 86198, 86284, 86379, 87343, 87501, 87816, 88086, 88190, 88424, 88885, 89091, 89360, 90225, 90257, 90578, 91068, 91110, 91137, 91393, 92712, 94167, 94627, 94898, 94945, 94983, 96451, 96576, 96765, 96888, 96976, 97189, 97789, 98025, 98235, 98294, 98359, 98569, 99076, 99543 AND isnotnull(substr(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#12, 10)), 1, 5)))
(17) CometProject
Input [1]: [ca_zip#12]
-Arguments: [ca_zip#13], [substr(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#12, 10, true, false, true), 1, 5) AS ca_zip#13]
+Arguments: [ca_zip#13], [substr(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#12, 10)), 1, 5) AS ca_zip#13]
(18) CometScan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#14, ca_zip#15]
@@ -141,18 +141,18 @@ Condition : isnotnull(ca_address_sk#14)
(20) CometProject
Input [2]: [ca_address_sk#14, ca_zip#15]
-Arguments: [ca_address_sk#14, ca_zip#16], [ca_address_sk#14, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#15, 10, true, false, true) AS ca_zip#16]
+Arguments: [ca_address_sk#14, ca_zip#16], [ca_address_sk#14, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#15, 10)) AS ca_zip#16]
(21) CometScan parquet spark_catalog.default.customer
Output [2]: [c_current_addr_sk#17, c_preferred_cust_flag#18]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer]
-PushedFilters: [IsNotNull(c_current_addr_sk)]
+PushedFilters: [IsNotNull(c_preferred_cust_flag), IsNotNull(c_current_addr_sk)]
ReadSchema: struct
(22) CometFilter
Input [2]: [c_current_addr_sk#17, c_preferred_cust_flag#18]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_preferred_cust_flag#18, 1, true, false, true) = Y) AND isnotnull(c_current_addr_sk#17))
+Condition : ((isnotnull(c_preferred_cust_flag#18) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(c_preferred_cust_flag#18, 1)) = Y)) AND isnotnull(c_current_addr_sk#17))
(23) CometProject
Input [2]: [c_current_addr_sk#17, c_preferred_cust_flag#18]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt
index 54edbdb1b5..aeb51f7360 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q80/explain.txt
@@ -203,7 +203,7 @@ Condition : isnotnull(s_store_sk#16)
(20) CometProject
Input [2]: [s_store_sk#16, s_store_id#17]
-Arguments: [s_store_sk#16, s_store_id#18], [s_store_sk#16, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#17, 16, true, false, true) AS s_store_id#18]
+Arguments: [s_store_sk#16, s_store_id#18], [s_store_sk#16, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#17, 16)) AS s_store_id#18]
(21) CometBroadcastExchange
Input [2]: [s_store_sk#16, s_store_id#18]
@@ -250,12 +250,12 @@ Arguments: [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#
Output [2]: [p_promo_sk#21, p_channel_tv#22]
Batched: true
Location [not included in comparison]/{warehouse_dir}/promotion]
-PushedFilters: [IsNotNull(p_promo_sk)]
+PushedFilters: [IsNotNull(p_channel_tv), IsNotNull(p_promo_sk)]
ReadSchema: struct
(31) CometFilter
Input [2]: [p_promo_sk#21, p_channel_tv#22]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, p_channel_tv#22, 1, true, false, true) = N) AND isnotnull(p_promo_sk#21))
+Condition : ((isnotnull(p_channel_tv#22) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(p_channel_tv#22, 1)) = N)) AND isnotnull(p_promo_sk#21))
(32) CometProject
Input [2]: [p_promo_sk#21, p_channel_tv#22]
@@ -375,7 +375,7 @@ Condition : isnotnull(cp_catalog_page_sk#55)
(57) CometProject
Input [2]: [cp_catalog_page_sk#55, cp_catalog_page_id#56]
-Arguments: [cp_catalog_page_sk#55, cp_catalog_page_id#57], [cp_catalog_page_sk#55, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cp_catalog_page_id#56, 16, true, false, true) AS cp_catalog_page_id#57]
+Arguments: [cp_catalog_page_sk#55, cp_catalog_page_id#57], [cp_catalog_page_sk#55, static_invoke(CharVarcharCodegenUtils.readSidePadding(cp_catalog_page_id#56, 16)) AS cp_catalog_page_id#57]
(58) CometBroadcastExchange
Input [2]: [cp_catalog_page_sk#55, cp_catalog_page_id#57]
@@ -515,7 +515,7 @@ Condition : isnotnull(web_site_sk#92)
(88) CometProject
Input [2]: [web_site_sk#92, web_site_id#93]
-Arguments: [web_site_sk#92, web_site_id#94], [web_site_sk#92, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, web_site_id#93, 16, true, false, true) AS web_site_id#94]
+Arguments: [web_site_sk#92, web_site_id#94], [web_site_sk#92, static_invoke(CharVarcharCodegenUtils.readSidePadding(web_site_id#93, 16)) AS web_site_id#94]
(89) CometBroadcastExchange
Input [2]: [web_site_sk#92, web_site_id#94]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt
index 9c32b75905..4c6c8a08ec 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q81/explain.txt
@@ -106,11 +106,11 @@ ReadSchema: struct
(10) CometFilter
Input [2]: [ca_address_sk#8, ca_state#9]
-Condition : (isnotnull(ca_address_sk#8) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#9, 2, true, false, true)))
+Condition : (isnotnull(ca_address_sk#8) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#9, 2))))
(11) CometProject
Input [2]: [ca_address_sk#8, ca_state#9]
-Arguments: [ca_address_sk#8, ca_state#10], [ca_address_sk#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#9, 2, true, false, true) AS ca_state#10]
+Arguments: [ca_address_sk#8, ca_state#10], [ca_address_sk#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#9, 2)) AS ca_state#10]
(12) CometBroadcastExchange
Input [2]: [ca_address_sk#8, ca_state#10]
@@ -265,7 +265,7 @@ Condition : (isnotnull(c_customer_sk#35) AND isnotnull(c_current_addr_sk#37))
(44) CometProject
Input [6]: [c_customer_sk#35, c_customer_id#36, c_current_addr_sk#37, c_salutation#38, c_first_name#39, c_last_name#40]
-Arguments: [c_customer_sk#35, c_customer_id#41, c_current_addr_sk#37, c_salutation#42, c_first_name#43, c_last_name#44], [c_customer_sk#35, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_customer_id#36, 16, true, false, true) AS c_customer_id#41, c_current_addr_sk#37, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_salutation#38, 10, true, false, true) AS c_salutation#42, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#39, 20, true, false, true) AS c_first_name#43, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#40, 30, true, false, true) AS c_last_name#44]
+Arguments: [c_customer_sk#35, c_customer_id#41, c_current_addr_sk#37, c_salutation#42, c_first_name#43, c_last_name#44], [c_customer_sk#35, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_customer_id#36, 16)) AS c_customer_id#41, c_current_addr_sk#37, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_salutation#38, 10)) AS c_salutation#42, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#39, 20)) AS c_first_name#43, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#40, 30)) AS c_last_name#44]
(45) CometColumnarToRow [codegen id : 5]
Input [6]: [c_customer_sk#35, c_customer_id#41, c_current_addr_sk#37, c_salutation#42, c_first_name#43, c_last_name#44]
@@ -288,16 +288,16 @@ Input [8]: [ctr_customer_sk#14, ctr_total_return#16, c_customer_sk#35, c_custome
Output [12]: [ca_address_sk#45, ca_street_number#46, ca_street_name#47, ca_street_type#48, ca_suite_number#49, ca_city#50, ca_county#51, ca_state#52, ca_zip#53, ca_country#54, ca_gmt_offset#55, ca_location_type#56]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
-PushedFilters: [IsNotNull(ca_address_sk)]
+PushedFilters: [IsNotNull(ca_state), IsNotNull(ca_address_sk)]
ReadSchema: struct
(50) CometFilter
Input [12]: [ca_address_sk#45, ca_street_number#46, ca_street_name#47, ca_street_type#48, ca_suite_number#49, ca_city#50, ca_county#51, ca_state#52, ca_zip#53, ca_country#54, ca_gmt_offset#55, ca_location_type#56]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#52, 2, true, false, true) = GA) AND isnotnull(ca_address_sk#45))
+Condition : ((isnotnull(ca_state#52) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#52, 2)) = GA)) AND isnotnull(ca_address_sk#45))
(51) CometProject
Input [12]: [ca_address_sk#45, ca_street_number#46, ca_street_name#47, ca_street_type#48, ca_suite_number#49, ca_city#50, ca_county#51, ca_state#52, ca_zip#53, ca_country#54, ca_gmt_offset#55, ca_location_type#56]
-Arguments: [ca_address_sk#45, ca_street_number#57, ca_street_name#47, ca_street_type#58, ca_suite_number#59, ca_city#50, ca_county#51, ca_state#60, ca_zip#61, ca_country#54, ca_gmt_offset#55, ca_location_type#62], [ca_address_sk#45, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_street_number#46, 10, true, false, true) AS ca_street_number#57, ca_street_name#47, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_street_type#48, 15, true, false, true) AS ca_street_type#58, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_suite_number#49, 10, true, false, true) AS ca_suite_number#59, ca_city#50, ca_county#51, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#52, 2, true, false, true) AS ca_state#60, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#53, 10, true, false, true) AS ca_zip#61, ca_country#54, ca_gmt_offset#55, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_location_type#56, 20, true, false, true) AS ca_location_type#62]
+Arguments: [ca_address_sk#45, ca_street_number#57, ca_street_name#47, ca_street_type#58, ca_suite_number#59, ca_city#50, ca_county#51, ca_state#60, ca_zip#61, ca_country#54, ca_gmt_offset#55, ca_location_type#62], [ca_address_sk#45, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_street_number#46, 10)) AS ca_street_number#57, ca_street_name#47, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_street_type#48, 15)) AS ca_street_type#58, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_suite_number#49, 10)) AS ca_suite_number#59, ca_city#50, ca_county#51, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#52, 2)) AS ca_state#60, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#53, 10)) AS ca_zip#61, ca_country#54, ca_gmt_offset#55, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_location_type#56, 20)) AS ca_location_type#62]
(52) CometColumnarToRow [codegen id : 6]
Input [12]: [ca_address_sk#45, ca_street_number#57, ca_street_name#47, ca_street_type#58, ca_suite_number#59, ca_city#50, ca_county#51, ca_state#60, ca_zip#61, ca_country#54, ca_gmt_offset#55, ca_location_type#62]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt
index 6b35c3253d..1c6f1071ab 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q82/explain.txt
@@ -40,7 +40,7 @@ Condition : ((((isnotnull(i_current_price#4) AND (i_current_price#4 >= 62.00)) A
(3) CometProject
Input [5]: [i_item_sk#1, i_item_id#2, i_item_desc#3, i_current_price#4, i_manufact_id#5]
-Arguments: [i_item_sk#1, i_item_id#6, i_item_desc#3, i_current_price#4], [i_item_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#2, 16, true, false, true) AS i_item_id#6, i_item_desc#3, i_current_price#4]
+Arguments: [i_item_sk#1, i_item_id#6, i_item_desc#3, i_current_price#4], [i_item_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#2, 16)) AS i_item_id#6, i_item_desc#3, i_current_price#4]
(4) CometScan parquet spark_catalog.default.inventory
Output [3]: [inv_item_sk#7, inv_quantity_on_hand#8, inv_date_sk#9]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt
index 6c53602be9..7ba7ff28cf 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q83.ansi/explain.txt
@@ -76,11 +76,11 @@ ReadSchema: struct
(4) CometFilter
Input [2]: [i_item_sk#5, i_item_id#6]
-Condition : (isnotnull(i_item_sk#5) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#6, 16, true, false, true)))
+Condition : (isnotnull(i_item_sk#5) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#6, 16))))
(5) CometProject
Input [2]: [i_item_sk#5, i_item_id#6]
-Arguments: [i_item_sk#5, i_item_id#7], [i_item_sk#5, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#6, 16, true, false, true) AS i_item_id#7]
+Arguments: [i_item_sk#5, i_item_id#7], [i_item_sk#5, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#6, 16)) AS i_item_id#7]
(6) CometBroadcastExchange
Input [2]: [i_item_sk#5, i_item_id#7]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt
index 2d39410c5c..7f4c3e38f2 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q84/explain.txt
@@ -47,7 +47,7 @@ Condition : ((isnotnull(c_current_addr_sk#4) AND isnotnull(c_current_cdemo_sk#2)
(3) CometProject
Input [6]: [c_customer_id#1, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#5, c_last_name#6]
-Arguments: [c_customer_id#7, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#8, c_last_name#9], [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_customer_id#1, 16, true, false, true) AS c_customer_id#7, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#5, 20, true, false, true) AS c_first_name#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#6, 30, true, false, true) AS c_last_name#9]
+Arguments: [c_customer_id#7, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, c_first_name#8, c_last_name#9], [static_invoke(CharVarcharCodegenUtils.readSidePadding(c_customer_id#1, 16)) AS c_customer_id#7, c_current_cdemo_sk#2, c_current_hdemo_sk#3, c_current_addr_sk#4, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#5, 20)) AS c_first_name#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#6, 30)) AS c_last_name#9]
(4) CometScan parquet spark_catalog.default.customer_address
Output [2]: [ca_address_sk#10, ca_city#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q85/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q85/explain.txt
index 2a3d8b7bee..848b9c4788 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q85/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q85/explain.txt
@@ -123,11 +123,11 @@ ReadSchema: struct
(27) CometFilter
Input [3]: [ca_address_sk#29, ca_state#30, ca_country#31]
-Condition : (((isnotnull(ca_country#31) AND (ca_country#31 = United States)) AND isnotnull(ca_address_sk#29)) AND ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#30, 2, true, false, true) IN (IN,OH,NJ) OR staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#30, 2, true, false, true) IN (WI,CT,KY)) OR staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#30, 2, true, false, true) IN (LA,IA,AR)))
+Condition : (((isnotnull(ca_country#31) AND (ca_country#31 = United States)) AND isnotnull(ca_address_sk#29)) AND ((static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#30, 2)) IN (IN,OH,NJ) OR static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#30, 2)) IN (WI,CT,KY)) OR static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#30, 2)) IN (LA,IA,AR)))
(28) CometProject
Input [3]: [ca_address_sk#29, ca_state#30, ca_country#31]
-Arguments: [ca_address_sk#29, ca_state#32], [ca_address_sk#29, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#30, 2, true, false, true) AS ca_state#32]
+Arguments: [ca_address_sk#29, ca_state#32], [ca_address_sk#29, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#30, 2)) AS ca_state#32]
(29) CometBroadcastExchange
Input [2]: [ca_address_sk#29, ca_state#32]
@@ -239,7 +239,7 @@ Condition : isnotnull(r_reason_sk#35)
(40) CometProject
Input [2]: [r_reason_sk#35, r_reason_desc#36]
-Arguments: [r_reason_sk#35, r_reason_desc#37], [r_reason_sk#35, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, r_reason_desc#36, 100, true, false, true) AS r_reason_desc#37]
+Arguments: [r_reason_sk#35, r_reason_desc#37], [r_reason_sk#35, static_invoke(CharVarcharCodegenUtils.readSidePadding(r_reason_desc#36, 100)) AS r_reason_desc#37]
(41) CometBroadcastExchange
Input [2]: [r_reason_sk#35, r_reason_desc#37]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q86/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q86/explain.txt
index e7d8a722b3..f18232bfc2 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q86/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q86/explain.txt
@@ -80,7 +80,7 @@ Condition : isnotnull(i_item_sk#7)
(11) CometProject
Input [3]: [i_item_sk#7, i_class#8, i_category#9]
-Arguments: [i_item_sk#7, i_class#10, i_category#11], [i_item_sk#7, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#8, 50, true, false, true) AS i_class#10, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#9, 50, true, false, true) AS i_category#11]
+Arguments: [i_item_sk#7, i_class#10, i_category#11], [i_item_sk#7, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#8, 50)) AS i_class#10, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#9, 50)) AS i_category#11]
(12) CometBroadcastExchange
Input [3]: [i_item_sk#7, i_class#10, i_category#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt
index 666895a10f..7874848809 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q87/explain.txt
@@ -105,7 +105,7 @@ Condition : isnotnull(c_customer_sk#7)
(11) CometProject
Input [3]: [c_customer_sk#7, c_first_name#8, c_last_name#9]
-Arguments: [c_customer_sk#7, c_first_name#10, c_last_name#11], [c_customer_sk#7, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#8, 20, true, false, true) AS c_first_name#10, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#9, 30, true, false, true) AS c_last_name#11]
+Arguments: [c_customer_sk#7, c_first_name#10, c_last_name#11], [c_customer_sk#7, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#8, 20)) AS c_first_name#10, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#9, 30)) AS c_last_name#11]
(12) CometBroadcastExchange
Input [3]: [c_customer_sk#7, c_first_name#10, c_last_name#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt
index b76db55a60..7f51103ea3 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q89/explain.txt
@@ -41,11 +41,11 @@ ReadSchema: struct
(33) CometFilter
Input [2]: [hd_demo_sk#25, hd_buy_potential#26]
-Condition : (StartsWith(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, hd_buy_potential#26, 15, true, false, true), Unknown) AND isnotnull(hd_demo_sk#25))
+Condition : ((isnotnull(hd_buy_potential#26) AND StartsWith(static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#26, 15)), Unknown)) AND isnotnull(hd_demo_sk#25))
(34) CometProject
Input [2]: [hd_demo_sk#25, hd_buy_potential#26]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt
index cdd887ba43..92c87fefaf 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q93/explain.txt
@@ -78,12 +78,12 @@ Arguments: [ss_customer_sk#2, ss_quantity#4, ss_sales_price#5, sr_reason_sk#8, s
Output [2]: [r_reason_sk#12, r_reason_desc#13]
Batched: true
Location [not included in comparison]/{warehouse_dir}/reason]
-PushedFilters: [IsNotNull(r_reason_sk)]
+PushedFilters: [IsNotNull(r_reason_desc), IsNotNull(r_reason_sk)]
ReadSchema: struct
(13) CometFilter
Input [2]: [r_reason_sk#12, r_reason_desc#13]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, r_reason_desc#13, 100, true, false, true) = reason 28 ) AND isnotnull(r_reason_sk#12))
+Condition : ((isnotnull(r_reason_desc#13) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(r_reason_desc#13, 100)) = reason 28 )) AND isnotnull(r_reason_sk#12))
(14) CometProject
Input [2]: [r_reason_sk#12, r_reason_desc#13]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt
index 2f6f0874a8..acdaf631ed 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q94/explain.txt
@@ -147,12 +147,12 @@ Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_
Output [2]: [ca_address_sk#16, ca_state#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
-PushedFilters: [IsNotNull(ca_address_sk)]
+PushedFilters: [IsNotNull(ca_state), IsNotNull(ca_address_sk)]
ReadSchema: struct
(24) CometFilter
Input [2]: [ca_address_sk#16, ca_state#17]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#17, 2, true, false, true) = IL) AND isnotnull(ca_address_sk#16))
+Condition : ((isnotnull(ca_state#17) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#17, 2)) = IL)) AND isnotnull(ca_address_sk#16))
(25) CometProject
Input [2]: [ca_address_sk#16, ca_state#17]
@@ -175,12 +175,12 @@ Arguments: [ws_web_site_sk#3, ws_order_number#5, ws_ext_ship_cost#6, ws_net_prof
Output [2]: [web_site_sk#18, web_company_name#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_site]
-PushedFilters: [IsNotNull(web_site_sk)]
+PushedFilters: [IsNotNull(web_company_name), IsNotNull(web_site_sk)]
ReadSchema: struct
(30) CometFilter
Input [2]: [web_site_sk#18, web_company_name#19]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, web_company_name#19, 50, true, false, true) = pri ) AND isnotnull(web_site_sk#18))
+Condition : ((isnotnull(web_company_name#19) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(web_company_name#19, 50)) = pri )) AND isnotnull(web_site_sk#18))
(31) CometProject
Input [2]: [web_site_sk#18, web_company_name#19]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt
index cb738013e8..d6a290825a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q95/explain.txt
@@ -214,12 +214,12 @@ Arguments: [ws_ship_addr_sk#2, ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_
Output [2]: [ca_address_sk#21, ca_state#22]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_address]
-PushedFilters: [IsNotNull(ca_address_sk)]
+PushedFilters: [IsNotNull(ca_state), IsNotNull(ca_address_sk)]
ReadSchema: struct
(37) CometFilter
Input [2]: [ca_address_sk#21, ca_state#22]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#22, 2, true, false, true) = IL) AND isnotnull(ca_address_sk#21))
+Condition : ((isnotnull(ca_state#22) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#22, 2)) = IL)) AND isnotnull(ca_address_sk#21))
(38) CometProject
Input [2]: [ca_address_sk#21, ca_state#22]
@@ -242,12 +242,12 @@ Arguments: [ws_web_site_sk#3, ws_order_number#4, ws_ext_ship_cost#5, ws_net_prof
Output [2]: [web_site_sk#23, web_company_name#24]
Batched: true
Location [not included in comparison]/{warehouse_dir}/web_site]
-PushedFilters: [IsNotNull(web_site_sk)]
+PushedFilters: [IsNotNull(web_company_name), IsNotNull(web_site_sk)]
ReadSchema: struct
(43) CometFilter
Input [2]: [web_site_sk#23, web_company_name#24]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, web_company_name#24, 50, true, false, true) = pri ) AND isnotnull(web_site_sk#23))
+Condition : ((isnotnull(web_company_name#24) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(web_company_name#24, 50)) = pri )) AND isnotnull(web_site_sk#23))
(44) CometProject
Input [2]: [web_site_sk#23, web_company_name#24]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt
index 7c26492ea5..ef0177b579 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v1_4-spark4_0/q98/explain.txt
@@ -50,11 +50,11 @@ ReadSchema: struct
(4) CometFilter
Input [4]: [cd_demo_sk#11, cd_gender#12, cd_education_status#13, cd_dep_count#14]
-Condition : (((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#12, 1, true, false, true) = M) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#13, 20, true, false, true) = College )) AND isnotnull(cd_demo_sk#11))
+Condition : ((((isnotnull(cd_gender#12) AND isnotnull(cd_education_status#13)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#12, 1)) = M)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#13, 20)) = College )) AND isnotnull(cd_demo_sk#11))
(5) CometProject
Input [4]: [cd_demo_sk#11, cd_gender#12, cd_education_status#13, cd_dep_count#14]
@@ -257,11 +257,11 @@ ReadSchema: struct
(55) CometFilter
Input [3]: [ca_address_sk#101, ca_state#102, ca_country#103]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#102, 2, true, false, true) IN (ND,WI,AL,NC,OK,MS,TN) AND isnotnull(ca_address_sk#101))
+Condition : (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#102, 2)) IN (ND,WI,AL,NC,OK,MS,TN) AND isnotnull(ca_address_sk#101))
(56) CometProject
Input [3]: [ca_address_sk#101, ca_state#102, ca_country#103]
-Arguments: [ca_address_sk#101, ca_state#104, ca_country#103], [ca_address_sk#101, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#102, 2, true, false, true) AS ca_state#104, ca_country#103]
+Arguments: [ca_address_sk#101, ca_state#104, ca_country#103], [ca_address_sk#101, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#102, 2)) AS ca_state#104, ca_country#103]
(57) CometBroadcastExchange
Input [3]: [ca_address_sk#101, ca_state#104, ca_country#103]
@@ -537,7 +537,7 @@ ReadSchema: struct
(83) CometFilter
Input [3]: [ca_address_sk#175, ca_state#176, ca_country#177]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#176, 2, true, false, true) IN (ND,WI,AL,NC,OK,MS,TN) AND isnotnull(ca_address_sk#175))
+Condition : (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#176, 2)) IN (ND,WI,AL,NC,OK,MS,TN) AND isnotnull(ca_address_sk#175))
(84) CometProject
Input [3]: [ca_address_sk#175, ca_state#176, ca_country#177]
@@ -661,7 +661,7 @@ ReadSchema: struct
(111) CometFilter
Input [2]: [ca_address_sk#249, ca_state#250]
-Condition : (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#250, 2, true, false, true) IN (ND,WI,AL,NC,OK,MS,TN) AND isnotnull(ca_address_sk#249))
+Condition : (static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#250, 2)) IN (ND,WI,AL,NC,OK,MS,TN) AND isnotnull(ca_address_sk#249))
(112) CometProject
Input [2]: [ca_address_sk#249, ca_state#250]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q20/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q20/explain.txt
index df9260b885..2be4420c43 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q20/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q20/explain.txt
@@ -47,11 +47,11 @@ ReadSchema: struct
(20) CometFilter
Input [6]: [i_item_sk#17, i_current_price#18, i_size#19, i_color#20, i_units#21, i_manager_id#22]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#20, 20, true, false, true) = pale ) AND isnotnull(i_item_sk#17))
+Condition : ((isnotnull(i_color#20) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#20, 20)) = pale )) AND isnotnull(i_item_sk#17))
(21) CometProject
Input [6]: [i_item_sk#17, i_current_price#18, i_size#19, i_color#20, i_units#21, i_manager_id#22]
-Arguments: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22], [i_item_sk#17, i_current_price#18, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_size#19, 20, true, false, true) AS i_size#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_color#20, 20, true, false, true) AS i_color#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_units#21, 10, true, false, true) AS i_units#25, i_manager_id#22]
+Arguments: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22], [i_item_sk#17, i_current_price#18, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_size#19, 20)) AS i_size#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#20, 20)) AS i_color#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_units#21, 10)) AS i_units#25, i_manager_id#22]
(22) CometBroadcastExchange
Input [6]: [i_item_sk#17, i_current_price#18, i_size#23, i_color#24, i_units#25, i_manager_id#22]
@@ -175,7 +175,7 @@ Condition : ((isnotnull(c_customer_sk#26) AND isnotnull(c_current_addr_sk#27)) A
(27) CometProject
Input [5]: [c_customer_sk#26, c_current_addr_sk#27, c_first_name#28, c_last_name#29, c_birth_country#30]
-Arguments: [c_customer_sk#26, c_current_addr_sk#27, c_first_name#31, c_last_name#32, c_birth_country#30], [c_customer_sk#26, c_current_addr_sk#27, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#28, 20, true, false, true) AS c_first_name#31, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#29, 30, true, false, true) AS c_last_name#32, c_birth_country#30]
+Arguments: [c_customer_sk#26, c_current_addr_sk#27, c_first_name#31, c_last_name#32, c_birth_country#30], [c_customer_sk#26, c_current_addr_sk#27, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#28, 20)) AS c_first_name#31, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#29, 30)) AS c_last_name#32, c_birth_country#30]
(28) CometBroadcastExchange
Input [5]: [c_customer_sk#26, c_current_addr_sk#27, c_first_name#31, c_last_name#32, c_birth_country#30]
@@ -202,11 +202,11 @@ ReadSchema: struct
(4) CometFilter
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
-Condition : ((((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#11, 1, true, false, true) = F) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#12, 1, true, false, true) = W)) AND (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_education_status#13, 20, true, false, true) = Primary )) AND isnotnull(cd_demo_sk#10))
+Condition : ((((((isnotnull(cd_gender#11) AND isnotnull(cd_marital_status#12)) AND isnotnull(cd_education_status#13)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#11, 1)) = F)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#12, 1)) = W)) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_education_status#13, 20)) = Primary )) AND isnotnull(cd_demo_sk#10))
(5) CometProject
Input [4]: [cd_demo_sk#10, cd_gender#11, cd_marital_status#12, cd_education_status#13]
@@ -149,16 +149,16 @@ Arguments: [ss_item_sk#1, ss_store_sk#3, ss_quantity#4, ss_list_price#5, ss_sale
Output [2]: [s_store_sk#16, s_state#17]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk)]
+PushedFilters: [IsNotNull(s_state), IsNotNull(s_store_sk)]
ReadSchema: struct
(16) CometFilter
Input [2]: [s_store_sk#16, s_state#17]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#17, 2, true, false, true) = TN) AND isnotnull(s_store_sk#16))
+Condition : ((isnotnull(s_state#17) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#17, 2)) = TN)) AND isnotnull(s_store_sk#16))
(17) CometProject
Input [2]: [s_store_sk#16, s_state#17]
-Arguments: [s_store_sk#16, s_state#18], [s_store_sk#16, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#17, 2, true, false, true) AS s_state#18]
+Arguments: [s_store_sk#16, s_state#18], [s_store_sk#16, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#17, 2)) AS s_state#18]
(18) CometBroadcastExchange
Input [2]: [s_store_sk#16, s_state#18]
@@ -186,7 +186,7 @@ Condition : isnotnull(i_item_sk#19)
(23) CometProject
Input [2]: [i_item_sk#19, i_item_id#20]
-Arguments: [i_item_sk#19, i_item_id#21], [i_item_sk#19, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_item_id#20, 16, true, false, true) AS i_item_id#21]
+Arguments: [i_item_sk#19, i_item_id#21], [i_item_sk#19, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_item_id#20, 16)) AS i_item_id#21]
(24) CometBroadcastExchange
Input [2]: [i_item_sk#19, i_item_id#21]
@@ -265,12 +265,12 @@ Arguments: [ss_item_sk#53, ss_store_sk#55, ss_quantity#56, ss_list_price#57, ss_
Output [2]: [s_store_sk#64, s_state#65]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk)]
+PushedFilters: [IsNotNull(s_state), IsNotNull(s_store_sk)]
ReadSchema: struct
(41) CometFilter
Input [2]: [s_store_sk#64, s_state#65]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#65, 2, true, false, true) = TN) AND isnotnull(s_store_sk#64))
+Condition : ((isnotnull(s_state#65) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#65, 2)) = TN)) AND isnotnull(s_store_sk#64))
(42) CometProject
Input [2]: [s_store_sk#64, s_state#65]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q34/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q34/explain.txt
index 37fa3cf15f..c4a28f06c6 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q34/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q34/explain.txt
@@ -111,7 +111,7 @@ ReadSchema: struct10000 ) OR (staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, hd_buy_potential#13, 15, true, false, true) = unknown ))) AND (hd_vehicle_count#15 > 0)) AND CASE WHEN (hd_vehicle_count#15 > 0) THEN (knownfloatingpointnormalized(normalizenanandzero((cast(hd_dep_count#14 as double) / knownfloatingpointnormalized(normalizenanandzero(cast(hd_vehicle_count#15 as double)))))) > 1.2) END) AND isnotnull(hd_demo_sk#12))
+Condition : ((((isnotnull(hd_vehicle_count#15) AND ((static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#13, 15)) = >10000 ) OR (static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#13, 15)) = unknown ))) AND (hd_vehicle_count#15 > 0)) AND CASE WHEN (hd_vehicle_count#15 > 0) THEN (knownfloatingpointnormalized(normalizenanandzero((cast(hd_dep_count#14 as double) / knownfloatingpointnormalized(normalizenanandzero(cast(hd_vehicle_count#15 as double)))))) > 1.2) END) AND isnotnull(hd_demo_sk#12))
(17) CometProject
Input [4]: [hd_demo_sk#12, hd_buy_potential#13, hd_dep_count#14, hd_vehicle_count#15]
@@ -161,7 +161,7 @@ Condition : isnotnull(c_customer_sk#18)
(27) CometProject
Input [5]: [c_customer_sk#18, c_salutation#19, c_first_name#20, c_last_name#21, c_preferred_cust_flag#22]
-Arguments: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26], [c_customer_sk#18, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_salutation#19, 10, true, false, true) AS c_salutation#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_first_name#20, 20, true, false, true) AS c_first_name#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_last_name#21, 30, true, false, true) AS c_last_name#25, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, c_preferred_cust_flag#22, 1, true, false, true) AS c_preferred_cust_flag#26]
+Arguments: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26], [c_customer_sk#18, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_salutation#19, 10)) AS c_salutation#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_first_name#20, 20)) AS c_first_name#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_last_name#21, 30)) AS c_last_name#25, static_invoke(CharVarcharCodegenUtils.readSidePadding(c_preferred_cust_flag#22, 1)) AS c_preferred_cust_flag#26]
(28) CometBroadcastExchange
Input [5]: [c_customer_sk#18, c_salutation#23, c_first_name#24, c_last_name#25, c_preferred_cust_flag#26]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt
index 4ace157e8d..21959384d3 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35/explain.txt
@@ -191,7 +191,7 @@ Condition : isnotnull(ca_address_sk#20)
(31) CometProject
Input [2]: [ca_address_sk#20, ca_state#21]
-Arguments: [ca_address_sk#20, ca_state#22], [ca_address_sk#20, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#21, 2, true, false, true) AS ca_state#22]
+Arguments: [ca_address_sk#20, ca_state#22], [ca_address_sk#20, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#21, 2)) AS ca_state#22]
(32) CometColumnarToRow [codegen id : 3]
Input [2]: [ca_address_sk#20, ca_state#22]
@@ -223,7 +223,7 @@ Condition : isnotnull(cd_demo_sk#23)
(38) CometProject
Input [6]: [cd_demo_sk#23, cd_gender#24, cd_marital_status#25, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
-Arguments: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28], [cd_demo_sk#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#24, 1, true, false, true) AS cd_gender#29, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#25, 1, true, false, true) AS cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
+Arguments: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28], [cd_demo_sk#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#24, 1)) AS cd_gender#29, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#25, 1)) AS cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
(39) CometColumnarToRow [codegen id : 4]
Input [6]: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt
index 34ee387d4a..46dc6c9178 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q35a/explain.txt
@@ -165,7 +165,7 @@ Condition : isnotnull(ca_address_sk#20)
(26) CometProject
Input [2]: [ca_address_sk#20, ca_state#21]
-Arguments: [ca_address_sk#20, ca_state#22], [ca_address_sk#20, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#21, 2, true, false, true) AS ca_state#22]
+Arguments: [ca_address_sk#20, ca_state#22], [ca_address_sk#20, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#21, 2)) AS ca_state#22]
(27) CometBroadcastExchange
Input [2]: [ca_address_sk#20, ca_state#22]
@@ -193,7 +193,7 @@ Condition : isnotnull(cd_demo_sk#23)
(32) CometProject
Input [6]: [cd_demo_sk#23, cd_gender#24, cd_marital_status#25, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
-Arguments: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28], [cd_demo_sk#23, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_gender#24, 1, true, false, true) AS cd_gender#29, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#25, 1, true, false, true) AS cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
+Arguments: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28], [cd_demo_sk#23, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_gender#24, 1)) AS cd_gender#29, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#25, 1)) AS cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
(33) CometBroadcastExchange
Input [6]: [cd_demo_sk#23, cd_gender#29, cd_marital_status#30, cd_dep_count#26, cd_dep_employed_count#27, cd_dep_college_count#28]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt
index b7e6bb9231..7884a9d295 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q36a/explain.txt
@@ -103,7 +103,7 @@ Condition : isnotnull(i_item_sk#9)
(11) CometProject
Input [3]: [i_item_sk#9, i_class#10, i_category#11]
-Arguments: [i_item_sk#9, i_class#12, i_category#13], [i_item_sk#9, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#10, 50, true, false, true) AS i_class#12, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#11, 50, true, false, true) AS i_category#13]
+Arguments: [i_item_sk#9, i_class#12, i_category#13], [i_item_sk#9, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#10, 50)) AS i_class#12, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#11, 50)) AS i_category#13]
(12) CometBroadcastExchange
Input [3]: [i_item_sk#9, i_class#12, i_category#13]
@@ -122,12 +122,12 @@ Arguments: [ss_store_sk#2, ss_ext_sales_price#3, ss_net_profit#4, i_class#12, i_
Output [2]: [s_store_sk#14, s_state#15]
Batched: true
Location [not included in comparison]/{warehouse_dir}/store]
-PushedFilters: [IsNotNull(s_store_sk)]
+PushedFilters: [IsNotNull(s_state), IsNotNull(s_store_sk)]
ReadSchema: struct
(16) CometFilter
Input [2]: [s_store_sk#14, s_state#15]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#15, 2, true, false, true) = TN) AND isnotnull(s_store_sk#14))
+Condition : ((isnotnull(s_state#15) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#15, 2)) = TN)) AND isnotnull(s_store_sk#14))
(17) CometProject
Input [2]: [s_store_sk#14, s_state#15]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q47/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q47/explain.txt
index c4bb77830f..e76b0e6f16 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q47/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q47/explain.txt
@@ -61,11 +61,11 @@ ReadSchema: struct
(2) CometFilter
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Condition : ((isnotnull(i_item_sk#1) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true))) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true)))
+Condition : ((isnotnull(i_item_sk#1) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)))) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50))))
(3) CometProject
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true) AS i_brand#4, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true) AS i_category#5]
+Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50)) AS i_brand#4, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)) AS i_category#5]
(4) CometScan parquet spark_catalog.default.store_sales
Output [4]: [ss_item_sk#6, ss_store_sk#7, ss_sales_price#8, ss_sold_date_sk#9]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt
index 75e0c3a6db..60da578817 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q57/explain.txt
@@ -61,11 +61,11 @@ ReadSchema: struct
(2) CometFilter
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Condition : ((isnotnull(i_item_sk#1) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true))) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true)))
+Condition : ((isnotnull(i_item_sk#1) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)))) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50))))
(3) CometProject
Input [3]: [i_item_sk#1, i_brand#2, i_category#3]
-Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#2, 50, true, false, true) AS i_brand#4, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#3, 50, true, false, true) AS i_category#5]
+Arguments: [i_item_sk#1, i_brand#4, i_category#5], [i_item_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#2, 50)) AS i_brand#4, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#3, 50)) AS i_category#5]
(4) CometScan parquet spark_catalog.default.catalog_sales
Output [4]: [cs_call_center_sk#6, cs_item_sk#7, cs_sales_price#8, cs_sold_date_sk#9]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt
index c0830c3b0b..50f6648b6e 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q5a/explain.txt
@@ -173,7 +173,7 @@ Condition : isnotnull(s_store_sk#24)
(16) CometProject
Input [2]: [s_store_sk#24, s_store_id#25]
-Arguments: [s_store_sk#24, s_store_id#26], [s_store_sk#24, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#25, 16, true, false, true) AS s_store_id#26]
+Arguments: [s_store_sk#24, s_store_id#26], [s_store_sk#24, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#25, 16)) AS s_store_id#26]
(17) CometBroadcastExchange
Input [2]: [s_store_sk#24, s_store_id#26]
@@ -273,7 +273,7 @@ Condition : isnotnull(cp_catalog_page_sk#66)
(37) CometProject
Input [2]: [cp_catalog_page_sk#66, cp_catalog_page_id#67]
-Arguments: [cp_catalog_page_sk#66, cp_catalog_page_id#68], [cp_catalog_page_sk#66, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cp_catalog_page_id#67, 16, true, false, true) AS cp_catalog_page_id#68]
+Arguments: [cp_catalog_page_sk#66, cp_catalog_page_id#68], [cp_catalog_page_sk#66, static_invoke(CharVarcharCodegenUtils.readSidePadding(cp_catalog_page_id#67, 16)) AS cp_catalog_page_id#68]
(38) CometBroadcastExchange
Input [2]: [cp_catalog_page_sk#66, cp_catalog_page_id#68]
@@ -392,7 +392,7 @@ Condition : isnotnull(web_site_sk#113)
(62) CometProject
Input [2]: [web_site_sk#113, web_site_id#114]
-Arguments: [web_site_sk#113, web_site_id#115], [web_site_sk#113, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, web_site_id#114, 16, true, false, true) AS web_site_id#115]
+Arguments: [web_site_sk#113, web_site_id#115], [web_site_sk#113, static_invoke(CharVarcharCodegenUtils.readSidePadding(web_site_id#114, 16)) AS web_site_id#115]
(63) CometBroadcastExchange
Input [2]: [web_site_sk#113, web_site_id#115]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q6/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q6/explain.txt
index 8eaf2ae184..0b6894f23c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q6/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q6/explain.txt
@@ -58,7 +58,7 @@ Condition : isnotnull(ca_address_sk#1)
(3) CometProject
Input [2]: [ca_address_sk#1, ca_state#2]
-Arguments: [ca_address_sk#1, ca_state#3], [ca_address_sk#1, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_state#2, 2, true, false, true) AS ca_state#3]
+Arguments: [ca_address_sk#1, ca_state#3], [ca_address_sk#1, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_state#2, 2)) AS ca_state#3]
(4) CometScan parquet spark_catalog.default.customer
Output [2]: [c_customer_sk#4, c_current_addr_sk#5]
@@ -144,12 +144,12 @@ Input [2]: [ca_state#3, ss_item_sk#6]
Output [3]: [i_item_sk#14, i_current_price#15, i_category#16]
Batched: true
Location [not included in comparison]/{warehouse_dir}/item]
-PushedFilters: [IsNotNull(i_current_price), IsNotNull(i_item_sk)]
+PushedFilters: [IsNotNull(i_current_price), IsNotNull(i_category), IsNotNull(i_item_sk)]
ReadSchema: struct
(22) CometFilter
Input [3]: [i_item_sk#14, i_current_price#15, i_category#16]
-Condition : (isnotnull(i_current_price#15) AND isnotnull(i_item_sk#14))
+Condition : ((isnotnull(i_current_price#15) AND isnotnull(i_category#16)) AND isnotnull(i_item_sk#14))
(23) CometColumnarToRow [codegen id : 3]
Input [3]: [i_item_sk#14, i_current_price#15, i_category#16]
@@ -162,11 +162,11 @@ ReadSchema: struct
(25) CometFilter
Input [2]: [i_current_price#17, i_category#18]
-Condition : isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#18, 50, true, false, true))
+Condition : isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#18, 50)))
(26) CometProject
Input [2]: [i_current_price#17, i_category#18]
-Arguments: [i_category#19, i_current_price#17], [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#18, 50, true, false, true) AS i_category#19, i_current_price#17]
+Arguments: [i_category#19, i_current_price#17], [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#18, 50)) AS i_category#19, i_current_price#17]
(27) CometColumnarToRow [codegen id : 1]
Input [2]: [i_category#19, i_current_price#17]
@@ -201,7 +201,7 @@ Input [2]: [avg(i_current_price)#25, i_category#19]
Arguments: HashedRelationBroadcastMode(List(input[1, string, true]),false), [plan_id=2]
(34) BroadcastHashJoin [codegen id : 3]
-Left keys [1]: [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#16, 50, true, false, true)]
+Left keys [1]: [static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#16, 50))]
Right keys [1]: [i_category#19]
Join type: Inner
Join condition: (cast(i_current_price#15 as decimal(14,7)) > (1.2 * avg(i_current_price)#25))
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt
index c5e3046383..3711dddc1d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q64/explain.txt
@@ -361,11 +361,11 @@ ReadSchema: struct
(38) CometFilter
Input [3]: [s_store_sk#39, s_store_name#40, s_zip#41]
-Condition : ((isnotnull(s_store_sk#39) AND isnotnull(s_store_name#40)) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#41, 10, true, false, true)))
+Condition : ((isnotnull(s_store_sk#39) AND isnotnull(s_store_name#40)) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#41, 10))))
(39) CometProject
Input [3]: [s_store_sk#39, s_store_name#40, s_zip#41]
-Arguments: [s_store_sk#39, s_store_name#40, s_zip#42], [s_store_sk#39, s_store_name#40, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_zip#41, 10, true, false, true) AS s_zip#42]
+Arguments: [s_store_sk#39, s_store_name#40, s_zip#42], [s_store_sk#39, s_store_name#40, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_zip#41, 10)) AS s_zip#42]
(40) CometColumnarToRow [codegen id : 5]
Input [3]: [s_store_sk#39, s_store_name#40, s_zip#42]
@@ -462,11 +462,11 @@ ReadSchema: struct
(60) CometFilter
Input [2]: [cd_demo_sk#53, cd_marital_status#54]
-Condition : (isnotnull(cd_demo_sk#53) AND isnotnull(staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#54, 1, true, false, true)))
+Condition : (isnotnull(cd_demo_sk#53) AND isnotnull(static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#54, 1))))
(61) CometProject
Input [2]: [cd_demo_sk#53, cd_marital_status#54]
-Arguments: [cd_demo_sk#53, cd_marital_status#55], [cd_demo_sk#53, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#54, 1, true, false, true) AS cd_marital_status#55]
+Arguments: [cd_demo_sk#53, cd_marital_status#55], [cd_demo_sk#53, static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#54, 1)) AS cd_marital_status#55]
(62) CometColumnarToRow [codegen id : 9]
Input [2]: [cd_demo_sk#53, cd_marital_status#55]
@@ -580,7 +580,7 @@ Condition : isnotnull(ca_address_sk#63)
(86) CometProject
Input [5]: [ca_address_sk#63, ca_street_number#64, ca_street_name#65, ca_city#66, ca_zip#67]
-Arguments: [ca_address_sk#63, ca_street_number#68, ca_street_name#65, ca_city#66, ca_zip#69], [ca_address_sk#63, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_street_number#64, 10, true, false, true) AS ca_street_number#68, ca_street_name#65, ca_city#66, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, ca_zip#67, 10, true, false, true) AS ca_zip#69]
+Arguments: [ca_address_sk#63, ca_street_number#68, ca_street_name#65, ca_city#66, ca_zip#69], [ca_address_sk#63, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_street_number#64, 10)) AS ca_street_number#68, ca_street_name#65, ca_city#66, static_invoke(CharVarcharCodegenUtils.readSidePadding(ca_zip#67, 10)) AS ca_zip#69]
(87) CometColumnarToRow [codegen id : 14]
Input [5]: [ca_address_sk#63, ca_street_number#68, ca_street_name#65, ca_city#66, ca_zip#69]
@@ -662,11 +662,11 @@ ReadSchema: struct= 64.00)) AND (i_current_price#78 <= 74.00)) AND (i_current_price#78 >= 65.00)) AND (i_current_price#78 <= 79.00)) AND isnotnull(i_item_sk#77))
+Condition : ((((((isnotnull(i_current_price#78) AND static_invoke(CharVarcharCodegenUtils.readSidePadding(i_color#79, 20)) IN (purple ,burlywood ,indian ,spring ,floral ,medium )) AND (i_current_price#78 >= 64.00)) AND (i_current_price#78 <= 74.00)) AND (i_current_price#78 >= 65.00)) AND (i_current_price#78 <= 79.00)) AND isnotnull(i_item_sk#77))
(105) CometProject
Input [4]: [i_item_sk#77, i_current_price#78, i_color#79, i_product_name#80]
-Arguments: [i_item_sk#77, i_product_name#81], [i_item_sk#77, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_product_name#80, 50, true, false, true) AS i_product_name#81]
+Arguments: [i_item_sk#77, i_product_name#81], [i_item_sk#77, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_product_name#80, 50)) AS i_product_name#81]
(106) CometColumnarToRow [codegen id : 18]
Input [2]: [i_item_sk#77, i_product_name#81]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt
index 5415b43243..4ff65a44ec 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q67a/explain.txt
@@ -145,7 +145,7 @@ Condition : isnotnull(s_store_sk#12)
(11) CometProject
Input [2]: [s_store_sk#12, s_store_id#13]
-Arguments: [s_store_sk#12, s_store_id#14], [s_store_sk#12, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#13, 16, true, false, true) AS s_store_id#14]
+Arguments: [s_store_sk#12, s_store_id#14], [s_store_sk#12, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#13, 16)) AS s_store_id#14]
(12) CometBroadcastExchange
Input [2]: [s_store_sk#12, s_store_id#14]
@@ -173,7 +173,7 @@ Condition : isnotnull(i_item_sk#15)
(17) CometProject
Input [5]: [i_item_sk#15, i_brand#16, i_class#17, i_category#18, i_product_name#19]
-Arguments: [i_item_sk#15, i_brand#20, i_class#21, i_category#22, i_product_name#23], [i_item_sk#15, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_brand#16, 50, true, false, true) AS i_brand#20, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#17, 50, true, false, true) AS i_class#21, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#18, 50, true, false, true) AS i_category#22, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_product_name#19, 50, true, false, true) AS i_product_name#23]
+Arguments: [i_item_sk#15, i_brand#20, i_class#21, i_category#22, i_product_name#23], [i_item_sk#15, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_brand#16, 50)) AS i_brand#20, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#17, 50)) AS i_class#21, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#18, 50)) AS i_category#22, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_product_name#19, 50)) AS i_product_name#23]
(18) CometBroadcastExchange
Input [5]: [i_item_sk#15, i_brand#20, i_class#21, i_category#22, i_product_name#23]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q70a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q70a/explain.txt
index fad7171441..389c582162 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q70a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q70a/explain.txt
@@ -150,7 +150,7 @@ Condition : isnotnull(s_store_sk#14)
(17) CometProject
Input [2]: [s_store_sk#14, s_state#15]
-Arguments: [s_store_sk#14, s_state#16], [s_store_sk#14, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#15, 2, true, false, true) AS s_state#16]
+Arguments: [s_store_sk#14, s_state#16], [s_store_sk#14, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#15, 2)) AS s_state#16]
(18) CometBroadcastExchange
Input [2]: [s_store_sk#14, s_state#16]
@@ -226,13 +226,13 @@ Input [1]: [s_state#16]
Arguments: HashedRelationBroadcastMode(List(input[0, string, true]),false), [plan_id=2]
(35) BroadcastHashJoin [codegen id : 4]
-Left keys [1]: [staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#9, 2, true, false, true)]
+Left keys [1]: [static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#9, 2))]
Right keys [1]: [s_state#16]
Join type: LeftSemi
Join condition: None
(36) Project [codegen id : 4]
-Output [3]: [s_store_sk#7, s_county#8, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_state#9, 2, true, false, true) AS s_state#23]
+Output [3]: [s_store_sk#7, s_county#8, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_state#9, 2)) AS s_state#23]
Input [3]: [s_store_sk#7, s_county#8, s_state#9]
(37) BroadcastExchange
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt
index 86b9e95825..41346ec9a6 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q72/explain.txt
@@ -154,12 +154,12 @@ Arguments: [cs_ship_date_sk#1, cs_bill_cdemo_sk#2, cs_bill_hdemo_sk#3, cs_item_s
Output [2]: [cd_demo_sk#18, cd_marital_status#19]
Batched: true
Location [not included in comparison]/{warehouse_dir}/customer_demographics]
-PushedFilters: [IsNotNull(cd_demo_sk)]
+PushedFilters: [IsNotNull(cd_marital_status), IsNotNull(cd_demo_sk)]
ReadSchema: struct
(19) CometFilter
Input [2]: [cd_demo_sk#18, cd_marital_status#19]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cd_marital_status#19, 1, true, false, true) = M) AND isnotnull(cd_demo_sk#18))
+Condition : ((isnotnull(cd_marital_status#19) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(cd_marital_status#19, 1)) = M)) AND isnotnull(cd_demo_sk#18))
(20) CometProject
Input [2]: [cd_demo_sk#18, cd_marital_status#19]
@@ -182,12 +182,12 @@ Arguments: [cs_ship_date_sk#1, cs_bill_hdemo_sk#3, cs_item_sk#4, cs_promo_sk#5,
Output [2]: [hd_demo_sk#20, hd_buy_potential#21]
Batched: true
Location [not included in comparison]/{warehouse_dir}/household_demographics]
-PushedFilters: [IsNotNull(hd_demo_sk)]
+PushedFilters: [IsNotNull(hd_buy_potential), IsNotNull(hd_demo_sk)]
ReadSchema: struct
(25) CometFilter
Input [2]: [hd_demo_sk#20, hd_buy_potential#21]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, hd_buy_potential#21, 15, true, false, true) = 1001-5000 ) AND isnotnull(hd_demo_sk#20))
+Condition : ((isnotnull(hd_buy_potential#21) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(hd_buy_potential#21, 15)) = 1001-5000 )) AND isnotnull(hd_demo_sk#20))
(26) CometProject
Input [2]: [hd_demo_sk#20, hd_buy_potential#21]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt
index f135e724f9..2dfad89a8d 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q74/explain.txt
@@ -89,11 +89,11 @@ ReadSchema: struct
(4) CometFilter
Input [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]
-Condition : ((((((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#11, 50, true, false, true) = Books ) AND isnotnull(i_item_sk#7)) AND isnotnull(i_brand_id#8)) AND isnotnull(i_class_id#9)) AND isnotnull(i_category_id#10)) AND isnotnull(i_manufact_id#12))
+Condition : ((((((isnotnull(i_category#11) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#11, 50)) = Books )) AND isnotnull(i_item_sk#7)) AND isnotnull(i_brand_id#8)) AND isnotnull(i_class_id#9)) AND isnotnull(i_category_id#10)) AND isnotnull(i_manufact_id#12))
(5) CometProject
Input [6]: [i_item_sk#7, i_brand_id#8, i_class_id#9, i_category_id#10, i_category#11, i_manufact_id#12]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q80a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q80a/explain.txt
index c4aca09d1f..b88670e86c 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q80a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q80a/explain.txt
@@ -221,7 +221,7 @@ Condition : isnotnull(s_store_sk#16)
(20) CometProject
Input [2]: [s_store_sk#16, s_store_id#17]
-Arguments: [s_store_sk#16, s_store_id#18], [s_store_sk#16, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, s_store_id#17, 16, true, false, true) AS s_store_id#18]
+Arguments: [s_store_sk#16, s_store_id#18], [s_store_sk#16, static_invoke(CharVarcharCodegenUtils.readSidePadding(s_store_id#17, 16)) AS s_store_id#18]
(21) CometBroadcastExchange
Input [2]: [s_store_sk#16, s_store_id#18]
@@ -268,12 +268,12 @@ Arguments: [ss_promo_sk#3, ss_ext_sales_price#5, ss_net_profit#6, sr_return_amt#
Output [2]: [p_promo_sk#21, p_channel_tv#22]
Batched: true
Location [not included in comparison]/{warehouse_dir}/promotion]
-PushedFilters: [IsNotNull(p_promo_sk)]
+PushedFilters: [IsNotNull(p_channel_tv), IsNotNull(p_promo_sk)]
ReadSchema: struct
(31) CometFilter
Input [2]: [p_promo_sk#21, p_channel_tv#22]
-Condition : ((staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, p_channel_tv#22, 1, true, false, true) = N) AND isnotnull(p_promo_sk#21))
+Condition : ((isnotnull(p_channel_tv#22) AND (static_invoke(CharVarcharCodegenUtils.readSidePadding(p_channel_tv#22, 1)) = N)) AND isnotnull(p_promo_sk#21))
(32) CometProject
Input [2]: [p_promo_sk#21, p_channel_tv#22]
@@ -393,7 +393,7 @@ Condition : isnotnull(cp_catalog_page_sk#55)
(57) CometProject
Input [2]: [cp_catalog_page_sk#55, cp_catalog_page_id#56]
-Arguments: [cp_catalog_page_sk#55, cp_catalog_page_id#57], [cp_catalog_page_sk#55, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, cp_catalog_page_id#56, 16, true, false, true) AS cp_catalog_page_id#57]
+Arguments: [cp_catalog_page_sk#55, cp_catalog_page_id#57], [cp_catalog_page_sk#55, static_invoke(CharVarcharCodegenUtils.readSidePadding(cp_catalog_page_id#56, 16)) AS cp_catalog_page_id#57]
(58) CometBroadcastExchange
Input [2]: [cp_catalog_page_sk#55, cp_catalog_page_id#57]
@@ -533,7 +533,7 @@ Condition : isnotnull(web_site_sk#92)
(88) CometProject
Input [2]: [web_site_sk#92, web_site_id#93]
-Arguments: [web_site_sk#92, web_site_id#94], [web_site_sk#92, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, web_site_id#93, 16, true, false, true) AS web_site_id#94]
+Arguments: [web_site_sk#92, web_site_id#94], [web_site_sk#92, static_invoke(CharVarcharCodegenUtils.readSidePadding(web_site_id#93, 16)) AS web_site_id#94]
(89) CometBroadcastExchange
Input [2]: [web_site_sk#92, web_site_id#94]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q86a/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q86a/explain.txt
index 8e121660fe..78efbaa903 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q86a/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q86a/explain.txt
@@ -97,7 +97,7 @@ Condition : isnotnull(i_item_sk#7)
(11) CometProject
Input [3]: [i_item_sk#7, i_class#8, i_category#9]
-Arguments: [i_item_sk#7, i_class#10, i_category#11], [i_item_sk#7, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_class#8, 50, true, false, true) AS i_class#10, staticinvoke(class org.apache.spark.sql.catalyst.util.CharVarcharCodegenUtils, StringType, readSidePadding, i_category#9, 50, true, false, true) AS i_category#11]
+Arguments: [i_item_sk#7, i_class#10, i_category#11], [i_item_sk#7, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_class#8, 50)) AS i_class#10, static_invoke(CharVarcharCodegenUtils.readSidePadding(i_category#9, 50)) AS i_category#11]
(12) CometBroadcastExchange
Input [3]: [i_item_sk#7, i_class#10, i_category#11]
diff --git a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt
index aefdd0a6c0..2b76dd6a4a 100644
--- a/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt
+++ b/spark/src/test/resources/tpcds-plan-stability/approved-plans-v2_7-spark4_0/q98/explain.txt
@@ -49,11 +49,11 @@ ReadSchema: struct "true") {
Seq(true, false).foreach { dictionaryEnabled =>
withTempDir { dir =>
diff --git a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
index bd6b2d468f..b3fa3db8cf 100644
--- a/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometCastSuite.scala
@@ -32,6 +32,7 @@ import org.apache.spark.sql.functions.col
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DataType, DataTypes, DecimalType, StructField, StructType}
+import org.apache.comet.CometSparkSessionExtensions.isSpark40Plus
import org.apache.comet.expressions.{CometCast, CometEvalMode, Compatible}
class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
@@ -560,6 +561,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
// CAST from StringType
test("cast StringType to BooleanType") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
val testValues =
(Seq("TRUE", "True", "true", "FALSE", "False", "false", "1", "0", "", null) ++
gen.generateStrings(dataSize, "truefalseTRUEFALSEyesno10" + whitespaceChars, 8)).toDF("a")
@@ -600,6 +603,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
)
test("cast StringType to ByteType") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.ByteType)
// fuzz test
@@ -607,6 +612,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to ShortType") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.ShortType)
// fuzz test
@@ -614,6 +621,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to IntegerType") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.IntegerType)
// fuzz test
@@ -621,6 +630,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to LongType") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
// test with hand-picked values
castTest(castStringToIntegralInputs.toDF("a"), DataTypes.LongType)
// fuzz test
@@ -682,6 +693,8 @@ class CometCastSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("cast StringType to DateType") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
val validDates = Seq(
"262142-01-01",
"262142-01-01 ",
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index d4976f3cb3..6da833bf69 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -2093,6 +2093,8 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("to_json") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
Seq(true, false).foreach { dictionaryEnabled =>
withParquetTable(
(0 until 100).map(i => {
@@ -2116,6 +2118,8 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("to_json escaping of field names and string values") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
val gen = new DataGenerator(new Random(42))
val chars = "\\'\"abc\t\r\n\f\b"
Seq(true, false).foreach { dictionaryEnabled =>
@@ -2143,6 +2147,8 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("to_json unicode") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
Seq(true, false).foreach { dictionaryEnabled =>
withParquetTable(
(0 until 100).map(i => {
diff --git a/spark/src/test/scala/org/apache/comet/CometFuzzTestSuite.scala b/spark/src/test/scala/org/apache/comet/CometFuzzTestSuite.scala
index a1b1812b31..d1f55cbe1e 100644
--- a/spark/src/test/scala/org/apache/comet/CometFuzzTestSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometFuzzTestSuite.scala
@@ -38,6 +38,7 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType
import org.apache.spark.sql.types._
+import org.apache.comet.CometSparkSessionExtensions.isSpark40Plus
import org.apache.comet.testing.{DataGenOptions, ParquetGenerator}
class CometFuzzTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {
@@ -271,6 +272,8 @@ class CometFuzzTestSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("decode") {
+ // https://github.com/apache/datafusion-comet/issues/1942
+ assume(!isSpark40Plus)
val df = spark.read.parquet(filename)
df.createOrReplaceTempView("t1")
// We want to make sure that the schema generator wasn't modified to accidentally omit
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometColumnarShuffleSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometColumnarShuffleSuite.scala
index fe3e3dae69..b5f074cc8d 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometColumnarShuffleSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometColumnarShuffleSuite.scala
@@ -40,6 +40,7 @@ import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.comet.CometConf
+import org.apache.comet.CometSparkSessionExtensions.isSpark40Plus
abstract class CometColumnarShuffleSuite extends CometTestBase with AdaptiveSparkPlanHelper {
protected val adaptiveExecutionEnabled: Boolean
@@ -322,54 +323,72 @@ abstract class CometColumnarShuffleSuite extends CometTestBase with AdaptiveSpar
}
test("columnar shuffle on map [bool]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(true, false))
}
test("columnar shuffle on map [byte]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0.toByte, 1.toByte))
}
test("columnar shuffle on map [short]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0.toShort, 1.toShort))
}
test("columnar shuffle on map [int]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0, 1))
}
test("columnar shuffle on map [long]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0.toLong, 1.toLong))
}
test("columnar shuffle on map [float]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0.toFloat, 1.toFloat))
}
test("columnar shuffle on map [double]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0.toDouble, 1.toDouble))
}
test("columnar shuffle on map [date]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(new java.sql.Date(0.toLong), new java.sql.Date(1.toLong)))
}
test("columnar shuffle on map [timestamp]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(
@@ -378,6 +397,8 @@ abstract class CometColumnarShuffleSuite extends CometTestBase with AdaptiveSpar
}
test("columnar shuffle on map [decimal]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(
@@ -386,12 +407,16 @@ abstract class CometColumnarShuffleSuite extends CometTestBase with AdaptiveSpar
}
test("columnar shuffle on map [string]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0.toString, 1.toString))
}
test("columnar shuffle on map [binary]") {
+ // https://github.com/apache/datafusion-comet/issues/1941
+ assume(!isSpark40Plus)
// https://github.com/apache/datafusion-comet/issues/1538
assume(CometConf.COMET_NATIVE_SCAN_IMPL.get() != CometConf.SCAN_NATIVE_DATAFUSION)
columnarShuffleOnMapTest(50, Seq(0.toString.getBytes(), 1.toString.getBytes()))
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometExec3_4PlusSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometExec3_4PlusSuite.scala
index 71060de642..85ae58f447 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometExec3_4PlusSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometExec3_4PlusSuite.scala
@@ -26,7 +26,7 @@ import scala.util.Random
import org.scalactic.source.Position
import org.scalatest.Tag
-import org.apache.spark.sql.{Column, CometTestBase}
+import org.apache.spark.sql.CometTestBase
import org.apache.spark.sql.catalyst.FunctionIdentifier
import org.apache.spark.sql.catalyst.expressions.{BloomFilterMightContain, Expression, ExpressionInfo}
import org.apache.spark.sql.functions.{col, lit}
@@ -167,9 +167,8 @@ class CometExec3_4PlusSuite extends CometTestBase {
.toDF("col1", "col2")
.write
.insertInto(table)
- val df = spark
- .table(table)
- .select(new Column(BloomFilterMightContain(lit(bfBytes).expr, col("col1").expr)))
+ val expr = BloomFilterMightContain(lit(bfBytes).expr, col("col1").expr)
+ val df = spark.table(table).select(getColumnFromExpression(expr))
checkSparkAnswerAndOperator(df)
// check with scalar subquery
checkSparkAnswerAndOperator(s"""
@@ -188,5 +187,4 @@ class CometExec3_4PlusSuite extends CometTestBase {
bf.writeTo(os)
(longs, os.toByteArray)
}
-
}
diff --git a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
index 5c458c27bb..33a5e0e99b 100644
--- a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala
@@ -1859,6 +1859,8 @@ class CometExecSuite extends CometTestBase {
}
test("SparkToColumnar eliminate redundant in AQE") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
withSQLConf(
SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
@@ -2029,6 +2031,8 @@ class CometExecSuite extends CometTestBase {
}
test("SparkToColumnar override node name for row input") {
+ // TODO fix for Spark 4.0.0
+ assume(!isSpark40Plus)
withSQLConf(
SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "true",
CometConf.COMET_SHUFFLE_MODE.key -> "jvm") {
diff --git a/spark/src/test/scala/org/apache/spark/sql/CometSQLQueryTestHelper.scala b/spark/src/test/scala/org/apache/spark/sql/CometSQLQueryTestHelper.scala
index bf5ed4396e..c9735c2c74 100644
--- a/spark/src/test/scala/org/apache/spark/sql/CometSQLQueryTestHelper.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/CometSQLQueryTestHelper.scala
@@ -56,7 +56,8 @@ trait CometSQLQueryTestHelper {
case _: DescribeCommandBase | _: DescribeColumnCommand | _: DescribeRelation |
_: DescribeColumn =>
true
- case PhysicalOperation(_, _, Sort(_, true, _)) => true
+ case PhysicalOperation(_, _, s: Sort) if s.global => true
+
case _ => plan.children.iterator.exists(isSorted)
}
diff --git a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
index a2663cf0b9..9d51c69196 100644
--- a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
@@ -58,7 +58,8 @@ abstract class CometTestBase
with SQLTestUtils
with BeforeAndAfterEach
with AdaptiveSparkPlanHelper
- with ShimCometSparkSessionExtensions {
+ with ShimCometSparkSessionExtensions
+ with ShimCometTestBase {
import testImplicits._
protected val shuffleManager: String =
@@ -150,11 +151,11 @@ abstract class CometTestBase
var expected: Array[Row] = Array.empty
var sparkPlan = null.asInstanceOf[SparkPlan]
withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
- val dfSpark = Dataset.ofRows(spark, df.logicalPlan)
+ val dfSpark = datasetOfRows(spark, df.logicalPlan)
expected = dfSpark.collect()
sparkPlan = dfSpark.queryExecution.executedPlan
}
- val dfComet = Dataset.ofRows(spark, df.logicalPlan)
+ val dfComet = datasetOfRows(spark, df.logicalPlan)
checkAnswer(dfComet, expected)
(sparkPlan, dfComet.queryExecution.executedPlan)
}
@@ -230,10 +231,10 @@ abstract class CometTestBase
protected def checkSparkAnswerWithTol(df: => DataFrame, absTol: Double): DataFrame = {
var expected: Array[Row] = Array.empty
withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
- val dfSpark = Dataset.ofRows(spark, df.logicalPlan)
+ val dfSpark = datasetOfRows(spark, df.logicalPlan)
expected = dfSpark.collect()
}
- val dfComet = Dataset.ofRows(spark, df.logicalPlan)
+ val dfComet = datasetOfRows(spark, df.logicalPlan)
checkAnswerWithTol(dfComet, expected, absTol: Double)
dfComet
}
@@ -242,9 +243,9 @@ abstract class CometTestBase
df: => DataFrame): (Option[Throwable], Option[Throwable]) = {
var expected: Option[Throwable] = None
withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
- expected = Try(Dataset.ofRows(spark, df.logicalPlan).collect()).failed.toOption
+ expected = Try(datasetOfRows(spark, df.logicalPlan).collect()).failed.toOption
}
- val actual = Try(Dataset.ofRows(spark, df.logicalPlan).collect()).failed.toOption
+ val actual = Try(datasetOfRows(spark, df.logicalPlan).collect()).failed.toOption
(expected, actual)
}
@@ -255,10 +256,10 @@ abstract class CometTestBase
var expected: Array[Row] = Array.empty
var dfSpark: Dataset[Row] = null
withSQLConf(CometConf.COMET_ENABLED.key -> "false", EXTENDED_EXPLAIN_PROVIDERS_KEY -> "") {
- dfSpark = Dataset.ofRows(spark, df.logicalPlan)
+ dfSpark = datasetOfRows(spark, df.logicalPlan)
expected = dfSpark.collect()
}
- val dfComet = Dataset.ofRows(spark, df.logicalPlan)
+ val dfComet = datasetOfRows(spark, df.logicalPlan)
checkAnswer(dfComet, expected)
if (checkExplainString) {
val diff = StringUtils.difference(
@@ -280,8 +281,8 @@ abstract class CometTestBase
}
}
- private var _spark: SparkSession = _
- protected implicit def spark: SparkSession = _spark
+ private var _spark: SparkSessionType = _
+ override protected implicit def spark: SparkSessionType = _spark
protected implicit def sqlContext: SQLContext = _spark.sqlContext
override protected def sparkContext: SparkContext = {
@@ -300,8 +301,9 @@ abstract class CometTestBase
SparkContext.getOrCreate(conf)
}
- protected def createSparkSession: SparkSession = {
- SparkSession.cleanupAnyExistingSession()
+ protected def createSparkSession: SparkSessionType = {
+ SparkSession.clearActiveSession()
+ SparkSession.clearDefaultSession()
SparkSession
.builder()
diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCDSMicroBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCDSMicroBenchmark.scala
index 0839790ae7..7e3c91f87b 100644
--- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCDSMicroBenchmark.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCDSMicroBenchmark.scala
@@ -97,8 +97,8 @@ object CometTPCDSMicroBenchmark extends CometTPCQueryBenchmarkBase {
cometSpark.sql(queryString).queryExecution.analyzed.foreach {
case SubqueryAlias(alias, _: LogicalRelation) =>
queryRelations.add(alias.name)
- case LogicalRelation(_, _, Some(catalogTable), _) =>
- queryRelations.add(catalogTable.identifier.table)
+ case rel: LogicalRelation if rel.catalogTable.isDefined =>
+ queryRelations.add(rel.catalogTable.get.identifier.table)
case HiveTableRelation(tableMeta, _, _, _, _) =>
queryRelations.add(tableMeta.identifier.table)
case _ =>
diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCQueryBenchmarkBase.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCQueryBenchmarkBase.scala
index 7e9bdbc9ef..50be485a84 100644
--- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCQueryBenchmarkBase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometTPCQueryBenchmarkBase.scala
@@ -54,8 +54,8 @@ trait CometTPCQueryBenchmarkBase extends SqlBasedBenchmark with CometTPCQueryBas
cometSpark.sql(queryString).queryExecution.analyzed.foreach {
case SubqueryAlias(alias, _: LogicalRelation) =>
queryRelations.add(alias.name)
- case LogicalRelation(_, _, Some(catalogTable), _) =>
- queryRelations.add(catalogTable.identifier.table)
+ case rel: LogicalRelation if rel.catalogTable.isDefined =>
+ queryRelations.add(rel.catalogTable.get.identifier.table)
case HiveTableRelation(tableMeta, _, _, _, _) =>
queryRelations.add(tableMeta.identifier.table)
case _ =>
diff --git a/spark/src/test/scala/org/apache/spark/sql/comet/ParquetDatetimeRebaseSuite.scala b/spark/src/test/scala/org/apache/spark/sql/comet/ParquetDatetimeRebaseSuite.scala
index 6d9826e0cb..a988467076 100644
--- a/spark/src/test/scala/org/apache/spark/sql/comet/ParquetDatetimeRebaseSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/comet/ParquetDatetimeRebaseSuite.scala
@@ -23,7 +23,7 @@ import org.scalactic.source.Position
import org.scalatest.Tag
import org.apache.spark.SparkException
-import org.apache.spark.sql.{CometTestBase, DataFrame, Dataset, Row}
+import org.apache.spark.sql.{CometTestBase, DataFrame, Row}
import org.apache.spark.sql.internal.SQLConf
import org.apache.comet.CometConf
@@ -121,7 +121,7 @@ abstract class ParquetDatetimeRebaseSuite extends CometTestBase {
val previousPropertyValue = Option.apply(System.getProperty(SPARK_TESTING))
System.setProperty(SPARK_TESTING, "true")
- val dfSpark = Dataset.ofRows(spark, df.logicalPlan)
+ val dfSpark = datasetOfRows(spark, extractLogicalPlan(df))
expected = dfSpark.collect()
previousPropertyValue match {
@@ -130,7 +130,7 @@ abstract class ParquetDatetimeRebaseSuite extends CometTestBase {
}
}
- val dfComet = Dataset.ofRows(spark, df.logicalPlan)
+ val dfComet = datasetOfRows(spark, extractLogicalPlan(df))
checkAnswer(dfComet, expected)
}
}
diff --git a/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala b/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala
index 672c78d66b..fd5ecef960 100644
--- a/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/comet/ParquetEncryptionITCase.scala
@@ -30,11 +30,11 @@ import org.scalatest.Tag
import org.scalatestplus.junit.JUnitRunner
import org.apache.spark.{DebugFilesystem, SparkConf}
-import org.apache.spark.sql.{CometTestBase, SparkSession, SQLContext}
+import org.apache.spark.sql.{CometTestBase, SQLContext}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SQLTestUtils
-import org.apache.comet.{CometConf, CometSparkSessionExtensions, IntegrationTestSuite}
+import org.apache.comet.{CometConf, IntegrationTestSuite}
/**
* A integration test suite that tests parquet modular encryption usage.
@@ -138,13 +138,8 @@ class ParquetEncryptionITCase extends CometTestBase with SQLTestUtils {
conf
}
- protected override def createSparkSession: SparkSession = {
- SparkSession
- .builder()
- .config(sparkConf)
- .master("local[1]")
- .withExtensions(new CometSparkSessionExtensions)
- .getOrCreate()
+ protected override def createSparkSession: SparkSessionType = {
+ createSparkSessionWithExtensions(sparkConf)
}
override protected def test(testName: String, testTags: Tag*)(testFun: => Any)(implicit
@@ -166,8 +161,8 @@ class ParquetEncryptionITCase extends CometTestBase with SQLTestUtils {
super.beforeAll()
}
- private var _spark: SparkSession = _
- protected implicit override def spark: SparkSession = _spark
+ private var _spark: SparkSessionType = _
+ protected implicit override def spark: SparkSessionType = _spark
protected implicit override def sqlContext: SQLContext = _spark.sqlContext
/**
diff --git a/spark/src/test/spark-3.4/org/apache/sql/ShimCometTestBase.scala b/spark/src/test/spark-3.4/org/apache/sql/ShimCometTestBase.scala
new file mode 100644
index 0000000000..b8ecfacb31
--- /dev/null
+++ b/spark/src/test/spark-3.4/org/apache/sql/ShimCometTestBase.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+
+trait ShimCometTestBase {
+ type SparkSessionType = SparkSession
+
+ def createSparkSessionWithExtensions(conf: SparkConf): SparkSessionType = {
+ SparkSession
+ .builder()
+ .config(conf)
+ .master("local[1]")
+ .withExtensions(new org.apache.comet.CometSparkSessionExtensions)
+ .getOrCreate()
+ }
+
+ def datasetOfRows(spark: SparkSession, plan: LogicalPlan): DataFrame = {
+ Dataset.ofRows(spark, plan)
+ }
+
+ def getColumnFromExpression(expr: Expression): Column = {
+ new Column(expr)
+ }
+
+ def extractLogicalPlan(df: DataFrame): LogicalPlan = {
+ df.logicalPlan
+ }
+}
diff --git a/spark/src/test/spark-3.5/org/apache/spark/sql/ShimCometTestBase.scala b/spark/src/test/spark-3.5/org/apache/spark/sql/ShimCometTestBase.scala
new file mode 100644
index 0000000000..f2b4195565
--- /dev/null
+++ b/spark/src/test/spark-3.5/org/apache/spark/sql/ShimCometTestBase.scala
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+
+trait ShimCometTestBase {
+ type SparkSessionType = SparkSession
+
+ def createSparkSessionWithExtensions(conf: SparkConf): SparkSessionType = {
+ SparkSession
+ .builder()
+ .config(conf)
+ .master("local[1]")
+ .withExtensions(new org.apache.comet.CometSparkSessionExtensions)
+ .getOrCreate()
+ }
+
+ def datasetOfRows(spark: SparkSession, plan: LogicalPlan): DataFrame = {
+ Dataset.ofRows(spark, plan)
+ }
+
+ def getColumnFromExpression(expr: Expression): Column = {
+ new Column(expr)
+ }
+
+ def extractLogicalPlan(df: DataFrame): LogicalPlan = {
+ df.logicalPlan
+ }
+
+}
diff --git a/spark/src/test/spark-4.0/org/apache/spark/sql/ShimCometTestBase.scala b/spark/src/test/spark-4.0/org/apache/spark/sql/ShimCometTestBase.scala
new file mode 100644
index 0000000000..8fb2e69705
--- /dev/null
+++ b/spark/src/test/spark-4.0/org/apache/spark/sql/ShimCometTestBase.scala
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.classic.{Dataset, ExpressionColumnNode, SparkSession}
+
+trait ShimCometTestBase {
+ type SparkSessionType = SparkSession
+
+ def createSparkSessionWithExtensions(conf: SparkConf): SparkSessionType = {
+ SparkSession
+ .builder()
+ .config(conf)
+ .master("local[1]")
+ .withExtensions(new org.apache.comet.CometSparkSessionExtensions)
+ .getOrCreate()
+ }
+
+ def datasetOfRows(spark: SparkSession, plan: LogicalPlan): DataFrame = {
+ Dataset.ofRows(spark, plan)
+ }
+
+ def getColumnFromExpression(expr: Expression): Column = {
+ new Column(ExpressionColumnNode.apply(expr))
+ }
+
+ def extractLogicalPlan(df: DataFrame): LogicalPlan = {
+ df.queryExecution.analyzed
+ }
+}