From 4c2eecd7524db806d0984b2d4c94a0c5056f8817 Mon Sep 17 00:00:00 2001 From: Eren Avsarogullari Date: Sat, 24 Feb 2024 10:09:12 -0800 Subject: [PATCH 1/5] fix: Cast string to boolean not compatible with Spark --- .../execution/datafusion/expressions/cast.rs | 35 +++++++++++-- .../apache/comet/exec/CometExecSuite.scala | 49 ++++++++++++++++++- 2 files changed, 80 insertions(+), 4 deletions(-) diff --git a/core/src/execution/datafusion/expressions/cast.rs b/core/src/execution/datafusion/expressions/cast.rs index d8450686df..da2faaf61a 100644 --- a/core/src/execution/datafusion/expressions/cast.rs +++ b/core/src/execution/datafusion/expressions/cast.rs @@ -27,7 +27,7 @@ use arrow::{ record_batch::RecordBatch, util::display::FormatOptions, }; -use arrow_array::ArrayRef; +use arrow_array::{Array, ArrayRef, BooleanArray, GenericStringArray, OffsetSizeTrait}; use arrow_schema::{DataType, Schema}; use datafusion::logical_expr::ColumnarValue; use datafusion_common::{Result as DataFusionResult, ScalarValue}; @@ -75,8 +75,37 @@ impl Cast { fn cast_array(&self, array: ArrayRef) -> DataFusionResult { let array = array_with_timezone(array, self.timezone.clone(), Some(&self.data_type)); let from_type = array.data_type(); - let cast_result = cast_with_options(&array, &self.data_type, &CAST_OPTIONS)?; - Ok(spark_cast(cast_result, from_type, &self.data_type)) + let to_type = &self.data_type; + let cast_result = match (from_type, to_type) { + (DataType::Utf8, DataType::Boolean) => Self::spark_cast_utf8_to_boolean::(&array), + (DataType::LargeUtf8, DataType::Boolean) => Self::spark_cast_utf8_to_boolean::(&array), + _ => cast_with_options(&array, &self.data_type, &CAST_OPTIONS)? 
+ }; + let result = spark_cast(cast_result, from_type, &self.data_type); + Ok(result) + } + + fn spark_cast_utf8_to_boolean(from: &dyn Array) -> ArrayRef + where + OffsetSize: OffsetSizeTrait, + { + let array = from + .as_any() + .downcast_ref::>() + .unwrap(); + + let output_array = array + .iter() + .map(|value| match value { + Some(value) => match value.to_ascii_lowercase().trim() { + "t" | "true" | "y" | "yes" | "1" => Some(true), + "f" | "false" | "n" | "no" | "0" => Some(false), + _ => None + }, + _ => None + }).collect::(); + + Arc::new(output_array) } } diff --git a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala index 0414671c2c..089cb76957 100644 --- a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala +++ b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala @@ -20,6 +20,7 @@ package org.apache.comet.exec import scala.collection.JavaConverters._ +import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable` import scala.collection.mutable import scala.util.Random @@ -37,9 +38,10 @@ import org.apache.spark.sql.execution.{CollectLimitExec, ProjectExec, UnionExec} import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec import org.apache.spark.sql.execution.joins.{BroadcastNestedLoopJoinExec, CartesianProductExec, SortMergeJoinExec} import org.apache.spark.sql.execution.window.WindowExec -import org.apache.spark.sql.functions.{date_add, expr} +import org.apache.spark.sql.functions.{col, date_add, expr} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.SESSION_LOCAL_TIMEZONE +import org.apache.spark.sql.types.DataTypes import org.apache.spark.unsafe.types.UTF8String import org.apache.comet.CometConf @@ -218,6 +220,51 @@ class CometExecSuite extends CometTestBase { } } + test("test cast utf8 to boolean as compatible with Spark") { + withSQLConf( + CometConf.COMET_ENABLED.key -> 
"true", + CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true") { + withTable("test_table1", "test_table2", "test_table3", "test_table4") { + // Supported boolean values as true by both Arrow and Spark + val inputDF = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES").toDF("c1") + inputDF.write.format("parquet").saveAsTable("test_table1") + val resultDF = this.spark + .table("test_table1") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr = resultDF.collectAsList().toList + resultArr.foreach(x => assert(x.get(1) == true)) + + // Supported boolean values as false by both Arrow and Spark + val inputDF2 = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No").toDF("c1") + inputDF2.write.format("parquet").saveAsTable("test_table2") + val resultDF2 = this.spark + .table("test_table2") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr2 = resultDF2.collectAsList().toList + resultArr2.foreach(x => assert(x.get(1) == false)) + + // Supported boolean values by Arrow but not Spark + val inputDF3 = + Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off").toDF("c1") + inputDF3.write.format("parquet").saveAsTable("test_table3") + val resultDF3 = this.spark + .table("test_table3") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr3 = resultDF3.collectAsList().toList + resultArr3.foreach(x => assert(x.get(1) == null)) + + // Invalid boolean casting values for Arrow and Spark + val inputDF4 = Seq("car", "Truck").toDF("c1") + inputDF4.write.format("parquet").saveAsTable("test_table4") + val resultDF4 = this.spark + .table("test_table4") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr4 = resultDF4.collectAsList().toList + resultArr4.foreach(x => assert(x.get(1) == null)) + } + } + } + test( "fix: ReusedExchangeExec + CometShuffleExchangeExec under QueryStageExec " + "should be CometRoot") { From 
2cdf0c06a0f89bb55410b47977d952824959abef Mon Sep 17 00:00:00 2001 From: Eren Avsarogullari Date: Sat, 24 Feb 2024 11:26:18 -0800 Subject: [PATCH 2/5] Addressed review comments --- .../apache/comet/CometExpressionSuite.scala | 52 ++++++++++++++++++- .../apache/comet/exec/CometExecSuite.scala | 49 +---------------- 2 files changed, 51 insertions(+), 50 deletions(-) diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala index 66ee2752ef..0effa9667b 100644 --- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala +++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala @@ -21,13 +21,15 @@ package org.apache.comet import java.util +import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable` + import org.apache.hadoop.fs.Path import org.apache.spark.sql.{CometTestBase, DataFrame, Row} import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper -import org.apache.spark.sql.functions.expr +import org.apache.spark.sql.functions.{col, expr} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.SESSION_LOCAL_TIMEZONE -import org.apache.spark.sql.types.{Decimal, DecimalType, StructType} +import org.apache.spark.sql.types.{DataTypes, Decimal, DecimalType, StructType} import org.apache.comet.CometSparkSessionExtensions.{isSpark32, isSpark34Plus} @@ -1302,4 +1304,50 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper { } } } + + test("test cast utf8 to boolean as compatible with Spark") { + withSQLConf( + CometConf.COMET_ENABLED.key -> "true", + CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true") { + withTable("test_table1", "test_table2", "test_table3", "test_table4") { + // Supported boolean values as true by both Arrow and Spark + val inputDF = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES").toDF("c1") + 
inputDF.write.format("parquet").saveAsTable("test_table1") + val resultDF = this.spark + .table("test_table1") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr = resultDF.collectAsList().toList + resultArr.foreach(x => assert(x.get(1) == true)) + + // Supported boolean values as false by both Arrow and Spark + val inputDF2 = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No").toDF("c1") + inputDF2.write.format("parquet").saveAsTable("test_table2") + val resultDF2 = this.spark + .table("test_table2") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr2 = resultDF2.collectAsList().toList + resultArr2.foreach(x => assert(x.get(1) == false)) + + // Supported boolean values by Arrow but not Spark + val inputDF3 = + Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off").toDF("c1") + inputDF3.write.format("parquet").saveAsTable("test_table3") + val resultDF3 = this.spark + .table("test_table3") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr3 = resultDF3.collectAsList().toList + resultArr3.foreach(x => assert(x.get(1) == null)) + + // Invalid boolean casting values for Arrow and Spark + val inputDF4 = Seq("car", "Truck").toDF("c1") + inputDF4.write.format("parquet").saveAsTable("test_table4") + val resultDF4 = this.spark + .table("test_table4") + .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) + val resultArr4 = resultDF4.collectAsList().toList + resultArr4.foreach(x => assert(x.get(1) == null)) + } + } + } + } diff --git a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala index 089cb76957..0414671c2c 100644 --- a/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala +++ b/spark/src/test/scala/org/apache/comet/exec/CometExecSuite.scala @@ -20,7 +20,6 @@ package org.apache.comet.exec import scala.collection.JavaConverters._ -import 
scala.collection.convert.ImplicitConversions.`collection AsScalaIterable` import scala.collection.mutable import scala.util.Random @@ -38,10 +37,9 @@ import org.apache.spark.sql.execution.{CollectLimitExec, ProjectExec, UnionExec} import org.apache.spark.sql.execution.exchange.BroadcastExchangeExec import org.apache.spark.sql.execution.joins.{BroadcastNestedLoopJoinExec, CartesianProductExec, SortMergeJoinExec} import org.apache.spark.sql.execution.window.WindowExec -import org.apache.spark.sql.functions.{col, date_add, expr} +import org.apache.spark.sql.functions.{date_add, expr} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.SESSION_LOCAL_TIMEZONE -import org.apache.spark.sql.types.DataTypes import org.apache.spark.unsafe.types.UTF8String import org.apache.comet.CometConf @@ -220,51 +218,6 @@ class CometExecSuite extends CometTestBase { } } - test("test cast utf8 to boolean as compatible with Spark") { - withSQLConf( - CometConf.COMET_ENABLED.key -> "true", - CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true") { - withTable("test_table1", "test_table2", "test_table3", "test_table4") { - // Supported boolean values as true by both Arrow and Spark - val inputDF = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES").toDF("c1") - inputDF.write.format("parquet").saveAsTable("test_table1") - val resultDF = this.spark - .table("test_table1") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr = resultDF.collectAsList().toList - resultArr.foreach(x => assert(x.get(1) == true)) - - // Supported boolean values as false by both Arrow and Spark - val inputDF2 = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No").toDF("c1") - inputDF2.write.format("parquet").saveAsTable("test_table2") - val resultDF2 = this.spark - .table("test_table2") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr2 = resultDF2.collectAsList().toList - resultArr2.foreach(x => 
assert(x.get(1) == false)) - - // Supported boolean values by Arrow but not Spark - val inputDF3 = - Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off").toDF("c1") - inputDF3.write.format("parquet").saveAsTable("test_table3") - val resultDF3 = this.spark - .table("test_table3") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr3 = resultDF3.collectAsList().toList - resultArr3.foreach(x => assert(x.get(1) == null)) - - // Invalid boolean casting values for Arrow and Spark - val inputDF4 = Seq("car", "Truck").toDF("c1") - inputDF4.write.format("parquet").saveAsTable("test_table4") - val resultDF4 = this.spark - .table("test_table4") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr4 = resultDF4.collectAsList().toList - resultArr4.foreach(x => assert(x.get(1) == null)) - } - } - } - test( "fix: ReusedExchangeExec + CometShuffleExchangeExec under QueryStageExec " + "should be CometRoot") { From 9e1c3111862369807db3c4bce4a3e1d376e9d652 Mon Sep 17 00:00:00 2001 From: Eren Avsarogullari Date: Sat, 24 Feb 2024 20:46:01 -0800 Subject: [PATCH 3/5] New UT is integrated with the legacy API --- .../execution/datafusion/expressions/cast.rs | 17 +++-- .../apache/comet/CometExpressionSuite.scala | 64 ++++++------------- 2 files changed, 30 insertions(+), 51 deletions(-) diff --git a/core/src/execution/datafusion/expressions/cast.rs b/core/src/execution/datafusion/expressions/cast.rs index da2faaf61a..a3ff1e4028 100644 --- a/core/src/execution/datafusion/expressions/cast.rs +++ b/core/src/execution/datafusion/expressions/cast.rs @@ -78,16 +78,18 @@ impl Cast { let to_type = &self.data_type; let cast_result = match (from_type, to_type) { (DataType::Utf8, DataType::Boolean) => Self::spark_cast_utf8_to_boolean::(&array), - (DataType::LargeUtf8, DataType::Boolean) => Self::spark_cast_utf8_to_boolean::(&array), - _ => cast_with_options(&array, &self.data_type, &CAST_OPTIONS)? 
+ (DataType::LargeUtf8, DataType::Boolean) => { + Self::spark_cast_utf8_to_boolean::(&array) + } + _ => cast_with_options(&array, &self.data_type, &CAST_OPTIONS)?, }; let result = spark_cast(cast_result, from_type, &self.data_type); Ok(result) } fn spark_cast_utf8_to_boolean(from: &dyn Array) -> ArrayRef - where - OffsetSize: OffsetSizeTrait, + where + OffsetSize: OffsetSizeTrait, { let array = from .as_any() @@ -100,10 +102,11 @@ impl Cast { Some(value) => match value.to_ascii_lowercase().trim() { "t" | "true" | "y" | "yes" | "1" => Some(true), "f" | "false" | "n" | "no" | "0" => Some(false), - _ => None + _ => None, }, - _ => None - }).collect::(); + _ => None, + }) + .collect::(); Arc::new(output_array) } diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala index 0effa9667b..6413531b4e 100644 --- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala +++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala @@ -21,15 +21,13 @@ package org.apache.comet import java.util -import scala.collection.convert.ImplicitConversions.`collection AsScalaIterable` - import org.apache.hadoop.fs.Path import org.apache.spark.sql.{CometTestBase, DataFrame, Row} import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper -import org.apache.spark.sql.functions.{col, expr} +import org.apache.spark.sql.functions.expr import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.SESSION_LOCAL_TIMEZONE -import org.apache.spark.sql.types.{DataTypes, Decimal, DecimalType, StructType} +import org.apache.spark.sql.types.{Decimal, DecimalType, StructType} import org.apache.comet.CometSparkSessionExtensions.{isSpark32, isSpark34Plus} @@ -1306,48 +1304,26 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper { } test("test cast utf8 to boolean as compatible with Spark") { - withSQLConf( - 
CometConf.COMET_ENABLED.key -> "true", - CometConf.COMET_EXEC_ALL_OPERATOR_ENABLED.key -> "true") { - withTable("test_table1", "test_table2", "test_table3", "test_table4") { - // Supported boolean values as true by both Arrow and Spark - val inputDF = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES").toDF("c1") - inputDF.write.format("parquet").saveAsTable("test_table1") - val resultDF = this.spark - .table("test_table1") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr = resultDF.collectAsList().toList - resultArr.foreach(x => assert(x.get(1) == true)) - - // Supported boolean values as false by both Arrow and Spark - val inputDF2 = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No").toDF("c1") - inputDF2.write.format("parquet").saveAsTable("test_table2") - val resultDF2 = this.spark - .table("test_table2") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr2 = resultDF2.collectAsList().toList - resultArr2.foreach(x => assert(x.get(1) == false)) - - // Supported boolean values by Arrow but not Spark - val inputDF3 = - Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off").toDF("c1") - inputDF3.write.format("parquet").saveAsTable("test_table3") - val resultDF3 = this.spark - .table("test_table3") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr3 = resultDF3.collectAsList().toList - resultArr3.foreach(x => assert(x.get(1) == null)) - - // Invalid boolean casting values for Arrow and Spark - val inputDF4 = Seq("car", "Truck").toDF("c1") - inputDF4.write.format("parquet").saveAsTable("test_table4") - val resultDF4 = this.spark - .table("test_table4") - .withColumn("converted", col("c1").cast(DataTypes.BooleanType)) - val resultArr4 = resultDF4.collectAsList().toList - resultArr4.foreach(x => assert(x.get(1) == null)) + def testConvertedColumn(inputValues: Seq[String]): Unit = { + val table = "test_table" + withTable(table) { + val values 
= inputValues.map(x => s"('$x')").mkString(",") + sql(s"create table $table(base_column char(20)) using parquet") + sql(s"insert into $table values $values") + checkSparkAnswerAndOperator( + s"select base_column, cast(base_column as boolean) as converted_column from $table") } } + + // Supported boolean values as true by both Arrow and Spark + testConvertedColumn(inputValues = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES")) + // Supported boolean values as false by both Arrow and Spark + testConvertedColumn(inputValues = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No")) + // Supported boolean values by Arrow but not Spark + testConvertedColumn(inputValues = + Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off")) + // Invalid boolean casting values for Arrow and Spark + testConvertedColumn(inputValues = Seq("car", "Truck")) } } From dbdbf5aa3cea163c48dfab26f4d72f666195b113 Mon Sep 17 00:00:00 2001 From: Eren Avsarogullari Date: Sat, 24 Feb 2024 23:06:16 -0800 Subject: [PATCH 4/5] Addressed review comments --- core/src/execution/datafusion/expressions/cast.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/execution/datafusion/expressions/cast.rs b/core/src/execution/datafusion/expressions/cast.rs index a3ff1e4028..447c277832 100644 --- a/core/src/execution/datafusion/expressions/cast.rs +++ b/core/src/execution/datafusion/expressions/cast.rs @@ -73,17 +73,17 @@ impl Cast { } fn cast_array(&self, array: ArrayRef) -> DataFusionResult { - let array = array_with_timezone(array, self.timezone.clone(), Some(&self.data_type)); - let from_type = array.data_type(); let to_type = &self.data_type; + let array = array_with_timezone(array, self.timezone.clone(), Some(to_type)); + let from_type = array.data_type(); let cast_result = match (from_type, to_type) { (DataType::Utf8, DataType::Boolean) => Self::spark_cast_utf8_to_boolean::(&array), (DataType::LargeUtf8, DataType::Boolean) => {
Self::spark_cast_utf8_to_boolean::(&array) } - _ => cast_with_options(&array, &self.data_type, &CAST_OPTIONS)?, + _ => cast_with_options(&array, to_type, &CAST_OPTIONS)?, }; - let result = spark_cast(cast_result, from_type, &self.data_type); + let result = spark_cast(cast_result, from_type, to_type); Ok(result) } From 2fd644841987a59c7e904b3c671f20b0792d609b Mon Sep 17 00:00:00 2001 From: Eren Avsarogullari Date: Sun, 25 Feb 2024 10:56:38 -0800 Subject: [PATCH 5/5] Minor refactoring --- .../org/apache/comet/CometExpressionSuite.scala | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala index 6413531b4e..3f29e950ec 100644 --- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala +++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala @@ -1304,26 +1304,26 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper { } test("test cast utf8 to boolean as compatible with Spark") { - def testConvertedColumn(inputValues: Seq[String]): Unit = { + def testCastedColumn(inputValues: Seq[String]): Unit = { val table = "test_table" withTable(table) { val values = inputValues.map(x => s"('$x')").mkString(",") sql(s"create table $table(base_column char(20)) using parquet") sql(s"insert into $table values $values") checkSparkAnswerAndOperator( - s"select base_column, cast(base_column as boolean) as converted_column from $table") + s"select base_column, cast(base_column as boolean) as casted_column from $table") } } // Supported boolean values as true by both Arrow and Spark - testConvertedColumn(inputValues = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES")) + testCastedColumn(inputValues = Seq("t", "true", "y", "yes", "1", "T", "TrUe", "Y", "YES")) // Supported boolean values as false by both Arrow and Spark - testConvertedColumn(inputValues = Seq("f", "false", "n", 
"no", "0", "F", "FaLSe", "N", "No")) + testCastedColumn(inputValues = Seq("f", "false", "n", "no", "0", "F", "FaLSe", "N", "No")) // Supported boolean values by Arrow but not Spark - testConvertedColumn(inputValues = + testCastedColumn(inputValues = Seq("TR", "FA", "tr", "tru", "ye", "on", "fa", "fal", "fals", "of", "off")) // Invalid boolean casting values for Arrow and Spark - testConvertedColumn(inputValues = Seq("car", "Truck")) + testCastedColumn(inputValues = Seq("car", "Truck")) } }