Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -30,10 +30,20 @@ class JsonFunctionsValidateSuite extends FunctionsValidateSuite {
checkGlutenOperatorMatch[ProjectExecTransformer]
}

// Validate get_json_object offload on a real column read from Parquet.
// A literal-only argument can be constant-folded by Spark before planning,
// which would bypass the ProjectExecTransformer we want to exercise.
withTempPath {
  path =>
    // One JSON row; '$.a' must extract "b".
    Seq("""{"a":"b"}""")
      .toDF("txt")
      .write
      .parquet(path.getCanonicalPath)

    spark.read.parquet(path.getCanonicalPath).createOrReplaceTempView("tbl")

    runQueryAndCompare("select get_json_object(txt, '$.a') from tbl") {
      checkGlutenOperatorMatch[ProjectExecTransformer]
    }
}

// Invalid UTF-8 encoding.
Expand All @@ -51,12 +61,22 @@ class JsonFunctionsValidateSuite extends FunctionsValidateSuite {
runQueryAndCompare(
s"select *, json_array_length(string_field1) " +
s"from datatab limit 5")(checkGlutenOperatorMatch[ProjectExecTransformer])
// Validate json_array_length offload against column input (not literals),
// covering both a well-formed JSON array and a NULL row.
withTempPath {
  path =>
    // null widens to String here, producing a nullable string column.
    Seq("[1,2,3,4]", null)
      .toDF("txt")
      .write
      .parquet(path.getCanonicalPath)

    spark.read.parquet(path.getCanonicalPath).createOrReplaceTempView("tbl")

    runQueryAndCompare("select json_array_length(txt) from tbl") {
      checkGlutenOperatorMatch[ProjectExecTransformer]
    }
}
}

testWithSpecifiedSparkVersion("from_json function bool", Some("3.4")) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -187,8 +187,20 @@ abstract class ScalarFunctionsValidateSuite extends FunctionsValidateSuite {
}

testWithSpecifiedSparkVersion("null input for array_size", Some("3.3")) {
  withTempPath {
    path =>
      // A single NULL array row: array_size(NULL) must still offload
      // (result is NULL). The cast pins the column type to array<int>.
      Seq(null.asInstanceOf[Array[Int]])
        .toDF("txt")
        .write
        .parquet(path.getCanonicalPath)

      spark.read.parquet(path.getCanonicalPath).createOrReplaceTempView("tbl")

      runQueryAndCompare("select array_size(txt) from tbl") {
        checkGlutenOperatorMatch[ProjectExecTransformer]
      }
  }
}

Expand Down Expand Up @@ -801,8 +813,20 @@ abstract class ScalarFunctionsValidateSuite extends FunctionsValidateSuite {
}

test("Test sum/count function") {
  withTempPath {
    path =>
      // java.lang.Integer keeps the columns nullable, matching the
      // original Parquet schema the suite compares against.
      Seq[(Integer, Integer)]((2, 2))
        .toDF("val1", "val2")
        .write
        .parquet(path.getCanonicalPath)

      spark.read.parquet(path.getCanonicalPath).createOrReplaceTempView("tbl")

      // sum/count push straight onto the scan, hence BatchScanExecTransformer.
      runQueryAndCompare("SELECT sum(val1),count(val2) from tbl") {
        checkGlutenOperatorMatch[BatchScanExecTransformer]
      }
  }
}

Expand All @@ -818,9 +842,20 @@ abstract class ScalarFunctionsValidateSuite extends FunctionsValidateSuite {
}

testWithSpecifiedSparkVersion("Test width_bucket function", Some("3.4")) {
  withTempPath {
    path =>
      Seq[(Integer, Integer, Integer, Integer)]((2, 0, 4, 3))
        .toDF("val1", "val2", "val3", "val4")
        .write
        .parquet(path.getCanonicalPath)

      spark.read.parquet(path.getCanonicalPath).createOrReplaceTempView("tbl")

      // width_bucket over columns is evaluated in a Project, and the
      // pre-change test asserted ProjectExecTransformer. The added code's
      // BatchScanExecTransformer check appears to be a copy-paste from the
      // sum/count test and would not validate the expression offload.
      runQueryAndCompare("SELECT width_bucket(val1, val2, val3, val4) from tbl") {
        checkGlutenOperatorMatch[ProjectExecTransformer]
      }
  }
}

Expand Down
Loading