From a8a8cb487484508298f0b4a818fcae6f4de50fd0 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Wed, 20 Apr 2022 15:23:12 +0300
Subject: [PATCH 1/2] [SPARK-38949][SQL] Wrap SQL statements by double quotes
 in error messages

### What changes were proposed in this pull request?
In the PR, I propose to wrap any SQL statement in error messages in double quotes ("") and to apply the new implementation of `QueryErrorsBase.toSQLStmt()` to all exceptions in `Query.*Errors` with error classes. This PR also modifies all affected tests; see the list in the section "How was this patch tested?".

### Why are the changes needed?
To improve the user experience with Spark SQL by highlighting SQL statements in error messages and making them more visible to users.

### Does this PR introduce _any_ user-facing change?
Yes. The changes might affect error messages that are visible to users.

Before:
```sql
The operation DESC PARTITION is not allowed
```

After:
```sql
The operation "DESC PARTITION" is not allowed
```

### How was this patch tested?
By running the affected test suites:
```
$ build/sbt "sql/testOnly *QueryExecutionErrorsSuite"
$ build/sbt "sql/testOnly *QueryParsingErrorsSuite"
$ build/sbt "sql/testOnly *QueryCompilationErrorsSuite"
$ build/sbt "test:testOnly *QueryCompilationErrorsDSv2Suite"
$ build/sbt "test:testOnly *ExtractPythonUDFFromJoinConditionSuite"
$ build/sbt "testOnly *PlanParserSuite"
$ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z transform.sql"
$ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z join-lateral.sql"
$ build/sbt "sql/testOnly *SQLQueryTestSuite -- -z describe.sql"
```

Closes #36259 from MaxGekk/error-class-apply-toSQLStmt.

Authored-by: Max Gekk
Signed-off-by: Max Gekk
(cherry picked from commit 5aba2b38beae6e1baf6f0c6f9eb3b65cf607fe77)
Signed-off-by: Max Gekk
---
 python/pyspark/sql/tests/test_udf.py          | 13 +++---
 .../sql/catalyst/parser/AstBuilder.scala      |  2 +-
 .../sql/errors/QueryCompilationErrors.scala   |  7 ++-
 .../spark/sql/errors/QueryErrorsBase.scala    |  7 +++
 .../sql/errors/QueryExecutionErrors.scala     |  4 +-
 .../spark/sql/errors/QueryParsingErrors.scala | 44 ++++++++++++++-----
 ...tractPythonUDFFromJoinConditionSuite.scala |  4 +-
 .../sql/catalyst/parser/PlanParserSuite.scala |  2 +-
 .../sql-tests/results/join-lateral.sql.out    |  4 +-
 .../sql-tests/results/transform.sql.out       |  4 +-
 .../QueryCompilationErrorsDSv2Suite.scala     |  2 +-
 .../errors/QueryCompilationErrorsSuite.scala  |  2 +-
 .../errors/QueryExecutionErrorsSuite.scala    |  4 +-
 .../sql/errors/QueryParsingErrorsSuite.scala  | 31 ++++++-------
 .../SparkScriptTransformationSuite.scala      |  2 +-
 .../execution/command/DDLParserSuite.scala    |  4 +-
 16 files changed, 82 insertions(+), 54 deletions(-)

diff --git a/python/pyspark/sql/tests/test_udf.py b/python/pyspark/sql/tests/test_udf.py
index 805d5a8dfec9a..e40c3ba0d6479 100644
--- a/python/pyspark/sql/tests/test_udf.py
+++ b/python/pyspark/sql/tests/test_udf.py
@@ -258,15 +258,16 @@ def test_udf_not_supported_in_join_condition(self):
         def runWithJoinType(join_type, type_string):
             with self.assertRaisesRegex(
                 AnalysisException,
-                "Using PythonUDF in join condition of join type %s is not supported" % type_string,
+                """Using PythonUDF in join condition of join type "%s" is not supported"""
+                % type_string,
             ):
                 left.join(right, [f("a", "b"), left.a1 == right.b1], join_type).collect()
 
-        runWithJoinType("full", "FullOuter")
-        runWithJoinType("left", "LeftOuter")
-        runWithJoinType("right", "RightOuter")
-        runWithJoinType("leftanti", "LeftAnti")
-        runWithJoinType("leftsemi", "LeftSemi")
+        runWithJoinType("full", "FULL OUTER")
+        runWithJoinType("left", "LEFT OUTER")
+        runWithJoinType("right", "RIGHT OUTER")
+        runWithJoinType("leftanti", "LEFT ANTI")
+        runWithJoinType("leftsemi", "LEFT SEMI")
 
     def test_udf_as_join_condition(self):
         left = self.spark.createDataFrame([Row(a=1, a1=1, a2=1), Row(a=2, a1=2, a2=2)])
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index d334b5780f78a..817f136b5f89e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -1161,7 +1161,7 @@ class AstBuilder extends SqlBaseParserBaseVisitor[AnyRef] with SQLConfHelper wit
       }
       if (join.LATERAL != null) {
         if (!Seq(Inner, Cross, LeftOuter).contains(joinType)) {
-          throw QueryParsingErrors.unsupportedLateralJoinTypeError(ctx, joinType.toString)
+          throw QueryParsingErrors.unsupportedLateralJoinTypeError(ctx, joinType.sql)
         }
         LateralJoin(left, LateralSubquery(plan(join.right)), joinType, condition)
       } else {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 65b59655be07c..3a8cd68966656 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -94,7 +94,9 @@ object QueryCompilationErrors extends QueryErrorsBase {
   def unsupportedIfNotExistsError(tableName: String): Throwable = {
     new AnalysisException(
       errorClass = "UNSUPPORTED_FEATURE",
-      messageParameters = Array(s"IF NOT EXISTS for the table '$tableName' by INSERT INTO."))
+      messageParameters = Array(
+        s"${toSQLStmt("IF NOT EXISTS")} for the table '$tableName' " +
+          s"by ${toSQLStmt("INSERT INTO")}."))
   }
 
   def nonPartitionColError(partitionName: String): Throwable = {
@@ -1576,7 +1578,8 @@ object QueryCompilationErrors extends QueryErrorsBase {
     new AnalysisException(
       errorClass = "UNSUPPORTED_FEATURE",
       messageParameters = Array(
-        s"Using PythonUDF in join condition of join type $joinType is not supported"))
+        "Using PythonUDF in join condition of join type " +
+          s"${toSQLStmt(joinType.sql)} is not supported."))
   }
 
   def conflictingAttributesInJoinConditionError(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
index 7002f19f9fc84..b115891f370ad 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
@@ -17,6 +17,8 @@
 
 package org.apache.spark.sql.errors
 
+import java.util.Locale
+
 import org.apache.spark.sql.catalyst.expressions.Literal
 import org.apache.spark.sql.types.{DataType, DoubleType, FloatType}
 
@@ -45,6 +47,11 @@ trait QueryErrorsBase {
     litToErrorValue(Literal.create(v, t))
   }
 
+  // Quote sql statements in error messages.
+  def toSQLStmt(text: String): String = {
+    "\"" + text.toUpperCase(Locale.ROOT) + "\""
+  }
+
   def toSQLType(t: DataType): String = {
     t.sql
   }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 3cc8c3a6667f6..b507ea7489ee7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1935,13 +1935,13 @@ object QueryExecutionErrors extends QueryErrorsBase {
   def repeatedPivotsUnsupportedError(): Throwable = {
     new SparkUnsupportedOperationException(
       errorClass = "UNSUPPORTED_FEATURE",
-      messageParameters = Array("Repeated pivots."))
+      messageParameters = Array(s"Repeated ${toSQLStmt("pivot")}s."))
   }
 
   def pivotNotAfterGroupByUnsupportedError(): Throwable = {
     new SparkUnsupportedOperationException(
       errorClass = "UNSUPPORTED_FEATURE",
-      messageParameters = Array("Pivot not after a groupBy."))
+      messageParameters = Array(s"${toSQLStmt("pivot")} not after a ${toSQLStmt("group by")}."))
   }
 
   def invalidAesKeyLengthError(actualLength: Int): RuntimeException = {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index ad0973ccbb44d..39c1944bbba9a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -91,13 +91,19 @@ object QueryParsingErrors extends QueryErrorsBase {
   }
 
   def transformNotSupportQuantifierError(ctx: ParserRuleContext): Throwable = {
-    new ParseException("UNSUPPORTED_FEATURE",
-      Array("TRANSFORM does not support DISTINCT/ALL in inputs"), ctx)
+    new ParseException(
+      errorClass = "UNSUPPORTED_FEATURE",
+      messageParameters = Array(s"${toSQLStmt("TRANSFORM")} does not support" +
+        s" ${toSQLStmt("DISTINCT")}/${toSQLStmt("ALL")} in inputs"),
+      ctx)
   }
 
   def transformWithSerdeUnsupportedError(ctx: ParserRuleContext): Throwable = {
-    new ParseException("UNSUPPORTED_FEATURE",
-      Array("TRANSFORM with serde is only supported in hive mode"), ctx)
+    new ParseException(
+      errorClass = "UNSUPPORTED_FEATURE",
+      messageParameters = Array(
+        s"${toSQLStmt("TRANSFORM")} with serde is only supported in hive mode"),
+      ctx)
   }
 
   def lateralWithPivotInFromClauseNotAllowedError(ctx: FromClauseContext): Throwable = {
@@ -105,19 +111,31 @@ object QueryParsingErrors extends QueryErrorsBase {
   }
 
   def lateralJoinWithNaturalJoinUnsupportedError(ctx: ParserRuleContext): Throwable = {
-    new ParseException("UNSUPPORTED_FEATURE", Array("LATERAL join with NATURAL join."), ctx)
+    new ParseException(
+      errorClass = "UNSUPPORTED_FEATURE",
+      messageParameters = Array(s"${toSQLStmt("LATERAL")} join with ${toSQLStmt("NATURAL")} join."),
+      ctx)
   }
 
   def lateralJoinWithUsingJoinUnsupportedError(ctx: ParserRuleContext): Throwable = {
-    new ParseException("UNSUPPORTED_FEATURE", Array("LATERAL join with USING join."), ctx)
+    new ParseException(
+      errorClass = "UNSUPPORTED_FEATURE",
+      messageParameters = Array(s"${toSQLStmt("LATERAL")} join with ${toSQLStmt("USING")} join."),
+      ctx)
   }
 
   def unsupportedLateralJoinTypeError(ctx: ParserRuleContext, joinType: String): Throwable = {
-    new ParseException("UNSUPPORTED_FEATURE", Array(s"LATERAL join type '$joinType'."), ctx)
+    new ParseException(
+      errorClass = "UNSUPPORTED_FEATURE",
+      messageParameters = Array(s"${toSQLStmt("LATERAL")} join type ${toSQLStmt(joinType)}."),
+      ctx)
   }
 
   def invalidLateralJoinRelationError(ctx: RelationPrimaryContext): Throwable = {
-    new ParseException("INVALID_SQL_SYNTAX", Array("LATERAL can only be used with subquery."), ctx)
+    new ParseException(
+      errorClass = "INVALID_SQL_SYNTAX",
+      messageParameters = Array(s"${toSQLStmt("LATERAL")} can only be used with subquery."),
+      ctx)
   }
 
   def repetitiveWindowDefinitionError(name: String, ctx: WindowClauseContext): Throwable = {
@@ -136,7 +154,7 @@ object QueryParsingErrors extends QueryErrorsBase {
   }
 
   def naturalCrossJoinUnsupportedError(ctx: RelationContext): Throwable = {
-    new ParseException("UNSUPPORTED_FEATURE", Array("NATURAL CROSS JOIN."), ctx)
+    new ParseException("UNSUPPORTED_FEATURE", Array(toSQLStmt("NATURAL CROSS JOIN") + "."), ctx)
   }
 
   def emptyInputForTableSampleError(ctx: ParserRuleContext): Throwable = {
@@ -298,14 +316,18 @@ object QueryParsingErrors extends QueryErrorsBase {
   }
 
   def showFunctionsUnsupportedError(identifier: String, ctx: IdentifierContext): Throwable = {
-    new ParseException(s"SHOW $identifier FUNCTIONS not supported", ctx)
+    new ParseException(
+      errorClass = "INVALID_SQL_SYNTAX",
+      messageParameters = Array(
+        s"${toSQLStmt("SHOW")} $identifier ${toSQLStmt("FUNCTIONS")} not supported"),
+      ctx)
   }
 
   def showFunctionsInvalidPatternError(pattern: String, ctx: ParserRuleContext): Throwable = {
     new ParseException(
       errorClass = "INVALID_SQL_SYNTAX",
       messageParameters = Array(
-        s"Invalid pattern in SHOW FUNCTIONS: $pattern. " +
+        s"Invalid pattern in ${toSQLStmt("SHOW FUNCTIONS")}: $pattern. " +
         s"It must be a ${toSQLType(StringType)} literal."),
       ctx)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala
index 65c8f5d300c62..1e58f5c94b073 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ExtractPythonUDFFromJoinConditionSuite.scala
@@ -187,9 +187,9 @@ class ExtractPythonUDFFromJoinConditionSuite extends PlanTest {
         condition = Some(unevaluableJoinCond))
       Optimize.execute(query.analyze)
     }
-    assert(e.message.contentEquals(
+    assert(e.message ==
       "The feature is not supported: " +
-      s"Using PythonUDF in join condition of join type $joinType is not supported"))
+      s"""Using PythonUDF in join condition of join type "${joinType.sql}" is not supported.""")
 
     val query2 = testRelationLeft.join(
       testRelationRight,
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
index 3e2d917a8932f..7fd375ada9a0f 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/PlanParserSuite.scala
@@ -1254,7 +1254,7 @@ class PlanParserSuite extends AnalysisTest {
         |  "escapeChar" = "\\")
         |FROM testData
       """.stripMargin,
-      "TRANSFORM with serde is only supported in hive mode")
+      "\"TRANSFORM\" with serde is only supported in hive mode")
   }
 
diff --git a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
index cc1619813dd55..6e47579a9b011 100644
--- a/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/join-lateral.sql.out
@@ -153,7 +153,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-The feature is not supported: LATERAL join with NATURAL join.(line 1, pos 14)
+The feature is not supported: "LATERAL" join with "NATURAL" join.(line 1, pos 14)
 
 == SQL ==
 SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)
@@ -167,7 +167,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-The feature is not supported: LATERAL join with USING join.(line 1, pos 14)
+The feature is not supported: "LATERAL" join with "USING" join.(line 1, pos 14)
 
 == SQL ==
 SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)
diff --git a/sql/core/src/test/resources/sql-tests/results/transform.sql.out b/sql/core/src/test/resources/sql-tests/results/transform.sql.out
index be57390761ba3..69fe58e1343d1 100644
--- a/sql/core/src/test/resources/sql-tests/results/transform.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/transform.sql.out
@@ -719,7 +719,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-The feature is not supported: TRANSFORM does not support DISTINCT/ALL in inputs(line 1, pos 17)
+The feature is not supported: "TRANSFORM" does not support "DISTINCT"/"ALL" in inputs(line 1, pos 17)
 
 == SQL ==
 SELECT TRANSFORM(DISTINCT b, a, c)
@@ -739,7 +739,7 @@ struct<>
 -- !query output
 org.apache.spark.sql.catalyst.parser.ParseException
 
-The feature is not supported: TRANSFORM does not support DISTINCT/ALL in inputs(line 1, pos 17)
+The feature is not supported: "TRANSFORM" does not support "DISTINCT"/"ALL" in inputs(line 1, pos 17)
 
 == SQL ==
 SELECT TRANSFORM(ALL b, a, c)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
index bfea3f535dd94..be8e65249202b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsDSv2Suite.scala
@@ -43,7 +43,7 @@ class QueryCompilationErrorsDSv2Suite
       checkAnswer(spark.table(tbl), spark.emptyDataFrame)
       assert(e.getMessage === "The feature is not supported: " +
-        s"IF NOT EXISTS for the table '$tbl' by INSERT INTO.")
+        s""""IF NOT EXISTS" for the table '$tbl' by "INSERT INTO".""")
       assert(e.getErrorClass === "UNSUPPORTED_FEATURE")
       assert(e.getSqlState === "0A000")
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
index cac1ef67fac40..6a7da405fcc03 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala
@@ -149,7 +149,7 @@ class QueryCompilationErrorsSuite extends QueryTest with SharedSparkSession {
     assert(e.getSqlState === "0A000")
     assert(e.message ===
       "The feature is not supported: " +
-      "Using PythonUDF in join condition of join type LeftOuter is not supported")
+      "Using PythonUDF in join condition of join type \"LEFT OUTER\" is not supported.")
   }
 
   test("UNSUPPORTED_FEATURE: Using pandas UDF aggregate expression with pivot") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index f73d1e1c3c5b1..9ff57859acb90 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -156,7 +156,7 @@ class QueryExecutionErrorsSuite extends QueryTest
     }
     assert(e1.getErrorClass === "UNSUPPORTED_FEATURE")
     assert(e1.getSqlState === "0A000")
-    assert(e1.getMessage === "The feature is not supported: Repeated pivots.")
+    assert(e1.getMessage === """The feature is not supported: Repeated "PIVOT"s.""")
 
     val e2 = intercept[SparkUnsupportedOperationException] {
       trainingSales
@@ -167,7 +167,7 @@ class QueryExecutionErrorsSuite extends QueryTest
     }
     assert(e2.getErrorClass === "UNSUPPORTED_FEATURE")
     assert(e2.getSqlState === "0A000")
-    assert(e2.getMessage === "The feature is not supported: Pivot not after a groupBy.")
+    assert(e2.getMessage === """The feature is not supported: "PIVOT" not after a "GROUP BY".""")
   }
 
   test("INCONSISTENT_BEHAVIOR_CROSS_VERSION: " +
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
index 5610c4d000bfa..5a47ce5ae73e4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryParsingErrorsSuite.scala
@@ -21,6 +21,8 @@ import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.test.SharedSparkSession
 
+// Turn off the length check because most of the tests check entire error messages
+// scalastyle:off line.size.limit
 class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
   def validateParsingError(
       sqlText: String,
@@ -42,7 +44,7 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlState = "0A000",
       message =
         """
-          |The feature is not supported: LATERAL join with NATURAL join.(line 1, pos 14)
+          |The feature is not supported: "LATERAL" join with "NATURAL" join.(line 1, pos 14)
           |
           |== SQL ==
          |SELECT * FROM t1 NATURAL JOIN LATERAL (SELECT c1 + c2 AS c2)
@@ -57,7 +59,7 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlState = "0A000",
       message =
        """
-          |The feature is not supported: LATERAL join with USING join.(line 1, pos 14)
+          |The feature is not supported: "LATERAL" join with "USING" join.(line 1, pos 14)
           |
           |== SQL ==
           |SELECT * FROM t1 JOIN LATERAL (SELECT c1 + c2 AS c2) USING (c2)
@@ -66,21 +68,17 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
   }
 
   test("UNSUPPORTED_FEATURE: Unsupported LATERAL join type") {
-    Seq(
-      ("RIGHT OUTER", "RightOuter"),
-      ("FULL OUTER", "FullOuter"),
-      ("LEFT SEMI", "LeftSemi"),
-      ("LEFT ANTI", "LeftAnti")).foreach { pair =>
+    Seq("RIGHT OUTER", "FULL OUTER", "LEFT SEMI", "LEFT ANTI").foreach { joinType =>
       validateParsingError(
-        sqlText = s"SELECT * FROM t1 ${pair._1} JOIN LATERAL (SELECT c1 + c2 AS c3) ON c2 = c3",
+        sqlText = s"SELECT * FROM t1 $joinType JOIN LATERAL (SELECT c1 + c2 AS c3) ON c2 = c3",
         errorClass = "UNSUPPORTED_FEATURE",
         sqlState = "0A000",
         message =
           s"""
-             |The feature is not supported: LATERAL join type '${pair._2}'.(line 1, pos 14)
+             |The feature is not supported: "LATERAL" join type "$joinType".(line 1, pos 14)
             |
             |== SQL ==
-            |SELECT * FROM t1 ${pair._1} JOIN LATERAL (SELECT c1 + c2 AS c3) ON c2 = c3
+            |SELECT * FROM t1 $joinType JOIN LATERAL (SELECT c1 + c2 AS c3) ON c2 = c3
             |--------------^^^
             |""".stripMargin)
     }
@@ -101,7 +99,7 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlState = "42000",
       message =
         s"""
-          |Invalid SQL syntax: LATERAL can only be used with subquery.(line 1, pos $pos)
+          |Invalid SQL syntax: "LATERAL" can only be used with subquery.(line 1, pos $pos)
           |
           |== SQL ==
           |$sqlText
@@ -117,7 +115,7 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlState = "0A000",
       message =
         """
-          |The feature is not supported: NATURAL CROSS JOIN.(line 1, pos 14)
+          |The feature is not supported: "NATURAL CROSS JOIN".(line 1, pos 14)
          |
           |== SQL ==
           |SELECT * FROM a NATURAL CROSS JOIN b
@@ -177,8 +175,7 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlState = "0A000",
       message =
         """
-          |The feature is not supported: """.stripMargin +
-          """TRANSFORM does not support DISTINCT/ALL in inputs(line 1, pos 17)
+          |The feature is not supported: "TRANSFORM" does not support "DISTINCT"/"ALL" in inputs(line 1, pos 17)
           |
           |== SQL ==
           |SELECT TRANSFORM(DISTINCT a) USING 'a' FROM t
@@ -194,12 +191,10 @@ class QueryParsingErrorsSuite extends QueryTest with SharedSparkSession {
       sqlState = "0A000",
       message =
         """
-          |The feature is not supported: """.stripMargin +
-          """TRANSFORM with serde is only supported in hive mode(line 1, pos 0)
+          |The feature is not supported: "TRANSFORM" with serde is only supported in hive mode(line 1, pos 0)
           |
           |== SQL ==
-          |SELECT TRANSFORM(a) ROW FORMAT SERDE """.stripMargin +
-          """'org.apache.hadoop.hive.serde2.OpenCSVSerde' USING 'a' FROM t
+          |SELECT TRANSFORM(a) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.OpenCSVSerde' USING 'a' FROM t
           |^^^
           |""".stripMargin)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala
index 5638743b7633d..1f431e173b3c7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkScriptTransformationSuite.scala
@@ -56,7 +56,7 @@ class SparkScriptTransformationSuite extends BaseScriptTransformationSuite with
           |FROM v
         """.stripMargin)
       }.getMessage
-      assert(e.contains("TRANSFORM with serde is only supported in hive mode"))
+      assert(e.contains("\"TRANSFORM\" with serde is only supported in hive mode"))
     }
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 1053cb9f2a772..3e23d7e7f82ce 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -484,9 +484,9 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
       DropFunctionCommand(None, "a", true, true))
 
     intercept("DROP TEMPORARY FUNCTION a.b",
-      "DROP TEMPORARY FUNCTION requires a single part name")
+      "\"DROP TEMPORARY FUNCTION\" requires a single part name")
     intercept("DROP TEMPORARY FUNCTION IF EXISTS a.b",
-      "DROP TEMPORARY FUNCTION requires a single part name")
+      "\"DROP TEMPORARY FUNCTION\" requires a single part name")
   }
 
   test("SPARK-32374: create temporary view with properties not allowed") {

From 1bdb7552bf9c1543e35cef6773e01adb5830f40c Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Wed, 20 Apr 2022 20:24:44 +0300
Subject: [PATCH 2/2] Fix DDLParserSuite

---
 .../apache/spark/sql/catalyst/parser/DDLParserSuite.scala | 7 +++----
 .../spark/sql/execution/command/DDLParserSuite.scala      | 4 ++--
 2 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index 472506fa9070b..bc5380e27f536 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -2048,12 +2048,11 @@ class DDLParserSuite extends AnalysisTest {
     comparePlans(
       parsePlan("SHOW FUNCTIONS IN db LIKE 'funct*'"),
       ShowFunctions(UnresolvedNamespace(Seq("db")), true, true, Some("funct*")))
-    val sql = "SHOW other FUNCTIONS"
-    intercept(sql, s"$sql not supported")
+    intercept("SHOW other FUNCTIONS", "\"SHOW\" other \"FUNCTIONS\" not supported")
     intercept("SHOW FUNCTIONS IN db f1",
-      "Invalid pattern in SHOW FUNCTIONS: f1")
+      "Invalid pattern in \"SHOW FUNCTIONS\": f1")
     intercept("SHOW FUNCTIONS IN db LIKE f1",
-      "Invalid pattern in SHOW FUNCTIONS: f1")
+      "Invalid pattern in \"SHOW FUNCTIONS\": f1")
 
     // The legacy syntax.
     comparePlans(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index 3e23d7e7f82ce..1053cb9f2a772 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -484,9 +484,9 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
       DropFunctionCommand(None, "a", true, true))
 
     intercept("DROP TEMPORARY FUNCTION a.b",
-      "\"DROP TEMPORARY FUNCTION\" requires a single part name")
+      "DROP TEMPORARY FUNCTION requires a single part name")
     intercept("DROP TEMPORARY FUNCTION IF EXISTS a.b",
-      "\"DROP TEMPORARY FUNCTION\" requires a single part name")
+      "DROP TEMPORARY FUNCTION requires a single part name")
   }
 
   test("SPARK-32374: create temporary view with properties not allowed") {
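
For reference, the quoting behavior that `QueryErrorsBase.toSQLStmt()` introduces can be reproduced standalone. The sketch below mirrors the three-line implementation from the QueryErrorsBase.scala hunk in the first patch; the surrounding `ToSQLStmtExample` object and `main` method are illustrative scaffolding only, not part of the patch:
```scala
import java.util.Locale

// Mirrors the helper added to QueryErrorsBase in this patch: upper-case the
// statement text and wrap it in double quotes. The enclosing object and main
// method are illustrative only.
object ToSQLStmtExample {
  def toSQLStmt(text: String): String = {
    "\"" + text.toUpperCase(Locale.ROOT) + "\""
  }

  def main(args: Array[String]): Unit = {
    // Reproduces the Before/After example from the commit message.
    println(s"The operation ${toSQLStmt("desc partition")} is not allowed")
    // Prints: The operation "DESC PARTITION" is not allowed
  }
}
```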