From 29e5bae72678e7332b44958683a2f708ec90a35d Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Fri, 13 Nov 2020 19:33:55 +0300
Subject: [PATCH 01/17] Create ShowPartitionsParserSuite
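
Extract the SHOW PARTITIONS parser test from DDLParserSuite into a
dedicated ShowPartitionsParserSuite, so that all parser checks for the
command live in one place.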
---
 .../execution/command/DDLParserSuite.scala    |  8 -----
 .../command/ShowPartitionsParserSuite.scala   | 34 +++++++++++++++++++
 2 files changed, 34 insertions(+), 8 deletions(-)
 create mode 100644 sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
index c6a533dfae4d0..1309f848e46c5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLParserSuite.scala
@@ -361,14 +361,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
     assert(e.contains("Found duplicate keys 'a'"))
   }
 
-  test("empty values in non-optional partition specs") {
-    val e = intercept[ParseException] {
-      parser.parsePlan(
-        "SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
-    }.getMessage
-    assert(e.contains("Found an empty partition key 'b'"))
-  }
-
   test("Test CTAS #1") {
     val s1 =
       """
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
new file mode 100644
index 0000000000000..a658507558bdb
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.execution.SparkSqlParser
+import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.test.SharedSparkSession
+
+class ShowPartitionsParserSuite extends SharedSparkSession {
+  test("empty values in non-optional partition specs") {
+    val parser = new SparkSqlParser(new SQLConf)
+    val e = intercept[ParseException] {
+      parser.parsePlan(
+        "SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
+    }.getMessage
+    assert(e.contains("Found an empty partition key 'b'"))
+  }
+}

From d7bf651ae3eb4da1ca8f711c89455a03b954cc38 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Fri, 13 Nov 2020 19:58:19 +0300
Subject: [PATCH 02/17] Move tests to ShowPartitionsParserSuite
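
Move the statement-level SHOW PARTITIONS tests from the catalyst
DDLParserSuite as well, rewriting the five parsed-plan checks as a single
table-driven loop over SQL text and the expected ShowPartitionsStatement.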
---
 .../sql/catalyst/parser/DDLParserSuite.scala  | 26 -------------------
 .../command/ShowPartitionsParserSuite.scala   | 22 +++++++++++++++-
 2 files changed, 21 insertions(+), 27 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
index cddc392cfa2d7..4ac5c8d0561d9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DDLParserSuite.scala
@@ -1629,32 +1629,6 @@ class DDLParserSuite extends AnalysisTest {
       TruncateTableStatement(Seq("a", "b", "c"), Some(Map("ds" -> "2017-06-10"))))
   }
 
-  test("SHOW PARTITIONS") {
-    val sql1 = "SHOW PARTITIONS t1"
-    val sql2 = "SHOW PARTITIONS db1.t1"
-    val sql3 = "SHOW PARTITIONS t1 PARTITION(partcol1='partvalue', partcol2='partvalue')"
-    val sql4 = "SHOW PARTITIONS a.b.c"
-    val sql5 = "SHOW PARTITIONS a.b.c PARTITION(ds='2017-06-10')"
-
-    val parsed1 = parsePlan(sql1)
-    val expected1 = ShowPartitionsStatement(Seq("t1"), None)
-    val parsed2 = parsePlan(sql2)
-    val expected2 = ShowPartitionsStatement(Seq("db1", "t1"), None)
-    val parsed3 = parsePlan(sql3)
-    val expected3 = ShowPartitionsStatement(Seq("t1"),
-      Some(Map("partcol1" -> "partvalue", "partcol2" -> "partvalue")))
-    val parsed4 = parsePlan(sql4)
-    val expected4 = ShowPartitionsStatement(Seq("a", "b", "c"), None)
-    val parsed5 = parsePlan(sql5)
-    val expected5 = ShowPartitionsStatement(Seq("a", "b", "c"), Some(Map("ds" -> "2017-06-10")))
-
-    comparePlans(parsed1, expected1)
-    comparePlans(parsed2, expected2)
-    comparePlans(parsed3, expected3)
-    comparePlans(parsed4, expected4)
-    comparePlans(parsed5, expected5)
-  }
-
   test("REFRESH TABLE") {
     comparePlans(
       parsePlan("REFRESH TABLE a.b.c"),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
index a658507558bdb..b554197b4a009 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
@@ -17,12 +17,32 @@
 
 package org.apache.spark.sql.execution.command
 
+import org.apache.spark.sql.catalyst.analysis.AnalysisTest
+import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.catalyst.plans.logical.ShowPartitionsStatement
 import org.apache.spark.sql.execution.SparkSqlParser
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 
-class ShowPartitionsParserSuite extends SharedSparkSession {
+class ShowPartitionsParserSuite extends AnalysisTest with SharedSparkSession {
+  test("SHOW PARTITIONS") {
+    Seq(
+      "SHOW PARTITIONS t1" -> ShowPartitionsStatement(Seq("t1"), None),
+      "SHOW PARTITIONS db1.t1" -> ShowPartitionsStatement(Seq("db1", "t1"), None),
+      "SHOW PARTITIONS t1 PARTITION(partcol1='partvalue', partcol2='partvalue')" ->
+        ShowPartitionsStatement(
+          Seq("t1"),
+          Some(Map("partcol1" -> "partvalue", "partcol2" -> "partvalue"))),
+      "SHOW PARTITIONS a.b.c" -> ShowPartitionsStatement(Seq("a", "b", "c"), None),
+      "SHOW PARTITIONS a.b.c PARTITION(ds='2017-06-10')" ->
+        ShowPartitionsStatement(Seq("a", "b", "c"), Some(Map("ds" -> "2017-06-10")))
+    ).foreach { case (sql, expected) =>
+      val parsed = parsePlan(sql)
+      comparePlans(parsed, expected)
+    }
+  }
+
   test("empty values in non-optional partition specs") {
     val parser = new SparkSqlParser(new SQLConf)
     val e = intercept[ParseException] {

From c23048e06df3f5cf347440f33a3c65afa9aa86d1 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Fri, 13 Nov 2020 22:10:05 +0300
Subject: [PATCH 03/17] Add v1/v2 ShowPartitionsSuite
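
Introduce a common ShowPartitionsSuiteBase trait plus catalog-specific
suites under v1/ and v2/, and move the "only supported with v1 tables"
check out of DataSourceV2SQLSuite. The suites can be run individually,
for example (assuming the standard sbt setup):

  build/sbt "sql/testOnly *ShowPartitionsSuite*"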
---
 .../sql/connector/DataSourceV2SQLSuite.scala  | 15 -----
 .../command/ShowPartitionsSuiteBase.scala     | 47 +++++++++++++
 .../command/v1/ShowPartitionsSuite.scala      | 44 +++++++++++++
 .../command/v2/ShowPartitionsSuite.scala      | 66 +++++++++++++++++++
 4 files changed, 157 insertions(+), 15 deletions(-)
 create mode 100644 sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
 create mode 100644 sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
 create mode 100644 sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
index db3f11dbda51a..5f7be7c4c565b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/DataSourceV2SQLSuite.scala
@@ -1909,21 +1909,6 @@ class DataSourceV2SQLSuite
     }
   }
 
-  test("SHOW PARTITIONS") {
-    val t = "testcat.ns1.ns2.tbl"
-    withTable(t) {
-      sql(
-        s"""
-           |CREATE TABLE $t (id bigint, data string)
-           |USING foo
-           |PARTITIONED BY (id)
-         """.stripMargin)
-
-      testV1Command("SHOW PARTITIONS", t)
-      testV1Command("SHOW PARTITIONS", s"$t PARTITION(id='1')")
-    }
-  }
-
   test("LOAD DATA INTO TABLE") {
     val t = "testcat.ns1.ns2.tbl"
     withTable(t) {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
new file mode 100644
index 0000000000000..5913177f62020
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command
+
+import org.scalactic.source.Position
+import org.scalatest.Tag
+
+import org.apache.spark.sql.{QueryTest, Row}
+import org.apache.spark.sql.test.SQLTestUtils
+import org.apache.spark.sql.types.StructType
+
+trait ShowPartitionsSuiteBase extends QueryTest with SQLTestUtils {
+  protected def version: String
+  protected def catalog: String
+  protected def defaultNamespace: Seq[String]
+  protected def defaultUsing: String
+  case class ShowRow(namespace: String, table: String, isTemporary: Boolean)
+  protected def getRows(showRows: Seq[ShowRow]): Seq[Row]
+  // Gets the schema of `SHOW PARTITIONS`
+  protected def showSchema: StructType
+
+  protected def runShowTablesSql(sqlText: String, expected: Seq[ShowRow]): Unit = {
+    val df = spark.sql(sqlText)
+    assert(df.schema === showSchema)
+    checkAnswer(df, getRows(expected))
+  }
+
+  override def test(testName: String, testTags: Tag*)(testFun: => Any)
+    (implicit pos: Position): Unit = {
+    super.test(s"SHOW PARTITIONS $version: " + testName, testTags: _*)(testFun)
+  }
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
new file mode 100644
index 0000000000000..6b31b28ed60a4
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v1
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.connector.catalog.CatalogManager
+import org.apache.spark.sql.execution.command
+import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.sql.types.{BooleanType, StringType, StructType}
+
+trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
+  override def version: String = "V1"
+  override def catalog: String = CatalogManager.SESSION_CATALOG_NAME
+  override def defaultNamespace: Seq[String] = Seq("default")
+  override def defaultUsing: String = "USING parquet"
+  override def showSchema: StructType = {
+    new StructType()
+      .add("database", StringType, nullable = false)
+      .add("tableName", StringType, nullable = false)
+      .add("isTemporary", BooleanType, nullable = false)
+  }
+  override def getRows(showRows: Seq[ShowRow]): Seq[Row] = {
+    showRows.map {
+      case ShowRow(namespace, table, isTemporary) => Row(namespace, table, isTemporary)
+    }
+  }
+}
+
+class ShowPartitionsSuite extends ShowPartitionsSuiteBase with SharedSparkSession
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
new file mode 100644
index 0000000000000..56b8d77c13929
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution.command.v2
+
+import org.apache.spark.SparkConf
+import org.apache.spark.sql.{AnalysisException, Row}
+import org.apache.spark.sql.connector.InMemoryTableCatalog
+import org.apache.spark.sql.execution.command
+import org.apache.spark.sql.test.SharedSparkSession
+import org.apache.spark.sql.types.{StringType, StructType}
+
+class ShowPartitionsSuite extends command.ShowPartitionsSuiteBase with SharedSparkSession {
+  override def version: String = "V2"
+  override def catalog: String = "test_catalog"
+  override def defaultNamespace: Seq[String] = Nil
+  override def defaultUsing: String = "USING _"
+  override def showSchema: StructType = {
+    new StructType()
+      .add("namespace", StringType, nullable = false)
+      .add("tableName", StringType, nullable = false)
+  }
+  override def getRows(showRows: Seq[ShowRow]): Seq[Row] = {
+    showRows.map {
+      case ShowRow(namespace, table, _) => Row(namespace, table)
+    }
+  }
+
+  override def sparkConf: SparkConf = super.sparkConf
+    .set(s"spark.sql.catalog.$catalog", classOf[InMemoryTableCatalog].getName)
+
+  test("V2 does not support SHOW PARTITIONS") {
+    def testV1Command(sqlCommand: String, sqlParams: String): Unit = {
+      val e = intercept[AnalysisException] {
+        sql(s"$sqlCommand $sqlParams")
+      }
+      assert(e.message.contains(s"$sqlCommand is only supported with v1 tables"))
+    }
+    val t = s"$catalog.ns1.ns2.tbl"
+    withTable(t) {
+      sql(
+        s"""
+           |CREATE TABLE $t (id bigint, data string)
+           |$defaultUsing
+           |PARTITIONED BY (id)
+         """.stripMargin)
+
+      testV1Command("SHOW PARTITIONS", t)
+      testV1Command("SHOW PARTITIONS", s"$t PARTITION(id='1')")
+    }
+  }
+}

From 87e86b539661478e3641a48db8bc40175fcaf3c1 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Fri, 13 Nov 2020 22:27:34 +0300
Subject: [PATCH 04/17] Move a view test

---
 .../apache/spark/sql/execution/SQLViewSuite.scala  |  1 -
 .../execution/command/v1/ShowPartitionsSuite.scala | 12 ++++++++++++
 .../execution/command/v2/ShowPartitionsSuite.scala |  2 +-
 3 files changed, 13 insertions(+), 2 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
index 87a5cb9f73355..792f920ee0217 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SQLViewSuite.scala
@@ -181,7 +181,6 @@ abstract class SQLViewSuite extends QueryTest with SQLTestUtils {
         sql(s"SHOW CREATE TABLE $viewName")
       }.getMessage
       assert(e3.contains(s"$viewName is a temp view not table or permanent view"))
-      assertNoSuchTable(s"SHOW PARTITIONS $viewName")
       val e4 = intercept[AnalysisException] {
         sql(s"ANALYZE TABLE $viewName COMPUTE STATISTICS")
       }.getMessage
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index 6b31b28ed60a4..94fc214546516 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution.command.v1
 
 import org.apache.spark.sql.Row
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
@@ 
-39,6 +40,17 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
       case ShowRow(namespace, table, isTemporary) => Row(namespace, table, isTemporary)
     }
   }
+
+  test("issue exceptions on the temporary view") {
+    val viewName = "test_view"
+    withTempView(viewName) {
+      spark.range(10).createTempView(viewName)
+      val errMsg = intercept[NoSuchTableException] {
+        sql(s"SHOW PARTITIONS $viewName")
+      }.getMessage
+      assert(errMsg.contains(s"Table or view '$viewName' not found"))
+    }
+  }
 }
 
 class ShowPartitionsSuite extends ShowPartitionsSuiteBase with SharedSparkSession
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
index 56b8d77c13929..55dabe59b2df8 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
@@ -43,7 +43,7 @@ class ShowPartitionsSuite extends command.ShowPartitionsSuiteBase with SharedSpa
   override def sparkConf: SparkConf = super.sparkConf
     .set(s"spark.sql.catalog.$catalog", classOf[InMemoryTableCatalog].getName)
 
-  test("V2 does not support SHOW PARTITIONS") {
+  test("not supported SHOW PARTITIONS") {
     def testV1Command(sqlCommand: String, sqlParams: String): Unit = {
       val e = intercept[AnalysisException] {
         sql(s"$sqlCommand $sqlParams")

From 851929b6e60bb5e49fda4e1aac959d55ae9881e7 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 13:58:29 +0300
Subject: [PATCH 05/17] Add hive.execution.command.ShowPartitionsSuite

---
 .../command/ShowPartitionsSuite.scala         | 26 +++++++++++++++++++
 1 file changed, 26 insertions(+)
 create mode 100644 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
new file mode 100644
index 0000000000000..a92478faf0e16
--- /dev/null
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.hive.execution.command
+
+import org.apache.spark.sql.execution.command.v1
+import org.apache.spark.sql.hive.test.TestHiveSingleton
+
+class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingleton {
+  override def version: String = "Hive V1"
+  override def defaultUsing: String = "USING HIVE"
+}

From 82432c8ba526ba896a644e0e5f3272b3d47c95a4 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 17:39:34 +0300
Subject: [PATCH 06/17] Move tests from HiveCommandSuite
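
Move the remaining SHOW PARTITIONS tests out of HiveCommandSuite: the
generic ones become shared tests in the v1 trait, while the Hive-specific
ones land in the Hive suite (temporarily ignored until they are ported).
The ShowRow/showSchema helpers copied from the SHOW TABLES suite are
unused here and are dropped from the base trait.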
---
 .../command/ShowPartitionsSuiteBase.scala     | 13 +--
 .../command/v1/ShowPartitionsSuite.scala      | 37 ++++++--
 .../command/v2/ShowPartitionsSuite.scala      | 13 +--
 .../sql/hive/execution/HiveCommandSuite.scala | 86 +------------------
 .../command/ShowPartitionsSuite.scala         | 78 +++++++++++++++++
 5 files changed, 109 insertions(+), 118 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
index 5913177f62020..413e170326eea 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsSuiteBase.scala
@@ -20,25 +20,14 @@ package org.apache.spark.sql.execution.command
 import org.scalactic.source.Position
 import org.scalatest.Tag
 
-import org.apache.spark.sql.{QueryTest, Row}
+import org.apache.spark.sql.QueryTest
 import org.apache.spark.sql.test.SQLTestUtils
-import org.apache.spark.sql.types.StructType
 
 trait ShowPartitionsSuiteBase extends QueryTest with SQLTestUtils {
   protected def version: String
   protected def catalog: String
   protected def defaultNamespace: Seq[String]
   protected def defaultUsing: String
-  case class ShowRow(namespace: String, table: String, isTemporary: Boolean)
-  protected def getRows(showRows: Seq[ShowRow]): Seq[Row]
-  // Gets the schema of `SHOW PARTITIONS`
-  protected def showSchema: StructType
-
-  protected def runShowTablesSql(sqlText: String, expected: Seq[ShowRow]): Unit = {
-    val df = spark.sql(sqlText)
-    assert(df.schema === showSchema)
-    checkAnswer(df, getRows(expected))
-  }
 
   override def test(testName: String, testTags: Tag*)(testFun: => Any)
     (implicit pos: Position): Unit = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index 94fc214546516..613ffa1e90329 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -22,22 +22,41 @@ import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.sql.types.{BooleanType, StringType, StructType}
 
 trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
   override def version: String = "V1"
   override def catalog: String = CatalogManager.SESSION_CATALOG_NAME
   override def defaultNamespace: Seq[String] = Seq("default")
   override def defaultUsing: String = "USING parquet"
-  override def showSchema: StructType = {
-    new StructType()
-      .add("database", StringType, nullable = false)
-      .add("tableName", StringType, nullable = false)
-      .add("isTemporary", BooleanType, nullable = false)
+
+  protected def createDateTable(table: String): Unit = {
+    sql(s"""
+      |CREATE TABLE $table (price int, qty int, year int, month int)
+      |$defaultUsing
+      |partitioned by (year, month)""".stripMargin)
+    sql(s"INSERT INTO $table PARTITION(year = 2015, month = 1) SELECT 1, 1")
+    sql(s"INSERT INTO $table PARTITION(year = 2015, month = 2) SELECT 2, 2")
+    sql(s"INSERT INTO $table PARTITION(year = 2016, month = 2) SELECT 3, 3")
+    sql(s"INSERT INTO $table PARTITION(year = 2016, month = 3) SELECT 3, 3")
   }
-  override def getRows(showRows: Seq[ShowRow]): Seq[Row] = {
-    showRows.map {
-      case ShowRow(namespace, table, isTemporary) => Row(namespace, table, isTemporary)
+
+  test("show everything") {
+    val table = "dateTable"
+    withTable(table) {
+      createDateTable(table)
+      checkAnswer(
+        sql(s"show partitions $table"),
+        Row("year=2015/month=1") ::
+        Row("year=2015/month=2") ::
+        Row("year=2016/month=2") ::
+        Row("year=2016/month=3") :: Nil)
+
+      checkAnswer(
+        sql(s"show partitions default.$table"),
+        Row("year=2015/month=1") ::
+        Row("year=2015/month=2") ::
+        Row("year=2016/month=2") ::
+        Row("year=2016/month=3") :: Nil)
     }
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
index 55dabe59b2df8..296f5a9e09871 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
@@ -18,27 +18,16 @@
 package org.apache.spark.sql.execution.command.v2
 
 import org.apache.spark.SparkConf
-import org.apache.spark.sql.{AnalysisException, Row}
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.connector.InMemoryTableCatalog
 import org.apache.spark.sql.execution.command
 import org.apache.spark.sql.test.SharedSparkSession
-import org.apache.spark.sql.types.{StringType, StructType}
 
 class ShowPartitionsSuite extends command.ShowPartitionsSuiteBase with SharedSparkSession {
   override def version: String = "V2"
   override def catalog: String = "test_catalog"
   override def defaultNamespace: Seq[String] = Nil
   override def defaultUsing: String = "USING _"
-  override def showSchema: StructType = {
-    new StructType()
-      .add("namespace", StringType, nullable = false)
-      .add("tableName", StringType, nullable = false)
-  }
-  override def getRows(showRows: Seq[ShowRow]): Seq[Row] = {
-    showRows.map {
-      case ShowRow(namespace, table, _) => Row(namespace, table)
-    }
-  }
 
   override def sparkConf: SparkConf = super.sparkConf
     .set(s"spark.sql.catalog.$catalog", classOf[InMemoryTableCatalog].getName)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index a78fd506b752e..ec398a55e5095 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -22,9 +22,8 @@ import java.io.File
 import com.google.common.io.Files
 import org.apache.hadoop.fs.{FileContext, FsConstants, Path}
 
-import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
+import org.apache.spark.sql.{AnalysisException, QueryTest, Row}
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
 import org.apache.spark.sql.execution.command.LoadDataCommand
 import org.apache.spark.sql.hive.test.TestHiveSingleton
@@ -33,7 +32,6 @@ import org.apache.spark.sql.test.SQLTestUtils
 import org.apache.spark.sql.types.StructType
 
 class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
-  import testImplicits._
 
   protected override def beforeAll(): Unit = {
     super.beforeAll()
@@ -393,88 +391,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     }
   }
 
-
-  test("show partitions - show everything") {
-    checkAnswer(
-      sql("show partitions parquet_tab4"),
-      Row("year=2015/month=1") ::
-      Row("year=2015/month=2") ::
-      Row("year=2016/month=2") ::
-      Row("year=2016/month=3") :: Nil)
-
-    checkAnswer(
-      sql("show partitions default.parquet_tab4"),
-      Row("year=2015/month=1") ::
-      Row("year=2015/month=2") ::
-      Row("year=2016/month=2") ::
-      Row("year=2016/month=3") :: Nil)
-  }
-
-  test("show partitions - show everything more than 5 part keys") {
-    checkAnswer(
-      sql("show partitions parquet_tab5"),
-      Row("year=2016/month=3/hour=10/minute=10/sec=10/extra=1") ::
-      Row("year=2016/month=4/hour=10/minute=10/sec=10/extra=1") :: Nil)
-  }
-
-  test("show partitions - filter") {
-    checkAnswer(
-      sql("show partitions default.parquet_tab4 PARTITION(year=2015)"),
-      Row("year=2015/month=1") ::
-      Row("year=2015/month=2") :: Nil)
-
-    checkAnswer(
-      sql("show partitions default.parquet_tab4 PARTITION(year=2015, month=1)"),
-      Row("year=2015/month=1") :: Nil)
-
-    checkAnswer(
-      sql("show partitions default.parquet_tab4 PARTITION(month=2)"),
-      Row("year=2015/month=2") ::
-      Row("year=2016/month=2") :: Nil)
-  }
-
-  test("show partitions - empty row") {
-    withTempView("parquet_temp") {
-      sql(
-        """
-          |CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
-          |USING org.apache.spark.sql.parquet.DefaultSource
-        """.stripMargin)
-      // An empty sequence of row is returned for session temporary table.
-      intercept[NoSuchTableException] {
-        sql("SHOW PARTITIONS parquet_temp")
-      }
-
-      val message1 = intercept[AnalysisException] {
-        sql("SHOW PARTITIONS parquet_tab3")
-      }.getMessage
-      assert(message1.contains("not allowed on a table that is not partitioned"))
-
-      val message2 = intercept[AnalysisException] {
-        sql("SHOW PARTITIONS parquet_tab4 PARTITION(abcd=2015, xyz=1)")
-      }.getMessage
-      assert(message2.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
-
-      val message3 = intercept[AnalysisException] {
-        sql("SHOW PARTITIONS parquet_view1")
-      }.getMessage
-      assert(message3.contains("is not allowed on a view"))
-    }
-  }
-
-  test("show partitions - datasource") {
-    withTable("part_datasrc") {
-      val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
-      df.write
-        .partitionBy("a")
-        .format("parquet")
-        .mode(SaveMode.Overwrite)
-        .saveAsTable("part_datasrc")
-
-      assert(sql("SHOW PARTITIONS part_datasrc").count() == 3)
-    }
-  }
-
   test("SPARK-25918: LOAD DATA LOCAL INPATH should handle a relative path") {
     val localFS = FileContext.getLocalFSFileContext()
     val workingDir = localFS.getWorkingDirectory
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index a92478faf0e16..38e577866195e 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -17,10 +17,88 @@
 
 package org.apache.spark.sql.hive.execution.command
 
+import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
+import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.execution.command.v1
 import org.apache.spark.sql.hive.test.TestHiveSingleton
 
 class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingleton {
   override def version: String = "Hive V1"
   override def defaultUsing: String = "USING HIVE"
+
+  override protected def createDateTable(table: String): Unit = {
+    sql(s"""
+      |CREATE TABLE $table (price int, qty int)
+      |partitioned by (year int, month int)""".stripMargin)
+    sql(s"INSERT INTO $table PARTITION(year = 2015, month = 1) SELECT 1, 1")
+    sql(s"INSERT INTO $table PARTITION(year = 2015, month = 2) SELECT 2, 2")
+    sql(s"INSERT INTO $table PARTITION(year = 2016, month = 2) SELECT 3, 3")
+    sql(s"INSERT INTO $table PARTITION(year = 2016, month = 3) SELECT 3, 3")
+  }
+
+  ignore("show partitions - show everything more than 5 part keys") {
+    checkAnswer(
+      sql("show partitions parquet_tab5"),
+      Row("year=2016/month=3/hour=10/minute=10/sec=10/extra=1") ::
+      Row("year=2016/month=4/hour=10/minute=10/sec=10/extra=1") :: Nil)
+  }
+
+  ignore("show partitions - filter") {
+    checkAnswer(
+      sql("show partitions default.parquet_tab4 PARTITION(year=2015)"),
+      Row("year=2015/month=1") ::
+      Row("year=2015/month=2") :: Nil)
+
+    checkAnswer(
+      sql("show partitions default.parquet_tab4 PARTITION(year=2015, month=1)"),
+      Row("year=2015/month=1") :: Nil)
+
+    checkAnswer(
+      sql("show partitions default.parquet_tab4 PARTITION(month=2)"),
+      Row("year=2015/month=2") ::
+      Row("year=2016/month=2") :: Nil)
+  }
+
+  ignore("show partitions - empty row") {
+    withTempView("parquet_temp") {
+      sql(
+        """
+          |CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
+          |USING org.apache.spark.sql.parquet.DefaultSource
+        """.stripMargin)
+      // An empty sequence of rows is returned for a session temporary table.
+      intercept[NoSuchTableException] {
+        sql("SHOW PARTITIONS parquet_temp")
+      }
+
+      val message1 = intercept[AnalysisException] {
+        sql("SHOW PARTITIONS parquet_tab3")
+      }.getMessage
+      assert(message1.contains("not allowed on a table that is not partitioned"))
+
+      val message2 = intercept[AnalysisException] {
+        sql("SHOW PARTITIONS parquet_tab4 PARTITION(abcd=2015, xyz=1)")
+      }.getMessage
+      assert(message2.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
+
+      val message3 = intercept[AnalysisException] {
+        sql("SHOW PARTITIONS parquet_view1")
+      }.getMessage
+      assert(message3.contains("is not allowed on a view"))
+    }
+  }
+
+  ignore("show partitions - datasource") {
+    import spark.implicits._
+    withTable("part_datasrc") {
+      val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
+      df.write
+        .partitionBy("a")
+        .format("parquet")
+        .mode(SaveMode.Overwrite)
+        .saveAsTable("part_datasrc")
+
+      assert(sql("SHOW PARTITIONS part_datasrc").count() == 3)
+    }
+  }
 }

From 2351b6459eafa324cb67971d433aa18f842a9e7a Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 17:48:54 +0300
Subject: [PATCH 07/17] Move "filter by partitions" to v1 ShowPartitionsSuite

---
 .../command/v1/ShowPartitionsSuite.scala      | 18 ++++++++++++++++++
 .../command/ShowPartitionsSuite.scala         | 16 ----------------
 2 files changed, 18 insertions(+), 16 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index 613ffa1e90329..3be08f5504ccb 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -60,6 +60,24 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     }
   }
 
+  test("filter by partitions") {
+    val table = "dateTable"
+    withTable(table) {
+      createDateTable(table)
+      checkAnswer(
+        sql(s"show partitions default.$table PARTITION(year=2015)"),
+        Row("year=2015/month=1") ::
+        Row("year=2015/month=2") :: Nil)
+      checkAnswer(
+        sql(s"show partitions default.$table PARTITION(year=2015, month=1)"),
+        Row("year=2015/month=1") :: Nil)
+      checkAnswer(
+        sql(s"show partitions default.$table PARTITION(month=2)"),
+        Row("year=2015/month=2") ::
+        Row("year=2016/month=2") :: Nil)
+    }
+  }
+
   test("issue exceptions on the temporary view") {
     val viewName = "test_view"
     withTempView(viewName) {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index 38e577866195e..2dae0b9533738 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -43,22 +43,6 @@ class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingle
       Row("year=2016/month=4/hour=10/minute=10/sec=10/extra=1") :: Nil)
   }
 
-  ignore("show partitions - filter") {
-    checkAnswer(
-      sql("show partitions default.parquet_tab4 PARTITION(year=2015)"),
-      Row("year=2015/month=1") ::
-      Row("year=2015/month=2") :: Nil)
-
-    checkAnswer(
-      sql("show partitions default.parquet_tab4 PARTITION(year=2015, month=1)"),
-      Row("year=2015/month=1") :: Nil)
-
-    checkAnswer(
-      sql("show partitions default.parquet_tab4 PARTITION(month=2)"),
-      Row("year=2015/month=2") ::
-      Row("year=2016/month=2") :: Nil)
-  }
-
   ignore("show partitions - empty row") {
     withTempView("parquet_temp") {
       sql(
From cc89024bff3f1b9115b0ff743c03b0b685bd59d9 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 18:02:46 +0300
Subject: [PATCH 08/17] Move the test "show partitions from a datasource"

---
 .../command/v1/ShowPartitionsSuite.scala      | 18 ++++++++++++++++--
 .../command/ShowPartitionsSuite.scala         | 16 +---------------
 2 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index 3be08f5504ccb..ab2876ef073ac 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.execution.command.v1
 
-import org.apache.spark.sql.Row
+import org.apache.spark.sql.{Row, SaveMode}
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.execution.command
@@ -90,4 +90,18 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
   }
 }
 
-class ShowPartitionsSuite extends ShowPartitionsSuiteBase with SharedSparkSession
+class ShowPartitionsSuite extends ShowPartitionsSuiteBase with SharedSparkSession {
+  test("show partitions from a datasource") {
+    import testImplicits._
+    withTable("part_datasrc") {
+      val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
+      df.write
+        .partitionBy("a")
+        .format("parquet")
+        .mode(SaveMode.Overwrite)
+        .saveAsTable("part_datasrc")
+
+      assert(sql("SHOW PARTITIONS part_datasrc").count() == 3)
+    }
+  }
+}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index 2dae0b9533738..854431d64b624 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.hive.execution.command
 
-import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
+import org.apache.spark.sql.{AnalysisException, Row}
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.execution.command.v1
 import org.apache.spark.sql.hive.test.TestHiveSingleton
@@ -71,18 +71,4 @@ class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingle
       assert(message3.contains("is not allowed on a view"))
     }
   }
-
-  ignore("show partitions - datasource") {
-    import spark.implicits._
-    withTable("part_datasrc") {
-      val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
-      df.write
-        .partitionBy("a")
-        .format("parquet")
-        .mode(SaveMode.Overwrite)
-        .saveAsTable("part_datasrc")
-
-      assert(sql("SHOW PARTITIONS part_datasrc").count() == 3)
-    }
-  }
 }

From a9bcdbb91c19473456c34e7d20a0a12e9f7e2506 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 19:04:32 +0300
Subject: [PATCH 09/17] de-dup code
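
Split table creation from data loading (createDateTable/fillDateTable and
createWideTable/fillWideTable) so that the Hive suite only has to
override the CREATE TABLE part, and drop the now-unused parquet_tab5
setup from HiveCommandSuite.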
---
 .../command/v1/ShowPartitionsSuite.scala      | 37 +++++++++++++++++++
 .../sql/hive/execution/HiveCommandSuite.scala | 16 --------
 .../command/ShowPartitionsSuite.scala         | 20 ++++------
 3 files changed, 45 insertions(+), 28 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index ab2876ef073ac..b6c29363d3b15 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -34,16 +34,40 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
       |CREATE TABLE $table (price int, qty int, year int, month int)
       |$defaultUsing
       |partitioned by (year, month)""".stripMargin)
+  }
+
+  protected def fillDateTable(table: String): Unit = {
     sql(s"INSERT INTO $table PARTITION(year = 2015, month = 1) SELECT 1, 1")
     sql(s"INSERT INTO $table PARTITION(year = 2015, month = 2) SELECT 2, 2")
     sql(s"INSERT INTO $table PARTITION(year = 2016, month = 2) SELECT 3, 3")
     sql(s"INSERT INTO $table PARTITION(year = 2016, month = 3) SELECT 3, 3")
   }
 
+  protected def createWideTable(table: String): Unit = {
+    sql(s"""
+      |CREATE TABLE $table (
+      |  price int, qty int,
+      |  year int, month int, hour int, minute int, sec int, extra int)
+      |$defaultUsing
+      |PARTITIONED BY (year, month, hour, minute, sec, extra)""".stripMargin)
+  }
+
+  protected def fillWideTable(table: String): Unit = {
+    sql(s"""
+      |INSERT INTO $table
+      |PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
+      """.stripMargin)
+    sql(s"""
+      |INSERT INTO $table
+      |PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
+      """.stripMargin)
+  }
+
   test("show everything") {
     val table = "dateTable"
     withTable(table) {
       createDateTable(table)
+      fillDateTable(table)
       checkAnswer(
         sql(s"show partitions $table"),
         Row("year=2015/month=1") ::
@@ -64,6 +88,7 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     val table = "dateTable"
     withTable(table) {
       createDateTable(table)
+      fillDateTable(table)
       checkAnswer(
         sql(s"show partitions default.$table PARTITION(year=2015)"),
         Row("year=2015/month=1") ::
@@ -78,6 +103,18 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     }
   }
 
+  test("show everything more than 5 part keys") {
+    val table = "wideTable"
+    withTable(table) {
+      createWideTable(table)
+      fillWideTable(table)
+      checkAnswer(
+        sql(s"show partitions $table"),
+        Row("year=2016/month=3/hour=10/minute=10/sec=10/extra=1") ::
+        Row("year=2016/month=4/hour=10/minute=10/sec=10/extra=1") :: Nil)
+    }
+  }
+
   test("issue exceptions on the temporary view") {
     val viewName = "test_view"
     withTempView(viewName) {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index ec398a55e5095..91564d4b50841 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -62,21 +62,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     sql("INSERT INTO parquet_tab4 PARTITION(year = 2015, month = 2) SELECT 2, 2")
     sql("INSERT INTO parquet_tab4 PARTITION(year = 2016, month = 2) SELECT 3, 3")
     sql("INSERT INTO parquet_tab4 PARTITION(year = 2016, month = 3) SELECT 3, 3")
-    sql(
-      """
-        |CREATE TABLE parquet_tab5 (price int, qty int)
-        |PARTITIONED BY (year int, month int, hour int, minute int, sec int, extra int)
-      """.stripMargin)
-    sql(
-      """
-        |INSERT INTO parquet_tab5
-        |PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
-      """.stripMargin)
-    sql(
-      """
-        |INSERT INTO parquet_tab5
-        |PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
-      """.stripMargin)
     sql("CREATE VIEW parquet_view1 as select * from parquet_tab4")
   }
 
@@ -87,7 +72,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
       sql("DROP TABLE IF EXISTS parquet_tab3")
       sql("DROP VIEW IF EXISTS parquet_view1")
       sql("DROP TABLE IF EXISTS parquet_tab4")
-      sql("DROP TABLE IF EXISTS parquet_tab5")
     } finally {
       super.afterAll()
     }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index 854431d64b624..7234e9d5e7845 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.hive.execution.command
 
-import org.apache.spark.sql.{AnalysisException, Row}
+import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.execution.command.v1
 import org.apache.spark.sql.hive.test.TestHiveSingleton
@@ -28,19 +28,15 @@ class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingle
 
   override protected def createDateTable(table: String): Unit = {
     sql(s"""
-      |CREATE TABLE $table (price int, qty int)
-      |partitioned by (year int, month int)""".stripMargin)
-    sql(s"INSERT INTO $table PARTITION(year = 2015, month = 1) SELECT 1, 1")
-    sql(s"INSERT INTO $table PARTITION(year = 2015, month = 2) SELECT 2, 2")
-    sql(s"INSERT INTO $table PARTITION(year = 2016, month = 2) SELECT 3, 3")
-    sql(s"INSERT INTO $table PARTITION(year = 2016, month = 3) SELECT 3, 3")
+        |CREATE TABLE $table (price int, qty int)
+        |partitioned by (year int, month int)""".stripMargin)
   }
 
-  ignore("show partitions - show everything more than 5 part keys") {
-    checkAnswer(
-      sql("show partitions parquet_tab5"),
-      Row("year=2016/month=3/hour=10/minute=10/sec=10/extra=1") ::
-      Row("year=2016/month=4/hour=10/minute=10/sec=10/extra=1") :: Nil)
+  override protected def createWideTable(table: String): Unit = {
+    sql(s"""
+      |CREATE TABLE $table (price int, qty int)
+      |PARTITIONED BY (year int, month int, hour int, minute int, sec int, extra int)
+      """.stripMargin)
   }

From 38d3c67869782cc7ed7470b2c2f22b96865498c0 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 20:05:48 +0300
Subject: [PATCH 10/17] Move the test "non-partitioning columns"

---
 .../execution/command/v1/ShowPartitionsSuite.scala | 14 +++++++++++++-
 .../execution/command/ShowPartitionsSuite.scala    |  5 -----
 2 files changed, 13 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index b6c29363d3b15..a85c29a76cde4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.execution.command.v1
 
-import org.apache.spark.sql.{Row, SaveMode}
+import org.apache.spark.sql.{AnalysisException, Row, SaveMode}
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.execution.command
@@ -115,6 +115,18 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     }
   }
 
+  test("non-partitioning columns") {
+    val table = "dateTable"
+    withTable(table) {
+      createDateTable(table)
+      fillDateTable(table)
+      val errMsg = intercept[AnalysisException] {
+        sql(s"SHOW PARTITIONS $table PARTITION(abcd=2015, xyz=1)")
+      }.getMessage
+      assert(errMsg.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
+    }
+  }
+
   test("issue exceptions on the temporary view") {
     val viewName = "test_view"
     withTempView(viewName) {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index 7234e9d5e7845..d5f4a11fab8a5 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -56,11 +56,6 @@ class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingle
       }.getMessage
       assert(message1.contains("not allowed on a table that is not partitioned"))
 
-      val message2 = intercept[AnalysisException] {
-        sql("SHOW PARTITIONS parquet_tab4 PARTITION(abcd=2015, xyz=1)")
-      }.getMessage
-      assert(message2.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
-
      val message3 = intercept[AnalysisException] {
         sql("SHOW PARTITIONS parquet_view1")
       }.getMessage

From f86f1599eb4c75eaa19d4b6621aba0dc97650611 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 20:14:21 +0300
Subject: [PATCH 11/17] Fix "show partitions of not partitioned table"

---
 .../execution/command/v1/ShowPartitionsSuite.scala   | 11 +++++++++++
 .../spark/sql/hive/execution/HiveCommandSuite.scala  |  2 --
 .../hive/execution/command/ShowPartitionsSuite.scala |  5 -----
 3 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index a85c29a76cde4..f7a53b0b8447e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -127,6 +127,17 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     }
   }
 
+  test("show partitions of not partitioned table") {
+    val table = "not_partitioned_table"
+    withTable(table) {
+      sql(s"CREATE TABLE $table (col1 int) $defaultUsing")
+      val errMsg = intercept[AnalysisException] {
+        sql(s"SHOW PARTITIONS $table")
+      }.getMessage
+      assert(errMsg.contains("not allowed on a table that is not partitioned"))
+    }
+  }
+
   test("issue exceptions on the temporary view") {
     val viewName = "test_view"
     withTempView(viewName) {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index
91564d4b50841..d3398842afb21 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -56,7 +56,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
         |STORED AS PARQUET
         |TBLPROPERTIES('prop1Key'="prop1Val", '`prop2Key`'="prop2Val")
       """.stripMargin)
-    sql("CREATE TABLE parquet_tab3(col1 int, `col 2` int) USING hive")
     sql("CREATE TABLE parquet_tab4 (price int, qty int) partitioned by (year int, month int)")
     sql("INSERT INTO parquet_tab4 PARTITION(year = 2015, month = 1) SELECT 1, 1")
     sql("INSERT INTO parquet_tab4 PARTITION(year = 2015, month = 2) SELECT 2, 2")
@@ -69,7 +68,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     try {
       sql("DROP TABLE IF EXISTS parquet_tab1")
       sql("DROP TABLE IF EXISTS parquet_tab2")
-      sql("DROP TABLE IF EXISTS parquet_tab3")
       sql("DROP VIEW IF EXISTS parquet_view1")
       sql("DROP TABLE IF EXISTS parquet_tab4")
     } finally {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index d5f4a11fab8a5..542d88871ac91 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -51,11 +51,6 @@ class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingle
         sql("SHOW PARTITIONS parquet_temp")
       }
 
-      val message1 = intercept[AnalysisException] {
-        sql("SHOW PARTITIONS parquet_tab3")
-      }.getMessage
-      assert(message1.contains("not allowed on a table that is not partitioned"))
-
       val message3 = intercept[AnalysisException] {
         sql("SHOW PARTITIONS parquet_view1")
       }.getMessage

From 76e6399fda4df9b1f0e6278d30850ad829f4afa9 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 20:25:27 +0300
Subject: [PATCH 12/17] Move the test "show partitions of a view"

---
 .../command/v1/ShowPartitionsSuite.scala        | 15 +++++++++++++++
 .../execution/command/ShowPartitionsSuite.scala |  5 -----
 2 files changed, 15 insertions(+), 5 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index f7a53b0b8447e..2db4d19b89b5e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -138,6 +138,21 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     }
   }
 
+  test("show partitions of a view") {
+    val table = "dateTable"
+    withTable(table) {
+      createDateTable(table)
+      val view = "view1"
+      withView(view) {
+        sql(s"CREATE VIEW $view as select * from $table")
+        val errMsg = intercept[AnalysisException] {
+          sql(s"SHOW PARTITIONS $view")
+        }.getMessage
+        assert(errMsg.contains("is not allowed on a view"))
+      }
+    }
+  }
+
   test("issue exceptions on the temporary view") {
     val viewName = "test_view"
     withTempView(viewName) {
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index 542d88871ac91..7cf044bea1747 100644
---
a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -50,11 +50,6 @@ class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingle
       intercept[NoSuchTableException] {
         sql("SHOW PARTITIONS parquet_temp")
       }
-
-      val message3 = intercept[AnalysisException] {
-        sql("SHOW PARTITIONS parquet_view1")
-      }.getMessage
-      assert(message3.contains("is not allowed on a view"))
     }
   }
 }

From 8707d3ee3c744de47451c190e6023159f0babab3 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 20:39:50 +0300
Subject: [PATCH 13/17] Fix v1/ShowPartitionsSuite
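
Rename the moved temp-view test in the trait and re-implement the "empty
row" check from HiveCommandSuite in the v1 suite only, since
CREATE TEMPORARY VIEW ... USING cannot be used with the Hive data source.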
From 8fe17a0ae9b4708f0034e0ae32bb5a024bad2d92 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Sat, 14 Nov 2020 20:52:50 +0300
Subject: [PATCH 14/17] Add TODO

---
 .../spark/sql/execution/command/v2/ShowPartitionsSuite.scala | 1 +
 1 file changed, 1 insertion(+)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
index 296f5a9e09871..8a63cd49e89e9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowPartitionsSuite.scala
@@ -32,6 +32,7 @@ class ShowPartitionsSuite extends command.ShowPartitionsSuiteBase with SharedSpa
   override def sparkConf: SparkConf = super.sparkConf
     .set(s"spark.sql.catalog.$catalog", classOf[InMemoryTableCatalog].getName)
 
+  // TODO(SPARK-33452): Create a V2 SHOW PARTITIONS execution node
   test("not supported SHOW PARTITIONS") {
     def testV1Command(sqlCommand: String, sqlParams: String): Unit = {
       val e = intercept[AnalysisException] {

From cd04107f7687f37a158e56730883bab5c828af20 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 16 Nov 2020 12:13:18 +0300
Subject: [PATCH 15/17] not partitioned -> non-partitioned

---
 .../spark/sql/execution/command/v1/ShowPartitionsSuite.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index 966033c57e4b1..7ea909f3f6f1d 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -127,7 +127,7 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     }
   }
 
-  test("show partitions of not partitioned table") {
+  test("show partitions of non-partitioned table") {
     val table = "not_partitioned_table"
     withTable(table) {
       sql(s"CREATE TABLE $table (col1 int) $defaultUsing")
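Note: until the TODO above is resolved, SHOW PARTITIONS has no V2 execution
node, so the v2 suite can only assert that the command fails against a
non-session catalog. The check is shaped roughly like the sketch below; the
exact error text is an assumption here, not quoted from the diff:

    test("not supported SHOW PARTITIONS") {
      // $catalog is bound to an InMemoryTableCatalog via sparkConf above.
      val e = intercept[AnalysisException] {
        sql(s"SHOW PARTITIONS $catalog.ns1.ns2.tbl")
      }
      assert(e.getMessage.contains("only supported with v1 tables"))
    }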
From 47925f23f00f34d3c78b691077a8f7acbd57ad47 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 16 Nov 2020 12:27:25 +0300
Subject: [PATCH 16/17] Don't override table creation in Hive: USING HIVE

---
 .../command/v1/ShowPartitionsSuite.scala          | 44 +++++++------------
 .../command/ShowPartitionsSuite.scala             | 13 ------
 2 files changed, 15 insertions(+), 42 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
index 7ea909f3f6f1d..bcc71e9b7241c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v1/ShowPartitionsSuite.scala
@@ -29,45 +29,21 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
   override def defaultNamespace: Seq[String] = Seq("default")
   override def defaultUsing: String = "USING parquet"
 
-  protected def createDateTable(table: String): Unit = {
+  private def createDateTable(table: String): Unit = {
     sql(s"""
       |CREATE TABLE $table (price int, qty int, year int, month int)
       |$defaultUsing
       |partitioned by (year, month)""".stripMargin)
-  }
-
-  protected def fillDateTable(table: String): Unit = {
     sql(s"INSERT INTO $table PARTITION(year = 2015, month = 1) SELECT 1, 1")
     sql(s"INSERT INTO $table PARTITION(year = 2015, month = 2) SELECT 2, 2")
     sql(s"INSERT INTO $table PARTITION(year = 2016, month = 2) SELECT 3, 3")
     sql(s"INSERT INTO $table PARTITION(year = 2016, month = 3) SELECT 3, 3")
   }
 
-  protected def createWideTable(table: String): Unit = {
-    sql(s"""
-      |CREATE TABLE $table (
-      |  price int, qty int,
-      |  year int, month int, hour int, minute int, sec int, extra int)
-      |$defaultUsing
-      |PARTITIONED BY (year, month, hour, minute, sec, extra)""".stripMargin)
-  }
-
-  protected def fillWideTable(table: String): Unit = {
-    sql(s"""
-      |INSERT INTO $table
-      |PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
-      """.stripMargin)
-    sql(s"""
-      |INSERT INTO $table
-      |PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
-      """.stripMargin)
-  }
-
   test("show everything") {
     val table = "dateTable"
     withTable(table) {
       createDateTable(table)
-      fillDateTable(table)
       checkAnswer(
         sql(s"show partitions $table"),
         Row("year=2015/month=1") ::
@@ -88,7 +64,6 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     val table = "dateTable"
     withTable(table) {
       createDateTable(table)
-      fillDateTable(table)
       checkAnswer(
         sql(s"show partitions default.$table PARTITION(year=2015)"),
         Row("year=2015/month=1") ::
@@ -106,8 +81,20 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
   test("show everything more than 5 part keys") {
     val table = "wideTable"
     withTable(table) {
-      createWideTable(table)
-      fillWideTable(table)
+      sql(s"""
+        |CREATE TABLE $table (
+        |  price int, qty int,
+        |  year int, month int, hour int, minute int, sec int, extra int)
+        |$defaultUsing
+        |PARTITIONED BY (year, month, hour, minute, sec, extra)""".stripMargin)
+      sql(s"""
+        |INSERT INTO $table
+        |PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
+        """.stripMargin)
+      sql(s"""
+        |INSERT INTO $table
+        |PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
+        """.stripMargin)
       checkAnswer(
         sql(s"show partitions $table"),
         Row("year=2016/month=3/hour=10/minute=10/sec=10/extra=1") ::
@@ -119,7 +106,6 @@ trait ShowPartitionsSuiteBase extends command.ShowPartitionsSuiteBase {
     val table = "dateTable"
     withTable(table) {
       createDateTable(table)
-      fillDateTable(table)
       val errMsg = intercept[AnalysisException] {
         sql(s"SHOW PARTITIONS $table PARTITION(abcd=2015, xyz=1)")
       }.getMessage
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
index 53d4fe109e5c8..a92478faf0e16 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/command/ShowPartitionsSuite.scala
@@ -23,17 +23,4 @@ import org.apache.spark.sql.hive.test.TestHiveSingleton
 class ShowPartitionsSuite extends v1.ShowPartitionsSuiteBase with TestHiveSingleton {
   override def version: String = "Hive V1"
   override def defaultUsing: String = "USING HIVE"
-
-  override protected def createDateTable(table: String): Unit = {
-    sql(s"""
-      |CREATE TABLE $table (price int, qty int)
-      |partitioned by (year int, month int)""".stripMargin)
-  }
-
-  override protected def createWideTable(table: String): Unit = {
-    sql(s"""
-      |CREATE TABLE $table (price int, qty int)
-      |PARTITIONED BY (year int, month int, hour int, minute int, sec int, extra int)
-      """.stripMargin)
-  }
 }
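Note: the consolidation works because `USING HIVE` tables accept the same
datasource-style DDL as `USING parquet` ones: partition columns sit in the
column list and PARTITIONED BY references them by name only, so one shared
createDateTable now serves both suites. The removed Hive overrides used the
classic Hive form with typed partition columns. Side by side (illustrative
SQL only; the table names are hypothetical):

    // Datasource-style DDL: valid for both defaultUsing values.
    sql("""CREATE TABLE t (price int, qty int, year int, month int)
          |USING HIVE
          |PARTITIONED BY (year, month)""".stripMargin)

    // Classic Hive DDL, as in the removed overrides: partition columns are
    // declared, with types, only inside PARTITIONED BY.
    sql("CREATE TABLE t2 (price int, qty int) PARTITIONED BY (year int, month int)")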
From 59f2b389c70741844d140d8fd8efbcb120676670 Mon Sep 17 00:00:00 2001
From: Max Gekk
Date: Mon, 16 Nov 2020 13:05:30 +0300
Subject: [PATCH 17/17] Fix ShowPartitionsParserSuite

---
 .../sql/execution/command/ShowPartitionsParserSuite.scala | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
index b554197b4a009..bc75528b9644c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ShowPartitionsParserSuite.scala
@@ -22,7 +22,6 @@ import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
 import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.ShowPartitionsStatement
 import org.apache.spark.sql.execution.SparkSqlParser
-import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.test.SharedSparkSession
 
 class ShowPartitionsParserSuite extends AnalysisTest with SharedSparkSession {
@@ -44,9 +43,8 @@ class ShowPartitionsParserSuite extends AnalysisTest with SharedSparkSession {
   }
 
   test("empty values in non-optional partition specs") {
-    val parser = new SparkSqlParser(new SQLConf)
     val e = intercept[ParseException] {
-      parser.parsePlan(
+      new SparkSqlParser().parsePlan(
         "SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
     }.getMessage
     assert(e.contains("Found an empty partition key 'b'"))
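Note: the final patch tracks an upstream API change visible in the diff
itself: SparkSqlParser no longer takes a SQLConf constructor argument (it is
assumed here to pick up the active conf internally), so the suite builds it
with `new SparkSqlParser()`. A minimal sketch of the resulting parser-test
pattern, assuming a ScalaTest suite on a Spark 3.1-era classpath:

    import org.apache.spark.sql.catalyst.parser.ParseException
    import org.apache.spark.sql.execution.SparkSqlParser

    val parser = new SparkSqlParser()
    // A well-formed partition spec parses into a logical plan.
    parser.parsePlan("SHOW PARTITIONS dbx.tab1 PARTITION (a='1')")
    // A key without a value is rejected at parse time, before analysis.
    val e = intercept[ParseException] {
      parser.parsePlan("SHOW PARTITIONS dbx.tab1 PARTITION (a='1', b)")
    }
    assert(e.getMessage.contains("Found an empty partition key 'b'"))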