From a031c47e3a73d430b4ed6efcb8bf5edfc5d33705 Mon Sep 17 00:00:00 2001
From: Gengliang Wang
Date: Thu, 18 Apr 2019 19:49:27 +0800
Subject: [PATCH 1/2] add HiveClientVersions

---
 project/SparkBuild.scala                                | 1 +
 .../apache/spark/sql/hive/client/HiveClientSuite.scala | 8 +++++---
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 656071d3d78c5..f55f187cee5da 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -466,6 +466,7 @@ object SparkParallelTestGrouping {
     "org.apache.spark.sql.hive.StatisticsSuite",
     "org.apache.spark.sql.hive.execution.HiveCompatibilitySuite",
     "org.apache.spark.sql.hive.client.VersionsSuite",
+    "org.apache.spark.sql.hive.client.HiveClientVersions",
     "org.apache.spark.sql.hive.HiveExternalCatalogVersionsSuite",
     "org.apache.spark.ml.classification.LogisticRegressionSuite",
     "org.apache.spark.ml.classification.LinearSVCSuite",
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
index f3d8c2ad440ff..57a02aa5e0c50 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.dsl.expressions._
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.types.{BooleanType, IntegerType, LongType, StructType}
+import org.apache.spark.util.Utils
 
 // TODO: Refactor this to `HivePartitionFilteringSuite`
 class HiveClientSuite(version: String)
@@ -39,8 +40,9 @@ class HiveClientSuite(version: String)
   private val testPartitionCount = 3 * 5 * 4
 
   private def init(tryDirectSql: Boolean): HiveClient = {
+    val location = Some(Utils.createTempDir().toURI)
     val storageFormat = CatalogStorageFormat(
-      locationUri = None,
+      locationUri = location,
       inputFormat = None,
       outputFormat = None,
       serde = None,
@@ -54,11 +56,11 @@ class HiveClientSuite(version: String)
       new StructType().add("value", "int").add("ds", "int").add("h", "int").add("chunk", "string")
     val table = CatalogTable(
       identifier = TableIdentifier("test", Some("default")),
-      tableType = CatalogTableType.MANAGED,
+      tableType = CatalogTableType.EXTERNAL,
       schema = tableSchema,
       partitionColumnNames = Seq("ds", "h", "chunk"),
       storage = CatalogStorageFormat(
-        locationUri = None,
+        locationUri = location,
         inputFormat = Some(classOf[TextInputFormat].getName),
         outputFormat = Some(classOf[HiveIgnoreKeyTextOutputFormat[_, _]].getName),
         serde = Some(classOf[LazySimpleSerDe].getName()),

From dd5747033b9871b03ff786193f98a13e29268f76 Mon Sep 17 00:00:00 2001
From: Gengliang Wang
Date: Thu, 18 Apr 2019 22:11:04 +0800
Subject: [PATCH 2/2] set hive.metastore.warehouse.dir

---
 .../apache/spark/sql/hive/client/HiveClientSuite.scala | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
index 57a02aa5e0c50..bda711200acdb 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/client/HiveClientSuite.scala
@@ -40,9 +40,8 @@ class HiveClientSuite(version: String)
   private val testPartitionCount = 3 * 5 * 4
 
   private def init(tryDirectSql: Boolean): HiveClient = {
-    val location = Some(Utils.createTempDir().toURI)
     val storageFormat = CatalogStorageFormat(
-      locationUri = location,
+      locationUri = None,
       inputFormat = None,
       outputFormat = None,
       serde = None,
@@ -51,16 +50,17 @@ class HiveClientSuite(version: String)
 
     val hadoopConf = new Configuration()
     hadoopConf.setBoolean(tryDirectSqlKey, tryDirectSql)
+    hadoopConf.set("hive.metastore.warehouse.dir", Utils.createTempDir().toURI().toString())
     val client = buildClient(hadoopConf)
     val tableSchema =
       new StructType().add("value", "int").add("ds", "int").add("h", "int").add("chunk", "string")
     val table = CatalogTable(
       identifier = TableIdentifier("test", Some("default")),
-      tableType = CatalogTableType.EXTERNAL,
+      tableType = CatalogTableType.MANAGED,
       schema = tableSchema,
       partitionColumnNames = Seq("ds", "h", "chunk"),
       storage = CatalogStorageFormat(
-        locationUri = location,
+        locationUri = None,
        inputFormat = Some(classOf[TextInputFormat].getName),
        outputFormat = Some(classOf[HiveIgnoreKeyTextOutputFormat[_, _]].getName),
        serde = Some(classOf[LazySimpleSerDe].getName()),
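
A minimal sketch of the mechanism the second commit relies on (not part of either patch): rather than giving the test table an explicit EXTERNAL location, the Hadoop configuration handed to the Hive client points hive.metastore.warehouse.dir at a throwaway directory, so the MANAGED test table lands under a temp path instead of the default shared warehouse. Only standard Spark/Hadoop API appears below; the suite's buildClient helper is referenced only in a comment.

    import org.apache.hadoop.conf.Configuration
    import org.apache.spark.util.Utils

    val hadoopConf = new Configuration()
    // Utils.createTempDir() returns a fresh directory that Spark registers for
    // deletion on JVM exit, so each forked test JVM gets its own warehouse root.
    hadoopConf.set("hive.metastore.warehouse.dir", Utils.createTempDir().toURI.toString)
    // The configured conf is then passed to the suite's client builder,
    // e.g. buildClient(hadoopConf), as in the hunk above.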