From 03ded0479524a24024c1e70c8ce0389d2212aee6 Mon Sep 17 00:00:00 2001 From: Wenchen Fan Date: Thu, 1 Sep 2016 16:29:50 +0800 Subject: [PATCH 1/4] separate the management of temp views and metastore tables/views in SessionCatalog --- .../analysis/AlreadyExistException.scala | 4 +- .../sql/catalyst/catalog/SessionCatalog.scala | 288 ++++++++---------- .../catalyst/catalog/TempViewManager.scala | 92 ++++++ .../catalog/SessionCatalogSuite.scala | 88 ++---- .../spark/sql/execution/command/ddl.scala | 60 ++-- .../spark/sql/execution/command/tables.scala | 128 ++++---- .../spark/sql/execution/command/views.scala | 66 ++-- .../spark/sql/execution/datasources/ddl.scala | 2 +- .../spark/sql/internal/CatalogImpl.scala | 21 +- .../apache/spark/sql/SQLContextSuite.scala | 6 +- .../org/apache/spark/sql/SQLQuerySuite.scala | 11 + .../sql/execution/command/DDLSuite.scala | 24 +- .../spark/sql/internal/CatalogSuite.scala | 13 +- .../sql/test/DataFrameReaderWriterSuite.scala | 11 + .../spark/sql/hive/HiveSessionCatalog.scala | 18 +- .../apache/spark/sql/hive/test/TestHive.scala | 2 +- .../sql/hive/execution/HiveCommandSuite.scala | 25 +- .../sql/hive/execution/HiveDDLSuite.scala | 9 +- .../sql/hive/execution/SQLViewSuite.scala | 12 +- .../sql/sources/HadoopFsRelationTest.scala | 18 +- 20 files changed, 481 insertions(+), 417 deletions(-) create mode 100644 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala index ec56fe7729c2a..bf7e3a29c55ff 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlreadyExistException.scala @@ -30,8 +30,8 @@ class DatabaseAlreadyExistsException(db: String) class TableAlreadyExistsException(db: String, table: String) extends AnalysisException(s"Table or view '$table' already exists in database '$db'") -class TempTableAlreadyExistsException(table: String) - extends AnalysisException(s"Temporary table '$table' already exists") +class TempViewAlreadyExistsException(name: String) + extends AnalysisException(s"Temporary view '$name' already exists") class PartitionAlreadyExistsException(db: String, table: String, spec: TablePartitionSpec) extends AnalysisException( diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala index 9fb5db573b70f..a76a15a36885b 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala @@ -72,9 +72,7 @@ class SessionCatalog( this(externalCatalog, new SimpleFunctionRegistry, new SimpleCatalystConf(true)) } - /** List of temporary tables, mapping from table name to their logical plan. */ - @GuardedBy("this") - protected val tempTables = new mutable.HashMap[String, LogicalPlan] + private val tempViews = new TempViewManager // Note: we track current database here because certain operations do not explicitly // specify the database (e.g. DROP TABLE my_table). 
In these cases we must first
@@ -124,14 +122,14 @@ class SessionCatalog(
 
   private def requireTableExists(name: TableIdentifier): Unit = {
     if (!tableExists(name)) {
-      val db = name.database.getOrElse(currentDb)
+      val db = name.database.getOrElse(getCurrentDatabase)
       throw new NoSuchTableException(db = db, table = name.table)
     }
   }
 
   private def requireTableNotExists(name: TableIdentifier): Unit = {
     if (tableExists(name)) {
-      val db = name.database.getOrElse(currentDb)
+      val db = name.database.getOrElse(getCurrentDatabase)
       throw new TableAlreadyExistsException(db = db, table = name.table)
     }
   }
@@ -201,21 +199,23 @@ class SessionCatalog(
     new Path(new Path(conf.warehousePath), database + ".db").toString
   }
 
-  // ----------------------------------------------------------------------------
-  // Tables
-  // ----------------------------------------------------------------------------
-  // There are two kinds of tables, temporary tables and metastore tables.
-  // Temporary tables are isolated across sessions and do not belong to any
-  // particular database. Metastore tables can be used across multiple
-  // sessions as their metadata is persisted in the underlying catalog.
-  // ----------------------------------------------------------------------------
+  // --------------------------------------------------------------------------------
+  // Tables/Views
+  // --------------------------------------------------------------------------------
+  // There are two kinds of tables/views, temporary views and metastore tables/views.
+  // Temporary views are isolated across sessions and do not belong to any particular
+  // database. Metastore tables/views can be used across multiple sessions as their
+  // metadata is persisted in the underlying catalog.
+  //
+  // Note that temporary tables are currently not supported.
+  // --------------------------------------------------------------------------------
 
-  // ----------------------------------------------------
-  // | Methods that interact with metastore tables only |
-  // ----------------------------------------------------
+  // ----------------------------------------------------------
+  // | Methods that interact with metastore tables/views only |
+  // ----------------------------------------------------------
 
   /**
-   * Create a metastore table in the database specified in `tableDefinition`.
+   * Create a metastore table/view in the database specified in `tableDefinition`.
    * If no such database is specified, create it in the current database.
    */
   def createTable(tableDefinition: CatalogTable, ignoreIfExists: Boolean): Unit = {
@@ -227,7 +227,7 @@ class SessionCatalog(
   }
 
   /**
-   * Alter the metadata of an existing metastore table identified by `tableDefinition`.
+   * Alter the metadata of an existing metastore table/view identified by `tableDefinition`.
    *
    * If no database is specified in `tableDefinition`, assume the table is in the
    * current database.
@@ -246,33 +246,23 @@ class SessionCatalog(
   }
 
   /**
-   * Retrieve the metadata of an existing metastore table.
-   * If no database is specified, assume the table is in the current database.
-   * If the specified table is not found in the database then a [[NoSuchTableException]] is thrown.
+   * Retrieve the metadata of an existing metastore table/view.
+   * If no database is specified, assume the table/view is in the current database.
+   * If the specified table/view is not found in the database then a [[NoSuchTableException]] is
+   * thrown.
   */
  def getTableMetadata(name: TableIdentifier): CatalogTable = {
    val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
    val table = formatTableName(name.table)
-    val tid = TableIdentifier(table)
-    if (isTemporaryTable(name)) {
-      CatalogTable(
-        identifier = tid,
-        tableType = CatalogTableType.VIEW,
-        storage = CatalogStorageFormat.empty,
-        schema = tempTables(table).output.toStructType,
-        properties = Map(),
-        viewText = None)
-    } else {
-      requireDbExists(db)
-      requireTableExists(TableIdentifier(table, Some(db)))
-      externalCatalog.getTable(db, table)
-    }
+    requireDbExists(db)
+    requireTableExists(TableIdentifier(table, Some(db)))
+    externalCatalog.getTable(db, table)
  }
 
  /**
-   * Retrieve the metadata of an existing metastore table.
+   * Retrieve the metadata of an existing metastore table/view.
    * If no database is specified, assume the table is in the current database.
-   * If the specified table is not found in the database then return None if it doesn't exist.
+   * If the specified table/view is not found in the database, return None.
    */
  def getTableMetadataOption(name: TableIdentifier): Option[CatalogTable] = {
    val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
@@ -281,6 +271,46 @@ class SessionCatalog(
    externalCatalog.getTableOption(db, table)
  }
 
+  /**
+   * Return whether a table/view with the specified name exists in metastore.
+   */
+  def tableExists(name: TableIdentifier): Boolean = {
+    val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
+    val table = formatTableName(name.table)
+    externalCatalog.tableExists(db, table)
+  }
+
+  /**
+   * Rename a table/view in metastore.
+   */
+  def renameTable(oldName: TableIdentifier, newName: String): Unit = {
+    val db = formatDatabaseName(oldName.database.getOrElse(getCurrentDatabase))
+    requireDbExists(db)
+    val oldTableName = formatTableName(oldName.table)
+    val newTableName = formatTableName(newName)
+    requireTableExists(TableIdentifier(oldTableName, Some(db)))
+    requireTableNotExists(TableIdentifier(newTableName, Some(db)))
+    externalCatalog.renameTable(db, oldTableName, newTableName)
+  }
+
+  /**
+   * Drop a table/view in metastore.
+   */
+  def dropTable(
+      name: TableIdentifier,
+      ignoreIfNotExists: Boolean,
+      purge: Boolean): Unit = {
+    val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
+    val table = formatTableName(name.table)
+    requireDbExists(db)
+    // When ignoreIfNotExists is true, no exception is thrown if the table does not exist.
+    if (externalCatalog.tableExists(db, table)) {
+      externalCatalog.dropTable(db, table, ignoreIfNotExists = true, purge = purge)
+    } else if (!ignoreIfNotExists) {
+      throw new NoSuchTableException(db = db, table = table)
+    }
+  }
+
  /**
   * Load files stored in given path into an existing metastore table.
   * If no database is specified, assume the table is in the current database.
@@ -325,150 +355,105 @@ class SessionCatalog(
    new Path(new Path(dbLocation), formatTableName(tableIdent.table)).toString
  }
 
-  // -------------------------------------------------------------
-  // | Methods that interact with temporary and metastore tables |
-  // -------------------------------------------------------------
+  // ----------------------------------------------
+  // | Methods that interact with temporary views |
+  // ----------------------------------------------
 
  /**
-   * Create a temporary table.
+   * Create a temporary view.
   */
  def createTempView(
      name: String,
-      tableDefinition: LogicalPlan,
-      overrideIfExists: Boolean): Unit = synchronized {
+      viewDefinition: LogicalPlan,
+      overrideIfExists: Boolean): Unit = {
    val table = formatTableName(name)
-    if (tempTables.contains(table) && !overrideIfExists) {
-      throw new TempTableAlreadyExistsException(name)
-    }
-    tempTables.put(table, tableDefinition)
+    tempViews.create(table, viewDefinition, overrideIfExists)
  }
 
  /**
-   * Rename a table.
-   *
-   * If a database is specified in `oldName`, this will rename the table in that database.
-   * If no database is specified, this will first attempt to rename a temporary table with
-   * the same name, then, if that does not exist, rename the table in the current database.
+   * Rename a temporary view. Returns true if it succeeds, false otherwise.
   */
-  def renameTable(oldName: TableIdentifier, newName: String): Unit = synchronized {
-    val db = formatDatabaseName(oldName.database.getOrElse(currentDb))
-    requireDbExists(db)
-    val oldTableName = formatTableName(oldName.table)
-    val newTableName = formatTableName(newName)
-    if (oldName.database.isDefined || !tempTables.contains(oldTableName)) {
-      requireTableExists(TableIdentifier(oldTableName, Some(db)))
-      requireTableNotExists(TableIdentifier(newTableName, Some(db)))
-      externalCatalog.renameTable(db, oldTableName, newTableName)
-    } else {
-      if (tempTables.contains(newTableName)) {
-        throw new AnalysisException(
-          s"RENAME TEMPORARY TABLE from '$oldName' to '$newName': destination table already exists")
-      }
-      val table = tempTables(oldTableName)
-      tempTables.remove(oldTableName)
-      tempTables.put(newTableName, table)
-    }
+  def renameTempView(oldName: String, newName: String): Boolean = {
+    val oldViewName = formatTableName(oldName)
+    val newViewName = formatTableName(newName)
+    tempViews.rename(oldViewName, newViewName)
  }
 
  /**
-   * Drop a table.
-   *
-   * If a database is specified in `name`, this will drop the table from that database.
-   * If no database is specified, this will first attempt to drop a temporary table with
-   * the same name, then, if that does not exist, drop the table from the current database.
+   * Drop a temporary view. Returns true if it succeeds, false otherwise.
   */
-  def dropTable(
-      name: TableIdentifier,
-      ignoreIfNotExists: Boolean,
-      purge: Boolean): Unit = synchronized {
-    val db = formatDatabaseName(name.database.getOrElse(currentDb))
-    val table = formatTableName(name.table)
-    if (name.database.isDefined || !tempTables.contains(table)) {
-      requireDbExists(db)
-      // When ignoreIfNotExists is false, no exception is issued when the table does not exist.
-      // Instead, log it as an error message.
-      if (tableExists(TableIdentifier(table, Option(db)))) {
-        externalCatalog.dropTable(db, table, ignoreIfNotExists = true, purge = purge)
-      } else if (!ignoreIfNotExists) {
-        throw new NoSuchTableException(db = db, table = table)
-      }
-    } else {
-      tempTables.remove(table)
-    }
+  def dropTempView(name: String): Boolean = {
+    tempViews.remove(formatTableName(name))
+  }
+
+  /**
+   * Alter the definition of an existing temporary view identified by name.
+   * Returns true if the temp view exists and is updated, false otherwise.
+ */ + def alterTempView( + name: String, + viewDefinition: LogicalPlan): Boolean = { + val viewName = formatTableName(name) + tempViews.update(viewName, viewDefinition) } + // ------------------------------------------------------------------------- + // | Methods that interact with temporary views and metastore tables/views | + // ------------------------------------------------------------------------- + /** - * Return a [[LogicalPlan]] that represents the given table or view. + * Return a [[LogicalPlan]] that represents the given table/view. * * If a database is specified in `name`, this will return the table/view from that database. - * If no database is specified, this will first attempt to return a temporary table/view with + * If no database is specified, this will first attempt to return a temporary view with * the same name, then, if that does not exist, return the table/view from the current database. * * If the relation is a view, the relation will be wrapped in a [[SubqueryAlias]] which will * track the name of the view. */ def lookupRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = { - synchronized { - val db = formatDatabaseName(name.database.getOrElse(currentDb)) - val table = formatTableName(name.table) - val relationAlias = alias.getOrElse(table) - if (name.database.isDefined || !tempTables.contains(table)) { - val metadata = externalCatalog.getTable(db, table) - val view = Option(metadata.tableType).collect { - case CatalogTableType.VIEW => name - } - SubqueryAlias(relationAlias, SimpleCatalogRelation(db, metadata), view) + val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase)) + val table = formatTableName(name.table) + + if (name.database.isDefined) { + lookupMetastoreRelation(db, table, alias) + } else { + val maybeTempView = tempViews.get(table) + if (maybeTempView.isDefined) { + SubqueryAlias(alias.getOrElse(table), maybeTempView.get, Some(name)) } else { - SubqueryAlias(relationAlias, tempTables(table), Option(name)) + lookupMetastoreRelation(db, table, alias) } } } - /** - * Return whether a table/view with the specified name exists. - * - * Note: If a database is explicitly specified, then this will return whether the table/view - * exists in that particular database instead. In that case, even if there is a temporary - * table with the same name, we will return false if the specified database does not - * contain the table/view. - */ - def tableExists(name: TableIdentifier): Boolean = synchronized { - val db = formatDatabaseName(name.database.getOrElse(currentDb)) - val table = formatTableName(name.table) - if (isTemporaryTable(name)) { - true - } else { - externalCatalog.tableExists(db, table) + protected def lookupMetastoreRelation( + db: String, + table: String, + alias: Option[String]): LogicalPlan = { + val metadata = externalCatalog.getTable(db, table) + val view = Option(metadata.tableType).collect { + case CatalogTableType.VIEW => TableIdentifier(table, Some(db)) } + SubqueryAlias(alias.getOrElse(table), SimpleCatalogRelation(db, metadata), view) } /** - * Return whether a table with the specified name is a temporary table. - * - * Note: The temporary table cache is checked only when database is not - * explicitly specified. - */ - def isTemporaryTable(name: TableIdentifier): Boolean = synchronized { - name.database.isEmpty && tempTables.contains(formatTableName(name.table)) - } - - /** - * List all tables in the specified database, including temporary tables. 
+ * List all tables/views in the specified database, including temporary views. */ def listTables(db: String): Seq[TableIdentifier] = listTables(db, "*") /** - * List all matching tables in the specified database, including temporary tables. + * List all matching tables/views in the specified database, including temporary views. */ def listTables(db: String, pattern: String): Seq[TableIdentifier] = { val dbName = formatDatabaseName(db) requireDbExists(dbName) - val dbTables = - externalCatalog.listTables(dbName, pattern).map { t => TableIdentifier(t, Some(dbName)) } - synchronized { - val _tempTables = StringUtils.filterPattern(tempTables.keys.toSeq, pattern) - .map { t => TableIdentifier(t) } - dbTables ++ _tempTables + val metastoreTables = externalCatalog.listTables(dbName, pattern).map { t => + TableIdentifier(t, Some(dbName)) + } + metastoreTables ++ tempViews.listNames(pattern).map { t => + TableIdentifier(t, None) } } @@ -476,29 +461,24 @@ class SessionCatalog( * Refresh the cache entry for a metastore table, if any. */ def refreshTable(name: TableIdentifier): Unit = { - // Go through temporary tables and invalidate them. - // If the database is defined, this is definitely not a temp table. - // If the database is not defined, there is a good chance this is a temp table. + // Go through temporary views and invalidate them. + // If the database is defined, this is definitely not a temp view. + // If the database is not defined, there is a good chance this is a temp view. if (name.database.isEmpty) { - tempTables.get(formatTableName(name.table)).foreach(_.refresh()) + tempViews.get(formatTableName(name.table)).foreach(_.refresh()) } } /** - * Drop all existing temporary tables. + * Drop all existing temporary views. * For testing only. */ - def clearTempTables(): Unit = synchronized { - tempTables.clear() - } + def clearTempViews(): Unit = tempViews.clear() /** - * Return a temporary table exactly as it was stored. - * For testing only. + * Return a temporary view exactly as it was stored. */ - private[catalog] def getTempTable(name: String): Option[LogicalPlan] = synchronized { - tempTables.get(formatTableName(name)) - } + def getTempView(name: String): Option[LogicalPlan] = tempViews.get(formatTableName(name)) // ---------------------------------------------------------------------------- // Partitions @@ -893,7 +873,7 @@ class SessionCatalog( listDatabases().filter(_ != DEFAULT_DATABASE).foreach { db => dropDatabase(db, ignoreIfNotExists = false, cascade = true) } - listTables(DEFAULT_DATABASE).foreach { table => + listTables(DEFAULT_DATABASE).filter(_.database.isDefined).foreach { table => dropTable(table, ignoreIfNotExists = false, purge = false) } listFunctions(DEFAULT_DATABASE).map(_._1).foreach { func => @@ -903,7 +883,7 @@ class SessionCatalog( dropTempFunction(func.funcName, ignoreIfNotExists = false) } } - tempTables.clear() + tempViews.clear() functionRegistry.clear() // restore built-in functions FunctionRegistry.builtin.listFunction().foreach { f => diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala new file mode 100644 index 0000000000000..2e8be1419c386 --- /dev/null +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.catalyst.catalog
+
+import javax.annotation.concurrent.GuardedBy
+
+import scala.collection.mutable
+
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.catalyst.analysis.TempViewAlreadyExistsException
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.util.StringUtils
+
+
+/**
+ * A thread-safe manager for a list of temp views, providing atomic operations on them.
+ */
+class TempViewManager {
+
+  /** List of temporary views, mapping from view name to logical plan. */
+  @GuardedBy("this")
+  private val tempViews = new mutable.HashMap[String, LogicalPlan]
+
+  def get(name: String): Option[LogicalPlan] = synchronized {
+    tempViews.get(name)
+  }
+
+  def create(
+      name: String,
+      viewDefinition: LogicalPlan,
+      overrideIfExists: Boolean): Unit = synchronized {
+    if (!overrideIfExists && tempViews.contains(name)) {
+      throw new TempViewAlreadyExistsException(name)
+    }
+    tempViews.put(name, viewDefinition)
+  }
+
+  def update(
+      name: String,
+      viewDefinition: LogicalPlan): Boolean = synchronized {
+    // Only update it when a view with the given name exists.
+ if (tempViews.contains(name)) { + tempViews.put(name, viewDefinition) + true + } else { + false + } + } + + def remove(name: String): Boolean = synchronized { + tempViews.remove(name).isDefined + } + + def rename(oldName: String, newName: String): Boolean = synchronized { + if (tempViews.contains(oldName)) { + if (tempViews.contains(newName)) { + throw new AnalysisException( + s"rename temporary view from '$oldName' to '$newName': destination view already exists") + } + + val viewDefinition = tempViews(oldName) + tempViews.remove(oldName) + tempViews.put(newName, viewDefinition) + true + } else { + false + } + } + + def listNames(pattern: String): Seq[String] = synchronized { + StringUtils.filterPattern(tempViews.keys.toSeq, pattern) + } + + def clear(): Unit = synchronized { + tempViews.clear() + } +} diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala index 012df629bbdef..857366581bcff 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala @@ -201,16 +201,16 @@ class SessionCatalogSuite extends SparkFunSuite { val tempTable2 = Range(1, 20, 2, 10) catalog.createTempView("tbl1", tempTable1, overrideIfExists = false) catalog.createTempView("tbl2", tempTable2, overrideIfExists = false) - assert(catalog.getTempTable("tbl1") == Option(tempTable1)) - assert(catalog.getTempTable("tbl2") == Option(tempTable2)) - assert(catalog.getTempTable("tbl3").isEmpty) + assert(catalog.getTempView("tbl1") == Option(tempTable1)) + assert(catalog.getTempView("tbl2") == Option(tempTable2)) + assert(catalog.getTempView("tbl3").isEmpty) // Temporary table already exists - intercept[TempTableAlreadyExistsException] { + intercept[TempViewAlreadyExistsException] { catalog.createTempView("tbl1", tempTable1, overrideIfExists = false) } // Temporary table already exists but we override it catalog.createTempView("tbl1", tempTable2, overrideIfExists = true) - assert(catalog.getTempTable("tbl1") == Option(tempTable2)) + assert(catalog.getTempView("tbl1") == Option(tempTable2)) } test("drop table") { @@ -245,28 +245,17 @@ class SessionCatalogSuite extends SparkFunSuite { purge = false) } - test("drop temp table") { + test("drop temp view") { val externalCatalog = newBasicCatalog() val sessionCatalog = new SessionCatalog(externalCatalog) val tempTable = Range(1, 10, 2, 10) sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false) sessionCatalog.setCurrentDatabase("db2") - assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable)) + assert(sessionCatalog.getTempView("tbl1") == Some(tempTable)) assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2")) - // If database is not specified, temp table should be dropped first - sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false) - assert(sessionCatalog.getTempTable("tbl1") == None) + sessionCatalog.dropTempView("tbl1") + assert(sessionCatalog.getTempView("tbl1").isEmpty) assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2")) - // If temp table does not exist, the table in the current database should be dropped - sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false) - assert(externalCatalog.listTables("db2").toSet == Set("tbl2")) - // If database is specified, temp 
tables are never dropped - sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false) - sessionCatalog.createTable(newTable("tbl1", "db2"), ignoreIfExists = false) - sessionCatalog.dropTable(TableIdentifier("tbl1", Some("db2")), ignoreIfNotExists = false, - purge = false) - assert(sessionCatalog.getTempTable("tbl1") == Some(tempTable)) - assert(externalCatalog.listTables("db2").toSet == Set("tbl2")) } test("rename table") { @@ -297,24 +286,18 @@ class SessionCatalogSuite extends SparkFunSuite { } } - test("rename temp table") { + test("rename temp view") { val externalCatalog = newBasicCatalog() val sessionCatalog = new SessionCatalog(externalCatalog) val tempTable = Range(1, 10, 2, 10) sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false) sessionCatalog.setCurrentDatabase("db2") - assert(sessionCatalog.getTempTable("tbl1") == Option(tempTable)) + assert(sessionCatalog.getTempView("tbl1") == Option(tempTable)) assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2")) - // If database is not specified, temp table should be renamed first - sessionCatalog.renameTable(TableIdentifier("tbl1"), "tbl3") - assert(sessionCatalog.getTempTable("tbl1").isEmpty) - assert(sessionCatalog.getTempTable("tbl3") == Option(tempTable)) + sessionCatalog.renameTempView("tbl1", "tbl3") + assert(sessionCatalog.getTempView("tbl1").isEmpty) + assert(sessionCatalog.getTempView("tbl3") == Option(tempTable)) assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2")) - // If database is specified, temp tables are never renamed - sessionCatalog.renameTable(TableIdentifier("tbl2", Some("db2")), "tbl4") - assert(sessionCatalog.getTempTable("tbl3") == Option(tempTable)) - assert(sessionCatalog.getTempTable("tbl4").isEmpty) - assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl4")) } test("alter table") { @@ -389,7 +372,7 @@ class SessionCatalogSuite extends SparkFunSuite { assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1")) == SubqueryAlias("tbl1", tempTable1, Some(TableIdentifier("tbl1")))) // Then, if that does not exist, look up the relation in the current database - sessionCatalog.dropTable(TableIdentifier("tbl1"), ignoreIfNotExists = false, purge = false) + sessionCatalog.dropTempView("tbl1") assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1")) == SubqueryAlias("tbl1", SimpleCatalogRelation("db2", metastoreTable1), None)) } @@ -423,48 +406,15 @@ class SessionCatalogSuite extends SparkFunSuite { assert(!catalog.tableExists(TableIdentifier("tbl3", Some("db2")))) assert(!catalog.tableExists(TableIdentifier("tbl1", Some("db1")))) assert(!catalog.tableExists(TableIdentifier("tbl2", Some("db1")))) - // If database is explicitly specified, do not check temporary tables - val tempTable = Range(1, 10, 1, 10) - catalog.createTempView("tbl3", tempTable, overrideIfExists = false) - assert(!catalog.tableExists(TableIdentifier("tbl3", Some("db2")))) // If database is not explicitly specified, check the current database catalog.setCurrentDatabase("db2") assert(catalog.tableExists(TableIdentifier("tbl1"))) assert(catalog.tableExists(TableIdentifier("tbl2"))) - assert(catalog.tableExists(TableIdentifier("tbl3"))) - } - test("tableExists on temporary views") { - val catalog = new SessionCatalog(newBasicCatalog()) - val tempTable = Range(1, 10, 2, 10) - assert(!catalog.tableExists(TableIdentifier("view1"))) - assert(!catalog.tableExists(TableIdentifier("view1", Some("default")))) - catalog.createTempView("view1", tempTable, overrideIfExists = 
false)
-    assert(catalog.tableExists(TableIdentifier("view1")))
-    assert(!catalog.tableExists(TableIdentifier("view1", Some("default"))))
-  }
-
-  test("getTableMetadata on temporary views") {
-    val catalog = new SessionCatalog(newBasicCatalog())
-    val tempTable = Range(1, 10, 2, 10)
-    val m = intercept[AnalysisException] {
-      catalog.getTableMetadata(TableIdentifier("view1"))
-    }.getMessage
-    assert(m.contains("Table or view 'view1' not found in database 'default'"))
-
-    val m2 = intercept[AnalysisException] {
-      catalog.getTableMetadata(TableIdentifier("view1", Some("default")))
-    }.getMessage
-    assert(m2.contains("Table or view 'view1' not found in database 'default'"))
-
-    catalog.createTempView("view1", tempTable, overrideIfExists = false)
-    assert(catalog.getTableMetadata(TableIdentifier("view1")).identifier.table == "view1")
-    assert(catalog.getTableMetadata(TableIdentifier("view1")).schema(0).name == "id")
-
-    val m3 = intercept[AnalysisException] {
-      catalog.getTableMetadata(TableIdentifier("view1", Some("default")))
-    }.getMessage
-    assert(m3.contains("Table or view 'view1' not found in database 'default'"))
+    // Even if database is not specified and there exists a same-name temp view, `tableExists`
+    // should ignore temp views and only check metastore tables/views.
+    catalog.createTempView("tbl3", Range(1, 10, 1, 10), overrideIfExists = false)
+    assert(!catalog.tableExists(TableIdentifier("tbl3")))
  }
 
  test("list tables without pattern") {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index bc1c4f85e3315..4e0b61754fd21 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -189,31 +189,39 @@ case class DropTableCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
-    if (!catalog.tableExists(tableName)) {
-      if (!ifExists) {
-        val objectName = if (isView) "View" else "Table"
-        throw new AnalysisException(s"$objectName to drop '$tableName' does not exist")
-      }
-    } else {
-      // If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view
-      // issue an exception.
-      catalog.getTableMetadataOption(tableName).map(_.tableType match {
-        case CatalogTableType.VIEW if !isView =>
-          throw new AnalysisException(
-            "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
-        case o if o != CatalogTableType.VIEW && isView =>
-          throw new AnalysisException(
-            s"Cannot drop a table with DROP VIEW. Please use DROP TABLE instead")
-        case _ =>
-      })
-      try {
-        sparkSession.sharedState.cacheManager.uncacheQuery(
-          sparkSession.table(tableName.quotedString))
-      } catch {
-        case NonFatal(e) => log.warn(e.toString, e)
+
+    // If the table name contains a database part, drop the metastore table directly; otherwise,
+    // try to drop a temp view first and, if no such view exists, drop the metastore table.
+    val dropMetastoreTable =
+      tableName.database.isDefined || !catalog.dropTempView(tableName.table)
+
+    if (dropMetastoreTable) {
+      if (!catalog.tableExists(tableName)) {
+        if (!ifExists) {
+          val objectName = if (isView) "View" else "Table"
+          throw new AnalysisException(s"$objectName to drop '$tableName' does not exist")
+        }
+      } else {
+        // If the command DROP VIEW is to drop a table or DROP TABLE is to drop a view,
+        // issue an exception.
+        catalog.getTableMetadataOption(tableName).map(_.tableType match {
+          case CatalogTableType.VIEW if !isView =>
+            throw new AnalysisException(
+              "Cannot drop a view with DROP TABLE. Please use DROP VIEW instead")
+          case o if o != CatalogTableType.VIEW && isView =>
+            throw new AnalysisException(
+              s"Cannot drop a table with DROP VIEW. Please use DROP TABLE instead")
+          case _ =>
+        })
+        try {
+          sparkSession.sharedState.cacheManager.uncacheQuery(
+            sparkSession.table(tableName.quotedString))
+        } catch {
+          case NonFatal(e) => log.warn(e.toString, e)
+        }
+        catalog.refreshTable(tableName)
+        catalog.dropTable(tableName, ifExists, purge)
       }
-      catalog.refreshTable(tableName)
-      catalog.dropTable(tableName, ifExists, purge)
     }
     Seq.empty[Row]
   }
@@ -470,10 +478,6 @@ case class AlterTableRecoverPartitionsCommand(
     if (!catalog.tableExists(tableName)) {
       throw new AnalysisException(s"Table $tableName in $cmd does not exist.")
     }
-    if (catalog.isTemporaryTable(tableName)) {
-      throw new AnalysisException(
-        s"Operation not allowed: $cmd on temporary tables: $tableName")
-    }
     val table = catalog.getTableMetadata(tableName)
     if (DDLUtils.isDatasourceTable(table)) {
       throw new AnalysisException(
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 027f3588e2922..586b66c87534c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -29,13 +29,12 @@ import org.apache.hadoop.fs.Path
 
 import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogStorageFormat, CatalogTable, CatalogTableType}
+import org.apache.spark.sql.catalyst.catalog._
 import org.apache.spark.sql.catalyst.catalog.CatalogTableType._
 import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
 import org.apache.spark.sql.catalyst.util.quoteIdentifier
 import org.apache.spark.sql.execution.datasources.PartitioningUtils
-import org.apache.spark.sql.internal.HiveSerDe
 import org.apache.spark.sql.types._
 import org.apache.spark.util.Utils
 
@@ -60,12 +59,25 @@ case class CreateTableLikeCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
-    if (!catalog.tableExists(sourceTable)) {
-      throw new AnalysisException(
-        s"Source table in CREATE TABLE LIKE does not exist: '$sourceTable'")
-    }
 
-    val sourceTableDesc = catalog.getTableMetadata(sourceTable)
+    // If the source table name contains a database part, it must be a metastore table/view, so we
+    // should get its table metadata from the metastore directly. Otherwise, try to find a temp
+    // view matching the name first; if no such view exists, use the metastore table/view.
+    val sourceTableDesc = if (sourceTable.database.isDefined) {
+      catalog.getTableMetadata(sourceTable)
+    } else {
+      val maybeTempView = catalog.getTempView(sourceTable.table)
+      if (maybeTempView.isDefined) {
+        CatalogTable(
+          identifier = sourceTable,
+          tableType = CatalogTableType.VIEW,
+          storage = CatalogStorageFormat.empty,
+          schema = maybeTempView.get.schema
+        )
+      } else {
+        catalog.getTableMetadata(sourceTable)
+      }
+    }
 
     // Storage format
     val newStorage =
@@ -159,12 +171,13 @@ case class AlterTableRenameCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
     DDLUtils.verifyAlterTableType(catalog, oldName, isView)
-    // If this is a temp view, just rename the view.
-    // Otherwise, if this is a real table, we also need to uncache and invalidate the table.
-    val isTemporary = catalog.isTemporaryTable(oldName)
-    if (isTemporary) {
-      catalog.renameTable(oldName, newName)
-    } else {
+
+    // If the old table name contains a database part, rename the metastore table directly;
+    // otherwise, try to rename a temp view first and, if no such view exists, rename the metastore table.
+    val renameMetastoreTable =
+      oldName.database.isDefined || !catalog.renameTempView(oldName.table, newName)
+
+    if (renameMetastoreTable) {
       val newTblName = TableIdentifier(newName, oldName.database)
       // If an exception is thrown here we can just assume the table is uncached;
       // this can happen with Hive tables when the underlying catalog is in-memory.
@@ -339,10 +352,6 @@ case class TruncateTableCommand(
     if (!catalog.tableExists(tableName)) {
       throw new AnalysisException(s"Table $tableName in TRUNCATE TABLE does not exist.")
     }
-    if (catalog.isTemporaryTable(tableName)) {
-      throw new AnalysisException(
-        s"Operation not allowed: TRUNCATE TABLE on temporary tables: $tableName")
-    }
     val table = catalog.getTableMetadata(tableName)
     if (table.tableType == CatalogTableType.EXTERNAL) {
       throw new AnalysisException(
@@ -424,24 +433,33 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
     val result = new ArrayBuffer[Row]
     val catalog = sparkSession.sessionState.catalog
 
-    if (catalog.isTemporaryTable(table)) {
-      describeSchema(catalog.lookupRelation(table).schema, result)
+    if (table.database.isDefined) {
+      describeMetastoreTable(catalog, result)
     } else {
-      val metadata = catalog.getTableMetadata(table)
-      describeSchema(metadata.schema, result)
-
-      if (isExtended) {
-        describeExtended(metadata, result)
-      } else if (isFormatted) {
-        describeFormatted(metadata, result)
+      val maybeTempView = catalog.getTempView(table.table)
+      if (maybeTempView.isDefined) {
+        describeSchema(maybeTempView.get.schema, result)
       } else {
-        describePartitionInfo(metadata, result)
+        describeMetastoreTable(catalog, result)
       }
     }
 
     result
   }
 
+  private def describeMetastoreTable(catalog: SessionCatalog, result: ArrayBuffer[Row]): Unit = {
+    val metadata = catalog.getTableMetadata(table)
+    describeSchema(metadata.schema, result)
+
+    if (isExtended) {
+      describeExtended(metadata, result)
+    } else if (isFormatted) {
+      describeFormatted(metadata, result)
+    } else {
+      describePartitionInfo(metadata, result)
+    }
+  }
+
   private def describePartitionInfo(table: CatalogTable, buffer: ArrayBuffer[Row]): Unit = {
     if (table.partitionColumnNames.nonEmpty) {
       append(buffer, "# Partition Information", "", "")
@@ -517,7 +535,7 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
 
 
 /**
- * A command for users to get tables in the given database.
+ * A command for users to get tables in the given database and temporary views in the current session.
  * If a databaseName is not given, the current database will be used.
  * The syntax of using this command in SQL is:
  * {{{
@@ -570,29 +588,21 @@ case class ShowTablePropertiesCommand(table: TableIdentifier, propertyKey: Optio
   }
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    val catalog = sparkSession.sessionState.catalog
-
-    if (catalog.isTemporaryTable(table)) {
-      Seq.empty[Row]
-    } else {
-      val catalogTable = sparkSession.sessionState.catalog.getTableMetadata(table)
-
-      propertyKey match {
-        case Some(p) =>
-          val propValue = catalogTable
-            .properties
-            .getOrElse(p, s"Table ${catalogTable.qualifiedName} does not have property: $p")
-          Seq(Row(propValue))
-        case None =>
-          catalogTable.properties.map(p => Row(p._1, p._2)).toSeq
-      }
+    val tableMetadata = sparkSession.sessionState.catalog.getTableMetadata(table)
+    propertyKey match {
+      case Some(p) =>
+        val propValue = tableMetadata
+          .properties
+          .getOrElse(p, s"Table ${tableMetadata.qualifiedName} does not have property: $p")
+        Seq(Row(propValue))
+      case None =>
+        tableMetadata.properties.map(p => Row(p._1, p._2)).toSeq
     }
   }
 }
 
 /**
- * A command to list the column names for a table. This function creates a
- * [[ShowColumnsCommand]] logical plan.
+ * A command to list the column names for a table or temp view.
  *
  * The syntax of using this command in SQL is:
  * {{{
@@ -605,9 +615,19 @@ case class ShowColumnsCommand(table: TableIdentifier) extends RunnableCommand {
   }
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
-    sparkSession.sessionState.catalog.getTableMetadata(table).schema.map { c =>
-      Row(c.name)
+    val catalog = sparkSession.sessionState.catalog
+    val schema = if (table.database.isDefined) {
+      catalog.getTableMetadata(table).schema
+    } else {
+      val maybeTempView = catalog.getTempView(table.table)
+      if (maybeTempView.isDefined) {
+        maybeTempView.get.schema
+      } else {
+        catalog.getTableMetadata(table).schema
+      }
     }
+
+    schema.map { c => Row(c.name) }
   }
 }
 
@@ -641,12 +661,6 @@ case class ShowPartitionsCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
-
-    if (catalog.isTemporaryTable(table)) {
-      throw new AnalysisException(
-        s"SHOW PARTITIONS is not allowed on a temporary table: ${table.unquotedString}")
-    }
-
     val tab = catalog.getTableMetadata(table)
 
     /**
@@ -699,12 +713,6 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
-
-    if (catalog.isTemporaryTable(table)) {
-      throw new AnalysisException(
-        s"SHOW CREATE TABLE cannot be applied to temporary table")
-    }
-
     if (!catalog.tableExists(table)) {
       throw new AnalysisException(s"Table $table doesn't exist")
     }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
index 15340ee921f68..9e61c8f29c355 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/views.scala
@@ -104,18 +104,13 @@ case class CreateViewCommand(
     if (isTemporary) {
       createTemporaryView(sparkSession, analyzedPlan)
     } else {
-      // Adds default database for permanent table if it doesn't exist, so that tableExists()
-      // only check permanent tables.
-      val database = name.database.getOrElse(sessionState.catalog.getCurrentDatabase)
-      val qualifiedName = name.copy(database = Option(database))
-
-      if (sessionState.catalog.tableExists(qualifiedName)) {
-        val tableMetadata = sessionState.catalog.getTableMetadata(qualifiedName)
+      if (sessionState.catalog.tableExists(name)) {
+        val tableMetadata = sessionState.catalog.getTableMetadata(name)
         if (allowExisting) {
           // Handles `CREATE VIEW IF NOT EXISTS v0 AS SELECT ...`. Does nothing when the target view
           // already exists.
         } else if (tableMetadata.tableType != CatalogTableType.VIEW) {
-          throw new AnalysisException(s"$qualifiedName is not a view")
+          throw new AnalysisException(s"$name is not a view")
         } else if (replace) {
           // Handles `CREATE OR REPLACE VIEW v0 AS SELECT ...`
           sessionState.catalog.alterTable(prepareTable(sparkSession, analyzedPlan))
@@ -123,7 +118,7 @@ case class CreateViewCommand(
           // Handles `CREATE VIEW v0 AS SELECT ...`. Throws exception when the target view already
           // exists.
           throw new AnalysisException(
-            s"View $qualifiedName already exists. If you want to update the view definition, " +
+            s"View $name already exists. If you want to update the view definition, " +
               "please use ALTER VIEW AS or CREATE OR REPLACE VIEW AS")
         }
       } else {
@@ -217,41 +212,40 @@ case class AlterViewAsCommand(
 
   override protected def innerChildren: Seq[QueryPlan[_]] = Seq(query)
 
   override def run(session: SparkSession): Seq[Row] = {
+    val catalog = session.sessionState.catalog
     // If the plan cannot be analyzed, throw an exception and don't proceed.
     val qe = session.sessionState.executePlan(query)
     qe.assertAnalyzed()
     val analyzedPlan = qe.analyzed
 
-    if (session.sessionState.catalog.isTemporaryTable(name)) {
-      session.sessionState.catalog.createTempView(name.table, analyzedPlan, overrideIfExists = true)
-    } else {
-      alterPermanentView(session, analyzedPlan)
-    }
+    // If the view name contains a database part, alter a permanent view directly; otherwise, try
+    // to alter a temp view first and, if no such view exists, alter a permanent view.
+    val alterPermanentView =
+      name.database.isDefined || !catalog.alterTempView(name.table, analyzedPlan)
 
-    Seq.empty[Row]
-  }
-
-  private def alterPermanentView(session: SparkSession, analyzedPlan: LogicalPlan): Unit = {
-    val viewMeta = session.sessionState.catalog.getTableMetadata(name)
-    if (viewMeta.tableType != CatalogTableType.VIEW) {
-      throw new AnalysisException(s"${viewMeta.identifier} is not a view.")
-    }
+    if (alterPermanentView) {
+      val viewMeta = catalog.getTableMetadata(name)
+      if (viewMeta.tableType != CatalogTableType.VIEW) {
+        throw new AnalysisException(s"${viewMeta.identifier} is not a view.")
+      }
 
-    val viewSQL: String = new SQLBuilder(analyzedPlan).toSQL
-    // Validate the view SQL - make sure we can parse it and analyze it.
-    // If we cannot analyze the generated query, there is probably a bug in SQL generation.
-    try {
-      session.sql(viewSQL).queryExecution.assertAnalyzed()
-    } catch {
-      case NonFatal(e) =>
-        throw new RuntimeException(s"Failed to analyze the canonicalized SQL: $viewSQL", e)
-    }
+      val viewSQL: String = new SQLBuilder(analyzedPlan).toSQL
+      // Validate the view SQL - make sure we can parse it and analyze it.
+      // If we cannot analyze the generated query, there is probably a bug in SQL generation.
+ try { + session.sql(viewSQL).queryExecution.assertAnalyzed() + } catch { + case NonFatal(e) => + throw new RuntimeException(s"Failed to analyze the canonicalized SQL: $viewSQL", e) + } - val updatedViewMeta = viewMeta.copy( - schema = analyzedPlan.schema, - viewOriginalText = Some(originalText), - viewText = Some(viewSQL)) + val updatedViewMeta = viewMeta.copy( + schema = analyzedPlan.schema, + viewOriginalText = Some(originalText), + viewText = Some(viewSQL)) - session.sessionState.catalog.alterTable(updatedViewMeta) + catalog.alterTable(updatedViewMeta) + } + Seq.empty[Row] } } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala index 1b1e2123b7c47..9b6f7ed46881c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/ddl.scala @@ -49,7 +49,7 @@ case class CreateTempViewUsing( if (tableIdent.database.isDefined) { throw new AnalysisException( - s"Temporary table '$tableIdent' should not have specified a database") + s"Temporary view '$tableIdent' should not have specified a database") } def run(sparkSession: SparkSession): Seq[Row] = { diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala index 1f87f0e73a3ba..d20f56c70cf8c 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala @@ -134,11 +134,26 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog { } /** - * Returns a list of columns for the given table in the current database. + * Returns a list of columns for the temp view matching the given name, or for the given table in + * the current database. 
*/ @throws[AnalysisException]("table does not exist") override def listColumns(tableName: String): Dataset[Column] = { - listColumns(TableIdentifier(tableName, None)) + val maybeTempView = sessionCatalog.getTempView(tableName) + if (maybeTempView.isDefined) { + val columns = maybeTempView.get.schema.map { c => + new Column( + name = c.name, + description = c.getComment().orNull, + dataType = c.dataType.catalogString, + nullable = c.nullable, + isPartition = false, + isBucket = false) + } + CatalogImpl.makeDataset(columns, sparkSession) + } else { + listColumns(TableIdentifier(tableName, None)) + } } /** @@ -285,7 +300,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog { */ override def dropTempView(viewName: String): Unit = { sparkSession.sharedState.cacheManager.uncacheQuery(sparkSession.table(viewName)) - sessionCatalog.dropTable(TableIdentifier(viewName), ignoreIfNotExists = true, purge = false) + sessionCatalog.dropTempView(viewName) } /** diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala index 001c1a1d85313..f599216305fbf 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLContextSuite.scala @@ -94,8 +94,7 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext { sqlContext.sql("SHOW tables").filter("tableName = 'listtablessuitetable'").collect().toSeq == Row("listtablessuitetable", true) :: Nil) - sqlContext.sessionState.catalog.dropTable( - TableIdentifier("listtablessuitetable"), ignoreIfNotExists = true, purge = false) + sqlContext.sessionState.catalog.dropTempView("listtablessuitetable") assert(sqlContext.tables().filter("tableName = 'listtablessuitetable'").count() === 0) } @@ -111,8 +110,7 @@ class SQLContextSuite extends SparkFunSuite with SharedSparkContext { sqlContext.sql("show TABLES in default").filter("tableName = 'listtablessuitetable'") .collect().toSeq == Row("listtablessuitetable", true) :: Nil) - sqlContext.sessionState.catalog.dropTable( - TableIdentifier("listtablessuitetable"), ignoreIfNotExists = true, purge = false) + sqlContext.sessionState.catalog.dropTempView("listtablessuitetable") assert(sqlContext.tables().filter("tableName = 'listtablessuitetable'").count() === 0) } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index eac266cba55b8..425298b861d2d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -2661,4 +2661,15 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext { data.selectExpr("`part.col1`", "`col.1`")) } } + + test("CREATE TABLE USING if a same-name temp view exists") { + withTable("same_name") { + withTempView("same_name") { + spark.range(10).createTempView("same_name") + sql("CREATE TABLE same_name(i int) USING json") + checkAnswer(spark.table("same_name"), spark.range(10).toDF()) + assert(spark.table("default.same_name").collect().isEmpty) + } + } + } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index fd35c987cab59..b1fe077dbaa3b 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ 
-25,7 +25,7 @@ import org.scalatest.BeforeAndAfterEach import org.apache.spark.internal.config._ import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode} import org.apache.spark.sql.catalyst.TableIdentifier -import org.apache.spark.sql.catalyst.analysis.{DatabaseAlreadyExistsException, FunctionRegistry, NoSuchPartitionException, NoSuchTableException, TempTableAlreadyExistsException} +import org.apache.spark.sql.catalyst.analysis.{DatabaseAlreadyExistsException, FunctionRegistry, NoSuchPartitionException, NoSuchTableException, TempViewAlreadyExistsException} import org.apache.spark.sql.catalyst.catalog.{BucketSpec, CatalogDatabase, CatalogStorageFormat} import org.apache.spark.sql.catalyst.catalog.{CatalogTable, CatalogTableType} import org.apache.spark.sql.catalyst.catalog.{CatalogTablePartition, SessionCatalog} @@ -642,7 +642,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { Row("1997", "Ford") :: Nil) // Fails if creating a new view with the same name - intercept[TempTableAlreadyExistsException] { + intercept[TempViewAlreadyExistsException] { sql(s"CREATE TEMPORARY VIEW testview USING " + s"org.apache.spark.sql.execution.datasources.csv.CSVFileFormat OPTIONS (PATH '$csvFile')") } @@ -715,7 +715,7 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { sql("ALTER TABLE tab1 RENAME TO tab2") } assert(e.getMessage.contains( - "RENAME TEMPORARY TABLE from '`tab1`' to 'tab2': destination table already exists")) + "rename temporary view from 'tab1' to 'tab2': destination view already exists")) val catalog = spark.sessionState.catalog assert(catalog.listTables("default") == Seq(TableIdentifier("tab1"), TableIdentifier("tab2"))) @@ -1597,14 +1597,16 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { } test("truncate table - external table, temporary table, view (not allowed)") { - import testImplicits._ - val path = Utils.createTempDir().getAbsolutePath - (1 to 10).map { i => (i, i) }.toDF("a", "b").createTempView("my_temp_tab") - sql(s"CREATE EXTERNAL TABLE my_ext_tab LOCATION '$path'") - sql(s"CREATE VIEW my_view AS SELECT 1") - assertUnsupported("TRUNCATE TABLE my_temp_tab") - assertUnsupported("TRUNCATE TABLE my_ext_tab") - assertUnsupported("TRUNCATE TABLE my_view") + withTempPath { path => + withTable("my_ext_tab") { + withView("my_view") { + sql(s"CREATE EXTERNAL TABLE my_ext_tab LOCATION '${path.getAbsolutePath}'") + sql(s"CREATE VIEW my_view AS SELECT 1") + assertUnsupported("TRUNCATE TABLE my_ext_tab") + assertUnsupported("TRUNCATE TABLE my_view") + } + } + } } test("truncate table - non-partitioned table (not allowed)") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala index b221eed7b2426..5d0609d745532 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala @@ -63,7 +63,10 @@ class CatalogSuite } private def dropTable(name: String, db: Option[String] = None): Unit = { - sessionCatalog.dropTable(TableIdentifier(name, db), ignoreIfNotExists = false, purge = false) + val dropMetastoreTable = db.isDefined || !sessionCatalog.dropTempView(name) + if (dropMetastoreTable) { + sessionCatalog.dropTable(TableIdentifier(name, db), ignoreIfNotExists = false, purge = false) + } } private def createFunction(name: String, db: Option[String] = None): Unit = { @@ -322,6 
+325,14 @@ class CatalogSuite assert(e2.message == "Cannot create a file-based external data source table without path") } + test("dropTempView if a same-name table exists") { + withTable("same_name") { + sql("CREATE TABLE same_name(i int) USING json") + spark.catalog.dropTempView("same_name") + assert(spark.sessionState.catalog.tableExists(TableIdentifier("same_name"))) + } + } + // TODO: add tests for the rest of them } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala index 63b0e4588e4a6..403631c8651d9 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala @@ -457,6 +457,17 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSQLContext with Be checkAnswer(df2, df) } + test("save as table if a same-name temp view exists") { + withTable("same_name") { + withTempView("same_name") { + spark.range(10).createTempView("same_name") + spark.range(20).write.saveAsTable("same_name") + checkAnswer(spark.table("same_name"), spark.range(10).toDF()) + checkAnswer(spark.table("default.same_name"), spark.range(20).toDF()) + } + } + } + private def testRead( df: => DataFrame, expectedResult: Seq[String], diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala index 85c509847d8ef..1034fc1a0afd0 100644 --- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala +++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala @@ -53,19 +53,11 @@ private[sql] class HiveSessionCatalog( conf, hadoopConf) { - override def lookupRelation(name: TableIdentifier, alias: Option[String]): LogicalPlan = { - val table = formatTableName(name.table) - if (name.database.isDefined || !tempTables.contains(table)) { - val database = name.database.map(formatDatabaseName) - val newName = name.copy(database = database, table = table) - metastoreCatalog.lookupRelation(newName, alias) - } else { - val relation = tempTables(table) - val tableWithQualifiers = SubqueryAlias(table, relation, None) - // If an alias was specified by the lookup, wrap the plan in a subquery so that - // attributes are properly qualified with this alias. 
-      alias.map(a => SubqueryAlias(a, tableWithQualifiers, None)).getOrElse(tableWithQualifiers)
-    }
+  override protected def lookupMetastoreRelation(
+      db: String,
+      table: String,
+      alias: Option[String]): LogicalPlan = {
+    metastoreCatalog.lookupRelation(TableIdentifier(table, Some(db)), alias)
   }
 
   // ----------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
index 163f210802b53..382e74d841337 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
@@ -424,7 +424,7 @@ private[hive] class TestHiveSparkSession(
       sharedState.cacheManager.clearCache()
       loadedTables.clear()
-      sessionState.catalog.clearTempTables()
+      sessionState.catalog.clearTempViews()
       sessionState.catalog.invalidateCache()
 
       sessionState.metadataHive.reset()
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
index df33731df2d00..040310b9825c9 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala
@@ -140,19 +140,6 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
     checkAnswer(sql("SHOW TBLPROPERTIES parquet_tab2('`prop2Key`')"), Row("prop2Val"))
   }
 
-  test("show tblproperties for spark temporary table - empty row") {
-    withTempView("parquet_temp") {
-      sql(
-        """
-          |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
-          |USING org.apache.spark.sql.parquet.DefaultSource
-        """.stripMargin)
-
-      // An empty sequence of row is returned for session temporary table.
-      checkAnswer(sql("SHOW TBLPROPERTIES parquet_temp"), Nil)
-    }
-  }
-
   test("LOAD DATA") {
     withTable("non_part_table", "part_table") {
       sql(
@@ -353,6 +340,13 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
       sql("SHOW COLUMNS IN parquet_tab4 IN default"),
       Row("price") :: Row("qty") :: Row("year") :: Row("month") :: Nil)
 
+    withTempView("tmp_view") {
+      Seq(1 -> "a").toDF("i", "j").createTempView("tmp_view")
+      checkAnswer(
+        sql("SHOW COLUMNS IN tmp_view"),
+        Row("i") :: Row("j") :: Nil)
+    }
+
     val message = intercept[NoSuchTableException] {
       sql("SHOW COLUMNS IN badtable FROM default")
     }.getMessage
@@ -405,11 +399,12 @@ class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleto
         |CREATE TEMPORARY TABLE parquet_temp (c1 INT, c2 STRING)
         |USING org.apache.spark.sql.parquet.DefaultSource
       """.stripMargin)
-    // An empty sequence of row is returned for session temporary table.
+    // SHOW PARTITIONS should ignore temp views entirely and report "table not found" even if a
+    // same-name temp view exists.
     val message1 = intercept[AnalysisException] {
       sql("SHOW PARTITIONS parquet_temp")
     }.getMessage
-    assert(message1.contains("is not allowed on a temporary table"))
+    assert(message1.contains("Table or view 'parquet_temp' not found in database 'default'"))
 
     val message2 = intercept[AnalysisException] {
       sql("SHOW PARTITIONS parquet_tab3")
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 3cba5b2a097f1..cf9f1f32fb6ba 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -25,7 +25,7 @@ import org.scalatest.BeforeAndAfterEach
 import org.apache.spark.internal.config._
 import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
 import org.apache.spark.sql.catalyst.analysis.TableAlreadyExistsException
-import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogTable, CatalogTableType}
+import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, CatalogStorageFormat, CatalogTable, CatalogTableType}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.execution.command.DDLUtils
 import org.apache.spark.sql.execution.datasources.CaseInsensitiveMap
@@ -671,8 +671,11 @@ class HiveDDLSuite
       .createTempView(sourceViewName)
 
     sql(s"CREATE TABLE $targetTabName LIKE $sourceViewName")
 
-    val sourceTable = spark.sessionState.catalog.getTableMetadata(
-      TableIdentifier(sourceViewName, None))
+    val sourceTable = CatalogTable(
+      identifier = TableIdentifier(sourceViewName),
+      tableType = CatalogTableType.VIEW,
+      storage = CatalogStorageFormat.empty,
+      schema = spark.sessionState.catalog.getTempView(sourceViewName).get.schema)
     val targetTable = spark.sessionState.catalog.getTableMetadata(
       TableIdentifier(targetTabName, Some("default")))
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
index bc999d4724062..2b0992c38554a 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
@@ -62,15 +62,15 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       var e = intercept[AnalysisException] {
         sql("CREATE OR REPLACE VIEW tab1 AS SELECT * FROM jt")
       }.getMessage
-      assert(e.contains("`default`.`tab1` is not a view"))
+      assert(e.contains("`tab1` is not a view"))
       e = intercept[AnalysisException] {
         sql("CREATE VIEW tab1 AS SELECT * FROM jt")
       }.getMessage
-      assert(e.contains("`default`.`tab1` is not a view"))
+      assert(e.contains("`tab1` is not a view"))
       e = intercept[AnalysisException] {
         sql("ALTER VIEW tab1 AS SELECT * FROM jt")
       }.getMessage
-      assert(e.contains("`default`.`tab1` is not a view"))
+      assert(e.contains("`tab1` is not a view"))
     }
   }
 
@@ -95,12 +95,12 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       e = intercept[AnalysisException] {
         sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE $viewName""")
       }.getMessage
-      assert(e.contains(s"Target table in LOAD DATA cannot be temporary: `$viewName`"))
+      assert(e.contains(s"Target table in LOAD DATA does not exist: `$viewName`"))
       e = intercept[AnalysisException] {
         sql(s"TRUNCATE TABLE $viewName")
       }.getMessage
-      assert(e.contains(s"Operation not allowed: TRUNCATE TABLE on temporary tables: `$viewName`"))
+      assert(e.contains(s"Table `$viewName` in TRUNCATE TABLE does not exist"))
     }
   }
 
@@ -195,7 +195,7 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       sql("CREATE TEMPORARY VIEW testView AS SELECT id FROM jt")
     }
 
-    assert(e.message.contains("Temporary table") && e.message.contains("already exists"))
+    assert(e.message.contains("Temporary view") && e.message.contains("already exists"))
   }
 }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
index 27bb9676e9abf..99738291239f4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/sources/HadoopFsRelationTest.scala
@@ -337,9 +337,8 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - non-partitioned table - ErrorIfExists") {
-    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
-
-    withTempView("t") {
+    withTable("t") {
+      Seq.empty[(Int, String)].toDF().write.mode(SaveMode.Overwrite).saveAsTable("t")
       intercept[AnalysisException] {
         testDF.write.format(dataSourceName).mode(SaveMode.ErrorIfExists).saveAsTable("t")
       }
@@ -347,9 +346,8 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - non-partitioned table - Ignore") {
-    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
-
-    withTempView("t") {
+    withTable("t") {
+      Seq.empty[(Int, String)].toDF().write.mode(SaveMode.Overwrite).saveAsTable("t")
       testDF.write.format(dataSourceName).mode(SaveMode.Ignore).saveAsTable("t")
       assert(spark.table("t").collect().isEmpty)
     }
@@ -459,9 +457,9 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - partitioned table - ErrorIfExists") {
-    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
+    withTable("t") {
+      Seq.empty[(Int, String)].toDF().write.mode(SaveMode.Overwrite).saveAsTable("t")
 
-    withTempView("t") {
       intercept[AnalysisException] {
         partitionedTestDF.write
           .format(dataSourceName)
@@ -474,9 +472,9 @@ abstract class HadoopFsRelationTest extends QueryTest with SQLTestUtils with Tes
   }
 
   test("saveAsTable()/load() - partitioned table - Ignore") {
-    Seq.empty[(Int, String)].toDF().createOrReplaceTempView("t")
+    withTable("t") {
+      Seq.empty[(Int, String)].toDF().write.mode(SaveMode.Overwrite).saveAsTable("t")
 
-    withTempView("t") {
       partitionedTestDF.write
         .format(dataSourceName)
         .mode(SaveMode.Ignore)

From a8ce27f802e7ce22d2959f432ecd55f685677b76 Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Thu, 8 Sep 2016 18:55:25 +0800
Subject: [PATCH 2/4] address comments

---
 .../sql/catalyst/analysis/Analyzer.scala      | 11 ++-
 .../sql/catalyst/catalog/SessionCatalog.scala | 68 +++++++-------
 .../catalyst/catalog/TempViewManager.scala    |  2 +
 .../catalog/SessionCatalogSuite.scala         | 18 ++---
 .../apache/spark/sql/DataFrameReader.scala    |  4 +-
 .../org/apache/spark/sql/SparkSession.scala   | 15 +++-
 .../spark/sql/execution/command/tables.scala  | 20 ++----
 .../spark/sql/internal/CatalogImpl.scala      |  4 +-
 .../spark/sql/hive/HiveSessionCatalog.scala   |  7 +-
 .../sql/hive/execution/HiveDDLSuite.scala     |  2 +-
 .../sql/hive/execution/SQLViewSuite.scala     |  6 +-
 11 files changed, 75 insertions(+), 82 deletions(-)
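Before the second commit's diffs, it helps to name the pattern they all follow: lookup is split so that qualified names go straight to the metastore, while unqualified names consult temp views first. A minimal sketch of that caller-side pattern (illustrative only; a helper named `resolve` does not appear in the patch):

    // Qualified names skip temp views entirely; unqualified names try the
    // temp view first and fall back to the metastore relation.
    def resolve(
        catalog: SessionCatalog,
        name: TableIdentifier,
        alias: Option[String]): LogicalPlan = {
      if (name.database.isDefined) {
        catalog.lookupRelation(name, alias)
      } else {
        catalog.lookupTempView(name.table, alias)
          .getOrElse(catalog.lookupRelation(name, alias))
      }
    }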
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 18f814d6cdfd4..510837ae29188 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -439,7 +439,16 @@ class Analyzer(
   object ResolveRelations extends Rule[LogicalPlan] {
     private def lookupTableFromCatalog(u: UnresolvedRelation): LogicalPlan = {
       try {
-        catalog.lookupRelation(u.tableIdentifier, u.alias)
+        if (u.tableIdentifier.database.isDefined) {
+          catalog.lookupRelation(u.tableIdentifier, u.alias)
+        } else {
+          val maybeTempView = catalog.lookupTempView(u.tableIdentifier.table, u.alias)
+          if (maybeTempView.isDefined) {
+            maybeTempView.get
+          } else {
+            catalog.lookupRelation(u.tableIdentifier, u.alias)
+          }
+        }
       } catch {
         case _: NoSuchTableException =>
           u.failAnalysis(s"Table or view not found: ${u.tableName}")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index a76a15a36885b..c9bce76be5fea 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -271,6 +271,22 @@ class SessionCatalog(
     externalCatalog.getTableOption(db, table)
   }
 
+  /**
+   * Return a [[LogicalPlan]] that represents the given table/view in metastore.
+   *
+   * If the relation is a view, the relation will be wrapped in a [[SubqueryAlias]] which will
+   * track the name of the view.
+   */
+  def lookupRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = {
+    val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
+    val table = formatTableName(name.table)
+    val tableMeta = externalCatalog.getTable(db, table)
+    val view = Option(tableMeta.tableType).collect {
+      case CatalogTableType.VIEW => TableIdentifier(table, Some(db))
+    }
+    SubqueryAlias(alias.getOrElse(table), SimpleCatalogRelation(db, tableMeta), view)
+  }
+
   /**
    * Return whether a table/view with the specified name exists in metastore.
    */
@@ -370,6 +386,16 @@ class SessionCatalog(
     tempViews.create(table, viewDefinition, overrideIfExists)
   }
 
+  /**
+   * Return a [[LogicalPlan]] that represents the given temporary view.
+   */
+  def lookupTempView(name: String, alias: Option[String] = None): Option[LogicalPlan] = {
+    val viewName = formatTableName(name)
+    tempViews.get(viewName).map { viewDef =>
+      SubqueryAlias(alias.getOrElse(viewName), viewDef, Some(TableIdentifier(viewName)))
+    }
+  }
+
   /**
    * Rename a temporary view, and returns true if it succeeds, false otherwise.
    */
@@ -401,43 +427,6 @@ class SessionCatalog(
   // -------------------------------------------------------------------------
   // | Methods that interact with temporary views and metastore tables/views |
   // -------------------------------------------------------------------------
 
-  /**
-   * Return a [[LogicalPlan]] that represents the given table/view.
-   *
-   * If a database is specified in `name`, this will return the table/view from that database.
-   * If no database is specified, this will first attempt to return a temporary view with
-   * the same name, then, if that does not exist, return the table/view from the current database.
-   *
-   * If the relation is a view, the relation will be wrapped in a [[SubqueryAlias]] which will
-   * track the name of the view.
-   */
-  def lookupRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = {
-    val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
-    val table = formatTableName(name.table)
-
-    if (name.database.isDefined) {
-      lookupMetastoreRelation(db, table, alias)
-    } else {
-      val maybeTempView = tempViews.get(table)
-      if (maybeTempView.isDefined) {
-        SubqueryAlias(alias.getOrElse(table), maybeTempView.get, Some(name))
-      } else {
-        lookupMetastoreRelation(db, table, alias)
-      }
-    }
-  }
-
-  protected def lookupMetastoreRelation(
-      db: String,
-      table: String,
-      alias: Option[String]): LogicalPlan = {
-    val metadata = externalCatalog.getTable(db, table)
-    val view = Option(metadata.tableType).collect {
-      case CatalogTableType.VIEW => TableIdentifier(table, Some(db))
-    }
-    SubqueryAlias(alias.getOrElse(table), SimpleCatalogRelation(db, metadata), view)
-  }
-
   /**
    * List all tables/views in the specified database, including temporary views.
    */
   def listTables(db: String): Seq[TableIdentifier] = listTables(db, "*")
@@ -475,11 +464,6 @@ class SessionCatalog(
    */
   def clearTempViews(): Unit = tempViews.clear()
 
-  /**
-   * Return a temporary view exactly as it was stored.
-   */
-  def getTempView(name: String): Option[LogicalPlan] = tempViews.get(formatTableName(name))
-
   // ----------------------------------------------------------------------------
   // Partitions
   // ----------------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala
index 2e8be1419c386..d148bbf499cb9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/TempViewManager.scala
@@ -29,6 +29,8 @@ import org.apache.spark.sql.catalyst.util.StringUtils
 
 /**
  * A thread-safe manager for a list of temp views, providing atomic operations to manage temp views.
+ * Note that the temp view name is always case-sensitive here; callers are responsible for
+ * formatting the view name w.r.t. the case-sensitivity config.
  */
 class TempViewManager {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 857366581bcff..a1026f481bd61 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -201,16 +201,16 @@ class SessionCatalogSuite extends SparkFunSuite {
     val tempTable2 = Range(1, 20, 2, 10)
     catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
     catalog.createTempView("tbl2", tempTable2, overrideIfExists = false)
-    assert(catalog.getTempView("tbl1") == Option(tempTable1))
-    assert(catalog.getTempView("tbl2") == Option(tempTable2))
-    assert(catalog.getTempView("tbl3").isEmpty)
+    assert(catalog.lookupTempView("tbl1") == Option(tempTable1))
+    assert(catalog.lookupTempView("tbl2") == Option(tempTable2))
+    assert(catalog.lookupTempView("tbl3").isEmpty)
     // Temporary table already exists
     intercept[TempViewAlreadyExistsException] {
       catalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
     }
     // Temporary table already exists but we override it
     catalog.createTempView("tbl1", tempTable2, overrideIfExists = true)
-    assert(catalog.getTempView("tbl1") == Option(tempTable2))
+    assert(catalog.lookupTempView("tbl1") == Option(tempTable2))
   }
 
   test("drop table") {
@@ -251,10 +251,10 @@ class SessionCatalogSuite extends SparkFunSuite {
     val tempTable = Range(1, 10, 2, 10)
     sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
     sessionCatalog.setCurrentDatabase("db2")
-    assert(sessionCatalog.getTempView("tbl1") == Some(tempTable))
+    assert(sessionCatalog.lookupTempView("tbl1") == Some(tempTable))
     assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
     sessionCatalog.dropTempView("tbl1")
-    assert(sessionCatalog.getTempView("tbl1").isEmpty)
+    assert(sessionCatalog.lookupTempView("tbl1").isEmpty)
     assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
   }
 
@@ -292,11 +292,11 @@ class SessionCatalogSuite extends SparkFunSuite {
     val tempTable = Range(1, 10, 2, 10)
     sessionCatalog.createTempView("tbl1", tempTable, overrideIfExists = false)
     sessionCatalog.setCurrentDatabase("db2")
-    assert(sessionCatalog.getTempView("tbl1") == Option(tempTable))
+    assert(sessionCatalog.lookupTempView("tbl1") == Option(tempTable))
     assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
     sessionCatalog.renameTempView("tbl1", "tbl3")
-    assert(sessionCatalog.getTempView("tbl1").isEmpty)
-    assert(sessionCatalog.getTempView("tbl3") == Option(tempTable))
+    assert(sessionCatalog.lookupTempView("tbl1").isEmpty)
+    assert(sessionCatalog.lookupTempView("tbl3") == Option(tempTable))
     assert(externalCatalog.listTables("db2").toSet == Set("tbl1", "tbl2"))
   }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
index 93bf74d06b71d..88d0109c917cf 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala
@@ -463,9 +463,7 @@ class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
    * @since 1.4.0
    */
   def table(tableName: String): DataFrame = {
-    Dataset.ofRows(sparkSession,
-      sparkSession.sessionState.catalog.lookupRelation(
-        sparkSession.sessionState.sqlParser.parseTableIdentifier(tableName)))
+    sparkSession.table(tableName)
   }
 
   /**
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 0f6292db62172..8a7f38091f850 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -556,7 +556,20 @@ class SparkSession private(
   }
 
   private[sql] def table(tableIdent: TableIdentifier): DataFrame = {
-    Dataset.ofRows(self, sessionState.catalog.lookupRelation(tableIdent))
+    val plan = {
+      if (tableIdent.database.isDefined) {
+        sessionState.catalog.lookupRelation(tableIdent)
+      } else {
+        val maybeTempView = sessionState.catalog.lookupTempView(tableIdent.table)
+        if (maybeTempView.isDefined) {
+          maybeTempView.get
+        } else {
+          sessionState.catalog.lookupRelation(tableIdent)
+        }
+      }
+    }
+
+    Dataset.ofRows(self, plan)
   }
 
   /* ----------------- *
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
index 586b66c87534c..fce99756df05d 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/tables.scala
@@ -66,7 +66,7 @@ case class CreateTableLikeCommand(
     val sourceTableDesc = if (sourceTable.database.isDefined) {
       catalog.getTableMetadata(sourceTable)
     } else {
-      val maybeTempView = catalog.getTempView(sourceTable.table)
+      val maybeTempView = catalog.lookupTempView(sourceTable.table)
       if (maybeTempView.isDefined) {
         CatalogTable(
           identifier = sourceTable,
@@ -228,12 +228,7 @@ case class LoadDataCommand(
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
-    if (!catalog.tableExists(table)) {
-      throw new AnalysisException(s"Target table in LOAD DATA does not exist: $table")
-    }
-    val targetTable = catalog.getTableMetadataOption(table).getOrElse {
-      throw new AnalysisException(s"Target table in LOAD DATA cannot be temporary: $table")
-    }
+    val targetTable = catalog.getTableMetadata(table)
     if (targetTable.tableType == CatalogTableType.VIEW) {
       throw new AnalysisException(s"Target table in LOAD DATA cannot be a view: $table")
     }
@@ -349,9 +344,6 @@ case class TruncateTableCommand(
 
   override def run(spark: SparkSession): Seq[Row] = {
     val catalog = spark.sessionState.catalog
-    if (!catalog.tableExists(tableName)) {
-      throw new AnalysisException(s"Table $tableName in TRUNCATE TABLE does not exist.")
-    }
     val table = catalog.getTableMetadata(tableName)
     if (table.tableType == CatalogTableType.EXTERNAL) {
       throw new AnalysisException(
@@ -436,7 +428,7 @@ case class DescribeTableCommand(table: TableIdentifier, isExtended: Boolean, isF
     if (table.database.isDefined) {
       describeMetastoreTable(catalog, result)
     } else {
-      val maybeTempView = catalog.getTempView(table.table)
+      val maybeTempView = catalog.lookupTempView(table.table)
       if (maybeTempView.isDefined) {
         describeSchema(maybeTempView.get.schema, result)
       } else {
@@ -619,7 +611,7 @@ case class ShowColumnsCommand(table: TableIdentifier) extends RunnableCommand {
     val schema = if (table.database.isDefined) {
       catalog.getTableMetadata(table).schema
     } else {
-      val maybeTempView = catalog.getTempView(table.table)
+      val maybeTempView = catalog.lookupTempView(table.table)
       if (maybeTempView.isDefined) {
         maybeTempView.get.schema
       } else {
@@ -713,10 +705,6 @@ case class ShowCreateTableCommand(table: TableIdentifier) extends RunnableComman
 
   override def run(sparkSession: SparkSession): Seq[Row] = {
     val catalog = sparkSession.sessionState.catalog
-    if (!catalog.tableExists(table)) {
-      throw new AnalysisException(s"Table $table doesn't exist")
-    }
-
     val tableMetadata = catalog.getTableMetadata(table)
 
     // TODO: unify this after we unify the CREATE TABLE syntax for hive serde and data source table.
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index d20f56c70cf8c..28026a58f195a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -139,7 +139,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
    */
   @throws[AnalysisException]("table does not exist")
   override def listColumns(tableName: String): Dataset[Column] = {
-    val maybeTempView = sessionCatalog.getTempView(tableName)
+    val maybeTempView = sessionCatalog.lookupTempView(tableName)
     if (maybeTempView.isDefined) {
       val columns = maybeTempView.get.schema.map { c =>
         new Column(
@@ -368,7 +368,7 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
 
     // If this table is cached as an InMemoryRelation, drop the original
     // cached version and make the new version cached lazily.
-    val logicalPlan = sparkSession.sessionState.catalog.lookupRelation(tableIdent)
+    val logicalPlan = sparkSession.table(tableIdent).queryExecution.logical
     // Use lookupCachedData directly since RefreshTable also takes databaseName.
     val isCached = sparkSession.sharedState.cacheManager.lookupCachedData(logicalPlan).nonEmpty
     if (isCached) {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 1034fc1a0afd0..fc25a35e7cc8b 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -53,10 +53,9 @@ private[sql] class HiveSessionCatalog(
     conf,
     hadoopConf) {
 
-  override protected def lookupMetastoreRelation(
-      db: String,
-      table: String,
-      alias: Option[String]): LogicalPlan = {
+  override def lookupRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = {
+    val db = formatDatabaseName(name.database.getOrElse(getCurrentDatabase))
+    val table = formatTableName(name.table)
     metastoreCatalog.lookupRelation(TableIdentifier(table, Some(db)), alias)
   }
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index cf9f1f32fb6ba..63fe0da9f00e6 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -675,7 +675,7 @@ class HiveDDLSuite
       identifier = TableIdentifier(sourceViewName),
       tableType = CatalogTableType.VIEW,
       storage = CatalogStorageFormat.empty,
-      schema = spark.sessionState.catalog.getTempView(sourceViewName).get.schema)
+      schema = spark.sessionState.catalog.lookupTempView(sourceViewName).get.schema)
     val targetTable = spark.sessionState.catalog.getTableMetadata(
       TableIdentifier(targetTabName, Some("default")))
 
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
index 2b0992c38554a..bc2efc96bdd7f 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLViewSuite.scala
@@ -83,7 +83,7 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
   }
 
   test("error handling: insert/load/truncate table commands against a temp view") {
-    val viewName = "testView"
+    val viewName = "test_view"
     withTempView(viewName) {
       sql(s"CREATE TEMPORARY VIEW $viewName AS SELECT id FROM jt")
       var e = intercept[AnalysisException] {
@@ -95,12 +95,12 @@ class SQLViewSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
       e = intercept[AnalysisException] {
        sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE $viewName""")
       }.getMessage
-      assert(e.contains(s"Target table in LOAD DATA does not exist: `$viewName`"))
+      assert(e.contains(s"Table or view '$viewName' not found in database 'default'"))
       e = intercept[AnalysisException] {
         sql(s"TRUNCATE TABLE $viewName")
       }.getMessage
-      assert(e.contains(s"Table `$viewName` in TRUNCATE TABLE does not exist"))
+      assert(e.contains(s"Table or view '$viewName' not found in database 'default'"))
     }
   }
 

From e0d2427c114e008add4a7df65b3fb4edbb526ae8 Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Fri, 9 Sep 2016 22:31:22 +0800
Subject: [PATCH 3/4] improve test

---
 .../sql/test/DataFrameReaderWriterSuite.scala | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
index 403631c8651d9..3bd7f31a7bab1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/DataFrameReaderWriterSuite.scala
@@ -458,12 +458,15 @@ class DataFrameReaderWriterSuite extends QueryTest with SharedSQLContext with Be
   }
 
   test("save as table if a same-name temp view exists") {
-    withTable("same_name") {
-      withTempView("same_name") {
-        spark.range(10).createTempView("same_name")
-        spark.range(20).write.saveAsTable("same_name")
-        checkAnswer(spark.table("same_name"), spark.range(10).toDF())
-        checkAnswer(spark.table("default.same_name"), spark.range(20).toDF())
+    import SaveMode._
+    for (mode <- Seq(Append, ErrorIfExists, Overwrite, Ignore)) {
+      withTable("same_name") {
+        withTempView("same_name") {
+          spark.range(10).createTempView("same_name")
+          spark.range(20).write.mode(mode).saveAsTable("same_name")
+          checkAnswer(spark.table("same_name"), spark.range(10).toDF())
+          checkAnswer(spark.table("default.same_name"), spark.range(20).toDF())
+        }
       }
     }
   }

From f42135586cdb9b71d44b9294a7f0d502a1da56ff Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Sat, 10 Sep 2016 09:45:44 +0800
Subject: [PATCH 4/4] fix test

---
 .../sql/catalyst/analysis/Analyzer.scala      | 11 +-----
 .../sql/catalyst/catalog/SessionCatalog.scala | 38 ++++++++++++++-----
 .../catalog/SessionCatalogSuite.scala         | 10 ++---
 .../org/apache/spark/sql/SparkSession.scala   | 15 +-------
 4 files changed, 35 insertions(+), 39 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 510837ae29188..c542f09c86d9f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -439,16 +439,7 @@ class Analyzer(
   object ResolveRelations extends Rule[LogicalPlan] {
     private def lookupTableFromCatalog(u: UnresolvedRelation): LogicalPlan = {
       try {
-        if (u.tableIdentifier.database.isDefined) {
-          catalog.lookupRelation(u.tableIdentifier, u.alias)
-        } else {
-          val maybeTempView = catalog.lookupTempView(u.tableIdentifier.table, u.alias)
-          if (maybeTempView.isDefined) {
-            maybeTempView.get
-          } else {
-            catalog.lookupRelation(u.tableIdentifier, u.alias)
-          }
-        }
+        catalog.lookupTempViewOrRelation(u.tableIdentifier, u.alias)
       } catch {
         case _: NoSuchTableException =>
           u.failAnalysis(s"Table or view not found: ${u.tableName}")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index c9bce76be5fea..241060ca843c3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -387,14 +387,9 @@ class SessionCatalog(
   }
 
   /**
-   * Return a [[LogicalPlan]] that represents the given temporary view.
+   * Return a temporary view exactly as it was stored.
    */
-  def lookupTempView(name: String, alias: Option[String] = None): Option[LogicalPlan] = {
-    val viewName = formatTableName(name)
-    tempViews.get(viewName).map { viewDef =>
-      SubqueryAlias(alias.getOrElse(viewName), viewDef, Some(TableIdentifier(viewName)))
-    }
-  }
+  def lookupTempView(name: String): Option[LogicalPlan] = tempViews.get(formatTableName(name))
 
   /**
    * Rename a temporary view, and returns true if it succeeds, false otherwise.
@@ -423,9 +418,32 @@ class SessionCatalog(
     tempViews.update(viewName, viewDefinition)
   }
 
-  // -------------------------------------------------------------------------
-  // | Methods that interact with temporary views and metastore tables/views |
-  // -------------------------------------------------------------------------
+  // ------------------------------------------------------------------------------
+  // | Methods that interact with both temporary views and metastore tables/views |
+  // ------------------------------------------------------------------------------
+
+  /**
+   * Return a [[LogicalPlan]] that represents the given table/view.
+   *
+   * If a database is specified in `name`, this will return the table/view from that database.
+   * If no database is specified, this will first attempt to return a temporary view with
+   * the same name, then, if that does not exist, return the table/view from the current database.
+   *
+   * If the relation is a view, the relation will be wrapped in a [[SubqueryAlias]] which will
+   * track the name of the view.
+   */
+  def lookupTempViewOrRelation(name: TableIdentifier, alias: Option[String] = None): LogicalPlan = {
+    if (name.database.isDefined) {
+      lookupRelation(name, alias)
+    } else {
+      val maybeTempView = lookupTempView(name.table)
+      if (maybeTempView.isDefined) {
+        SubqueryAlias(alias.getOrElse(name.table), maybeTempView.get, Some(name))
+      } else {
+        lookupRelation(name, alias)
+      }
+    }
+  }
 
   /**
    * List all tables/views in the specified database, including temporary views.
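Since lookupTempViewOrRelation is now the one entry point that mixes the two namespaces, its resolution order is worth spelling out; the suite below exercises each branch. A sketch of the three cases, assuming a catalog whose current database is db2 and which holds both a temp view and a db2 metastore table named tbl1:

    catalog.lookupTempViewOrRelation(TableIdentifier("tbl1", Some("db2")))  // the db2 table: a qualified name never hits temp views
    catalog.lookupTempViewOrRelation(TableIdentifier("tbl1"))               // the temp view: an unqualified name tries temp views first
    catalog.dropTempView("tbl1")
    catalog.lookupTempViewOrRelation(TableIdentifier("tbl1"))               // the db2 table again: falls back to the current database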
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index a1026f481bd61..d82854f191864 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -358,7 +358,7 @@ class SessionCatalogSuite extends SparkFunSuite {
     }
   }
 
-  test("lookup table relation") {
+  test("lookupTempViewOrRelation") {
     val externalCatalog = newBasicCatalog()
     val sessionCatalog = new SessionCatalog(externalCatalog)
     val tempTable1 = Range(1, 10, 1, 10)
@@ -366,14 +366,14 @@ class SessionCatalogSuite extends SparkFunSuite {
     sessionCatalog.createTempView("tbl1", tempTable1, overrideIfExists = false)
     sessionCatalog.setCurrentDatabase("db2")
     // If we explicitly specify the database, we'll look up the relation in that database
-    assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1", Some("db2")))
+    assert(sessionCatalog.lookupTempViewOrRelation(TableIdentifier("tbl1", Some("db2")))
       == SubqueryAlias("tbl1", SimpleCatalogRelation("db2", metastoreTable1), None))
     // Otherwise, we'll first look up a temporary table with the same name
-    assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1"))
+    assert(sessionCatalog.lookupTempViewOrRelation(TableIdentifier("tbl1"))
       == SubqueryAlias("tbl1", tempTable1, Some(TableIdentifier("tbl1"))))
     // Then, if that does not exist, look up the relation in the current database
     sessionCatalog.dropTempView("tbl1")
-    assert(sessionCatalog.lookupRelation(TableIdentifier("tbl1"))
+    assert(sessionCatalog.lookupTempViewOrRelation(TableIdentifier("tbl1"))
       == SubqueryAlias("tbl1", SimpleCatalogRelation("db2", metastoreTable1), None))
   }
 
@@ -395,7 +395,7 @@ class SessionCatalogSuite extends SparkFunSuite {
     val catalog = new SessionCatalog(newBasicCatalog())
     val tmpView = Range(1, 10, 2, 10)
     catalog.createTempView("vw1", tmpView, overrideIfExists = false)
-    val plan = catalog.lookupRelation(TableIdentifier("vw1"), Option("range"))
+    val plan = catalog.lookupTempViewOrRelation(TableIdentifier("vw1"), Option("range"))
     assert(plan == SubqueryAlias("range", tmpView, Option(TableIdentifier("vw1"))))
   }
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
index 8a7f38091f850..24c392a85d0cd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -556,20 +556,7 @@ class SparkSession private(
   }
 
   private[sql] def table(tableIdent: TableIdentifier): DataFrame = {
-    val plan = {
-      if (tableIdent.database.isDefined) {
-        sessionState.catalog.lookupRelation(tableIdent)
-      } else {
-        val maybeTempView = sessionState.catalog.lookupTempView(tableIdent.table)
-        if (maybeTempView.isDefined) {
-          maybeTempView.get
-        } else {
-          sessionState.catalog.lookupRelation(tableIdent)
-        }
-      }
-    }
-
-    Dataset.ofRows(self, plan)
+    Dataset.ofRows(self, sessionState.catalog.lookupTempViewOrRelation(tableIdent))
   }
 
   /* ----------------- *
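Taken together, the four commits leave the user-visible semantics as follows (a sketch, assuming a SparkSession `spark` whose current database is `default`):

    spark.range(10).createTempView("same_name")
    spark.range(20).write.saveAsTable("same_name")
    spark.table("same_name").count()          // 10: the unqualified name resolves to the temp view
    spark.table("default.same_name").count()  // 20: the qualified name bypasses temp views

The qualified form is therefore the escape hatch when a temp view shadows a metastore table, which is exactly what the save-as-table tests above pin down.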