@@ -19,34 +19,45 @@ package org.apache.spark.sql.sources
 
 import java.io.{File, IOException}
 
-import org.scalatest.BeforeAndAfter
+import org.scalatest.BeforeAndAfterEach
 
 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.execution.datasources.DDLException
 import org.apache.spark.sql.test.SharedSQLContext
 import org.apache.spark.util.Utils
 
-class CreateTableAsSelectSuite extends DataSourceTest with SharedSQLContext with BeforeAndAfter {
+class CreateTableAsSelectSuite
+  extends DataSourceTest
+  with SharedSQLContext
+  with BeforeAndAfterEach {
 
   protected override lazy val sql = caseInsensitiveContext.sql _
   private var path: File = null
 
   override def beforeAll(): Unit = {
     super.beforeAll()
-    path = Utils.createTempDir()
     val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
     caseInsensitiveContext.read.json(rdd).registerTempTable("jt")
   }
 
   override def afterAll(): Unit = {
     try {
       caseInsensitiveContext.dropTempTable("jt")
-      Utils.deleteRecursively(path)
     } finally {
       super.afterAll()
     }
   }
 
-  after {
+  override def beforeEach(): Unit = {
+    super.beforeEach()
+    path = Utils.createTempDir()
     path.delete()
   }
 
+  override def afterEach(): Unit = {
+    Utils.deleteRecursively(path)
+    super.afterEach()
+  }
+
   test("CREATE TEMPORARY TABLE AS SELECT") {
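For context on the fixture traits involved: the diff swaps ScalaTest's BeforeAndAfter (with an after { ... } block) for BeforeAndAfterEach, so the temporary directory is created in beforeEach and removed in afterEach, giving every test a fresh, not-yet-existing path even if an earlier test fails before cleanup. Below is a minimal, self-contained sketch of that per-test temp-directory pattern under those assumptions; the suite name and the deleteRecursively helper are hypothetical stand-ins, since the real suite relies on Spark's Utils.createTempDir / Utils.deleteRecursively and the shared caseInsensitiveContext, and FunSuite here is the ScalaTest 2.x/3.0-era trait (AnyFunSuite in newer releases).

import java.io.File
import java.nio.file.Files

import org.scalatest.{BeforeAndAfterEach, FunSuite}

// Hypothetical suite illustrating the per-test temp-directory fixture adopted
// in the diff above; not part of Spark.
class TempDirPerTestSuite extends FunSuite with BeforeAndAfterEach {

  private var path: File = null

  override def beforeEach(): Unit = {
    super.beforeEach()
    // Create a fresh directory, then delete it so each test starts from a
    // unique path that does not exist yet (mirrors path.delete() above).
    path = Files.createTempDirectory("ctas-test").toFile
    path.delete()
  }

  override def afterEach(): Unit = {
    try {
      // Best-effort recursive cleanup; Spark uses Utils.deleteRecursively.
      deleteRecursively(path)
    } finally {
      super.afterEach()
    }
  }

  private def deleteRecursively(f: File): Unit = {
    if (f.isDirectory) {
      Option(f.listFiles()).getOrElse(Array.empty[File]).foreach(deleteRecursively)
    }
    f.delete()
  }

  test("each test sees a fresh, not-yet-created path") {
    assert(!path.exists())
  }
}

Deleting the directory right after creating it looks odd at first, but it leaves behind a unique path name that does not exist on disk, which is presumably what the CREATE TABLE ... AS SELECT statements under test need in order to create their own output location.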