Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -508,8 +508,8 @@ case class TruncateTableCommand(
var optPermission: Option[FsPermission] = None
var optAcls: Option[java.util.List[AclEntry]] = None
if (!ignorePermissionAcl) {
val fileStatus = fs.getFileStatus(path)
try {
val fileStatus = fs.getFileStatus(path)
optPermission = Some(fileStatus.getPermission())
} catch {
case NonFatal(_) => // do nothing
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2084,6 +2084,27 @@ abstract class DDLSuite extends QueryTest with SQLTestUtils {
}
}

test("SPARK-31163: acl/permission should handle non-existed path when truncating table") {
  // Regression test: TRUNCATE TABLE must not fail with java.io.FileNotFoundException
  // when the partition (or table) directory is missing — e.g. left behind by an
  // earlier incomplete/unsuccessful truncate — while permission/ACL preservation
  // is enabled (TRUNCATE_TABLE_IGNORE_PERMISSION_ACL = false).
  withSQLConf(SQLConf.TRUNCATE_TABLE_IGNORE_PERMISSION_ACL.key -> "false") {
    withTable("tab1") {
      sql("CREATE TABLE tab1 (col1 STRING, col2 INT) USING parquet PARTITIONED BY (col2)")
      sql("INSERT INTO tab1 SELECT 'one', 1")
      checkAnswer(spark.table("tab1"), Row("one", 1))
      val part = spark.sessionState.catalog.listPartitions(TableIdentifier("tab1")).head
      val path = new File(part.location.getPath)
      sql("TRUNCATE TABLE tab1")
      // simulate incomplete/unsuccessful truncate: the partition directory
      // exists after truncation, then is removed out-of-band
      assert(path.exists())
      path.delete()
      assert(!path.exists())
      // must execute without throwing java.io.FileNotFoundException
      sql("TRUNCATE TABLE tab1")
      // partition path should be re-created
      assert(path.exists())
    }
  }
}

test("create temporary view with mismatched schema") {
withTable("tab1") {
spark.range(10).write.saveAsTable("tab1")
Expand Down