diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CheckpointFileManager.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CheckpointFileManager.scala
index 6df0a2f30638..ad3212871fc9 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CheckpointFileManager.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/CheckpointFileManager.scala
@@ -370,20 +370,6 @@ class FileContextBasedCheckpointFileManager(path: Path, hadoopConf: Configuratio
 
   override def renameTempFile(srcPath: Path, dstPath: Path, overwriteIfPossible: Boolean): Unit = {
     import Options.Rename._
     fc.rename(srcPath, dstPath, if (overwriteIfPossible) OVERWRITE else NONE)
-    // TODO: this is a workaround of HADOOP-16255 - remove this when HADOOP-16255 is resolved
-    mayRemoveCrcFile(srcPath)
-  }
-
-  private def mayRemoveCrcFile(path: Path): Unit = {
-    try {
-      val checksumFile = new Path(path.getParent, s".${path.getName}.crc")
-      if (exists(checksumFile)) {
-        // checksum file exists, deleting it
-        delete(checksumFile)
-      }
-    } catch {
-      case NonFatal(_) => // ignore, we are removing crc file as "best-effort"
-    }
   }
 }