diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileBatchWrite.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileBatchWrite.scala
index 7b836111a59ba..db31927fa73bb 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileBatchWrite.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileBatchWrite.scala
@@ -46,8 +46,7 @@ class FileBatchWrite(
   }
 
   override def createBatchWriterFactory(): DataWriterFactory = {
-    val conf = new SerializableConfiguration(job.getConfiguration)
-    FileWriterFactory(description, committer, conf)
+    FileWriterFactory(description, committer)
   }
 }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWriterFactory.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWriterFactory.scala
index be9c180a901bb..eb573b317142a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWriterFactory.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/FileWriterFactory.scala
@@ -29,8 +29,7 @@ import org.apache.spark.util.SerializableConfiguration
 
 case class FileWriterFactory (
     description: WriteJobDescription,
-    committer: FileCommitProtocol,
-    conf: SerializableConfiguration) extends DataWriterFactory {
+    committer: FileCommitProtocol) extends DataWriterFactory {
   override def createWriter(partitionId: Int, realTaskId: Long): DataWriter[InternalRow] = {
     val taskAttemptContext = createTaskAttemptContext(partitionId)
     committer.setupTask(taskAttemptContext)
@@ -46,7 +45,7 @@ case class FileWriterFactory (
     val taskId = new TaskID(jobId, TaskType.MAP, partitionId)
     val taskAttemptId = new TaskAttemptID(taskId, 0)
     // Set up the configuration object
-    val hadoopConf = conf.value
+    val hadoopConf = description.serializableHadoopConf.value
     hadoopConf.set("mapreduce.job.id", jobId.toString)
     hadoopConf.set("mapreduce.task.id", taskId.toString)
     hadoopConf.set("mapreduce.task.attempt.id", taskAttemptId.toString)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcPartitionDiscoverySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcPartitionDiscoverySuite.scala
index 4a695ac74c476..bc5a30e97faef 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcPartitionDiscoverySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcPartitionDiscoverySuite.scala
@@ -232,5 +232,8 @@ class OrcPartitionDiscoverySuite extends OrcPartitionDiscoveryTest with SharedSQ
 
 class OrcV1PartitionDiscoverySuite extends OrcPartitionDiscoveryTest with SharedSQLContext {
   override protected def sparkConf: SparkConf =
-    super.sparkConf.set(SQLConf.USE_V1_SOURCE_READER_LIST, "orc")
+    super
+      .sparkConf
+      .set(SQLConf.USE_V1_SOURCE_READER_LIST, "orc")
+      .set(SQLConf.USE_V1_SOURCE_WRITER_LIST, "orc")
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala
index c0d26011e7917..9bf1227666877 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcQuerySuite.scala
@@ -690,5 +690,8 @@ class OrcQuerySuite extends OrcQueryTest with SharedSQLContext {
 
 class OrcV1QuerySuite extends OrcQuerySuite {
   override protected def sparkConf: SparkConf =
-    super.sparkConf.set(SQLConf.USE_V1_SOURCE_READER_LIST, "orc")
+    super
+      .sparkConf
+      .set(SQLConf.USE_V1_SOURCE_READER_LIST, "orc")
+      .set(SQLConf.USE_V1_SOURCE_WRITER_LIST, "orc")
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala
index cf5bbb3fff706..5a1bf9b437569 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcV1FilterSuite.scala
@@ -31,7 +31,10 @@ import org.apache.spark.sql.internal.SQLConf
 
 class OrcV1FilterSuite extends OrcFilterSuite {
   override protected def sparkConf: SparkConf =
-    super.sparkConf.set(SQLConf.USE_V1_SOURCE_READER_LIST, "orc")
+    super
+      .sparkConf
+      .set(SQLConf.USE_V1_SOURCE_READER_LIST, "orc")
+      .set(SQLConf.USE_V1_SOURCE_WRITER_LIST, "orc")
 
   override def checkFilterPredicate(
       df: DataFrame,
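Context for the first two hunks: Hadoop's `Configuration` is not `java.io.Serializable`, so Spark ships it to executors wrapped in a `SerializableConfiguration`. `WriteJobDescription` already carries such a wrapper (the `serializableHadoopConf` field referenced in the diff), so having `FileBatchWrite` build a second wrapper and thread it through `FileWriterFactory` duplicated state that was already serialized with the description. Below is a minimal sketch of the wrapper pattern using stand-in classes, not the real Spark ones; only the `serializableHadoopConf` name comes from the diff, everything else is illustrative:

```scala
import java.io.{ObjectInputStream, ObjectOutputStream}

import org.apache.hadoop.conf.Configuration

// Stand-in for org.apache.spark.util.SerializableConfiguration: the Hadoop
// Configuration itself is @transient, and Java serialization is redirected
// to Configuration's own Writable methods (write/readFields).
class SerializableConfiguration(@transient var value: Configuration) extends Serializable {
  private def writeObject(out: ObjectOutputStream): Unit = {
    out.defaultWriteObject()
    value.write(out) // Configuration serializes itself as key/value pairs
  }

  private def readObject(in: ObjectInputStream): Unit = {
    in.defaultReadObject()
    value = new Configuration(false) // start empty; don't load default resources
    value.readFields(in)
  }
}

// Illustrative job description: the wrapped conf travels inside it once,
// so factories built from it need no separate conf parameter.
case class WriteJobDescription(serializableHadoopConf: SerializableConfiguration)

case class FileWriterFactory(description: WriteJobDescription) extends Serializable {
  // On the executor side, unwrap the conf from the already-serialized description.
  def hadoopConf: Configuration = description.serializableHadoopConf.value
}
```

The test-suite hunks are a separate concern: the `OrcV1*` suites previously pinned only `SQLConf.USE_V1_SOURCE_READER_LIST`, so their writes could still go through the V2 path. Setting `SQLConf.USE_V1_SOURCE_WRITER_LIST` to "orc" as well keeps both the read and write sides of these suites on the V1 ORC source.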