From 7be6862024c5d49e1954cb6764b87d2d6975c0d0 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Mon, 29 Nov 2021 14:13:49 +0800 Subject: [PATCH 1/8] [SPARK-37291][PYSPARK] PySpark create SparkSession should pass initialSessionOptions --- .../scala/org/apache/spark/api/python/PythonUtils.scala | 4 ++++ python/pyspark/sql/session.py | 7 ++----- python/pyspark/sql/tests/test_session.py | 8 ++++---- .../main/scala/org/apache/spark/sql/SparkSession.scala | 4 ++-- .../sql/execution/ui/SQLAppStatusListenerSuite.scala | 2 +- .../org/apache/spark/sql/internal/SQLConfSuite.scala | 4 ++-- .../scala/org/apache/spark/sql/test/TestSQLContext.scala | 3 ++- .../scala/org/apache/spark/sql/hive/test/TestHive.scala | 2 +- 8 files changed, 18 insertions(+), 16 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala index 549edc446415b..dc982ca534747 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala @@ -78,6 +78,10 @@ private[spark] object PythonUtils { jm.asScala.toMap } + def toScalaMapString(jm: java.util.Map[String, String]): Map[String, String] = { + jm.asScala.toMap + } + def isEncryptionEnabled(sc: JavaSparkContext): Boolean = { sc.conf.get(org.apache.spark.internal.config.IO_ENCRYPTION_ENABLED) } diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py index 927554198743e..3d0aa0b1d28e8 100644 --- a/python/pyspark/sql/session.py +++ b/python/pyspark/sql/session.py @@ -291,7 +291,7 @@ def __init__( self, sparkContext: SparkContext, jsparkSession: Optional[JavaObject] = None, - options: Optional[Dict[str, Any]] = None, + options: Optional[Dict[str, Any]] = {}, ): from pyspark.sql.context import SQLContext @@ -305,10 +305,7 @@ def __init__( ): jsparkSession = self._jvm.SparkSession.getDefaultSession().get() else: - jsparkSession = self._jvm.SparkSession(self._jsc.sc()) - if options is not None: - for key, value in options.items(): - jsparkSession.sharedState().conf().set(key, value) + jsparkSession = self._jvm.SparkSession(self._jsc.sc(), self._jvm.PythonUtils.toScalaMap(options)) self._jsparkSession = jsparkSession self._jwrapped = self._jsparkSession.sqlContext() self._wrapped = SQLContext(self._sc, self, self._jwrapped) diff --git a/python/pyspark/sql/tests/test_session.py b/python/pyspark/sql/tests/test_session.py index eb23b68ccf498..22e1a5ed85daf 100644 --- a/python/pyspark/sql/tests/test_session.py +++ b/python/pyspark/sql/tests/test_session.py @@ -61,7 +61,7 @@ def test_set_jvm_default_session(self): def test_jvm_default_session_already_set(self): # Here, we assume there is the default session already set in JVM. 
- jsession = self.sc._jvm.SparkSession(self.sc._jsc.sc()) + jsession = self.sc._jvm.SparkSession(self.sc._jsc.sc(), self.sc._jvm.PythonUtils.toScalaMap({})) self.sc._jvm.SparkSession.setDefaultSession(jsession) spark = SparkSession.builder.getOrCreate() @@ -289,18 +289,18 @@ def test_another_spark_session(self): if session2 is not None: session2.stop() - def test_create_spark_context_first_and_copy_options_to_sharedState(self): + def test_create_spark_context_with_initial_session_options(self): sc = None session = None try: conf = SparkConf().set("key1", "value1") sc = SparkContext("local[4]", "SessionBuilderTests", conf=conf) session = ( - SparkSession.builder.config("key2", "value2").enableHiveSupport().getOrCreate() + SparkSession.builder.config("spark.sql.codegen.comments", "true").enableHiveSupport().getOrCreate() ) self.assertEqual(session._jsparkSession.sharedState().conf().get("key1"), "value1") - self.assertEqual(session._jsparkSession.sharedState().conf().get("key2"), "value2") + self.assertEqual(session._jsparkSession.sharedState().conf().get("spark.sql.codegen.comments"), "true") self.assertEqual( session._jsparkSession.sharedState().conf().get("spark.sql.catalogImplementation"), "hive", diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala index 63812b873ba8e..d7c25292662bc 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -97,11 +97,11 @@ class SparkSession private( * since that would cause every new session to reinvoke Spark Session Extensions on the currently * running extensions. */ - private[sql] def this(sc: SparkContext) = { + private[sql] def this(sc: SparkContext, initialSessionOptions: Map[String, String]) = { this(sc, None, None, SparkSession.applyExtensions( sc.getConf.get(StaticSQLConf.SPARK_SESSION_EXTENSIONS).getOrElse(Seq.empty), - new SparkSessionExtensions), Map.empty) + new SparkSessionExtensions), initialSessionOptions) } private[sql] val sessionUUID: String = UUID.randomUUID.toString diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala index 61230641ded81..490432e041a92 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala @@ -948,7 +948,7 @@ class SQLAppStatusListenerMemoryLeakSuite extends SparkFunSuite { .set(ASYNC_TRACKING_ENABLED, false) withSpark(new SparkContext(conf)) { sc => quietly { - val spark = new SparkSession(sc) + val spark = new SparkSession(sc, Map.empty) import spark.implicits._ // Run 100 successful executions and 100 failed executions. // Each execution only has one job and one stage. 
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index 13f5778617b5c..3942d2dbefb2e 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -283,7 +283,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { try { sparkContext.conf.set("spark.to.be.or.not.to.be", "my love") sparkContext.conf.set("spark.sql.with.or.without.you", "my love") - val spark = new SparkSession(sparkContext) + val spark = new SparkSession(sparkContext, Map.empty) assert(spark.conf.get("spark.to.be.or.not.to.be") == "my love") assert(spark.conf.get("spark.sql.with.or.without.you") == "my love") } finally { @@ -303,7 +303,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { val previousValue = sparkContext.conf.get(GLOBAL_TEMP_DATABASE) try { sparkContext.conf.set(GLOBAL_TEMP_DATABASE, "a") - val newSession = new SparkSession(sparkContext) + val newSession = new SparkSession(sparkContext, Map.empty) assert(newSession.conf.get(GLOBAL_TEMP_DATABASE) == "a") checkAnswer( newSession.sql(s"SET ${GLOBAL_TEMP_DATABASE.key}"), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala index 47a6f3617da63..de93c4dee03d7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala @@ -24,7 +24,8 @@ import org.apache.spark.sql.internal.{SessionState, SessionStateBuilder, SQLConf /** * A special `SparkSession` prepared for testing. */ -private[spark] class TestSparkSession(sc: SparkContext) extends SparkSession(sc) { self => +private[spark] class TestSparkSession(sc: SparkContext) + extends SparkSession(sc, Map.empty) { self => def this(sparkConf: SparkConf) = { this(new SparkContext("local[2]", "test-sql-context", sparkConf.set("spark.sql.testkey", "true"))) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala index 3769de07d8a37..0a87956e96f19 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala @@ -175,7 +175,7 @@ private[hive] class TestHiveSparkSession( @transient private val existingSharedState: Option[TestHiveSharedState], @transient private val parentSessionState: Option[SessionState], private val loadTestTables: Boolean) - extends SparkSession(sc) with Logging { self => + extends SparkSession(sc, Map.empty) with Logging { self => def this(sc: SparkContext, loadTestTables: Boolean) = { this( From d374536c60039523df7ef81749137d85d58b9e54 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Mon, 29 Nov 2021 14:21:50 +0800 Subject: [PATCH 2/8] Update PythonUtils.scala --- .../main/scala/org/apache/spark/api/python/PythonUtils.scala | 4 ---- 1 file changed, 4 deletions(-) diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala index dc982ca534747..549edc446415b 100644 --- a/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala +++ b/core/src/main/scala/org/apache/spark/api/python/PythonUtils.scala @@ -78,10 +78,6 @@ private[spark] object PythonUtils { jm.asScala.toMap } - def 
toScalaMapString(jm: java.util.Map[String, String]): Map[String, String] = { - jm.asScala.toMap - } - def isEncryptionEnabled(sc: JavaSparkContext): Boolean = { sc.conf.get(org.apache.spark.internal.config.IO_ENCRYPTION_ENABLED) } From 65a24b14433ae6ef8abb27f4c457a8b6d36df6a5 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Mon, 29 Nov 2021 14:23:13 +0800 Subject: [PATCH 3/8] follow comment --- .../spark/sql/execution/ui/SQLAppStatusListenerSuite.scala | 2 +- .../test/scala/org/apache/spark/sql/test/TestSQLContext.scala | 2 +- .../test/scala/org/apache/spark/sql/hive/test/TestHive.scala | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala index 490432e041a92..61230641ded81 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/SQLAppStatusListenerSuite.scala @@ -948,7 +948,7 @@ class SQLAppStatusListenerMemoryLeakSuite extends SparkFunSuite { .set(ASYNC_TRACKING_ENABLED, false) withSpark(new SparkContext(conf)) { sc => quietly { - val spark = new SparkSession(sc, Map.empty) + val spark = new SparkSession(sc) import spark.implicits._ // Run 100 successful executions and 100 failed executions. // Each execution only has one job and one stage. diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala index de93c4dee03d7..8817c758e14aa 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala @@ -25,7 +25,7 @@ import org.apache.spark.sql.internal.{SessionState, SessionStateBuilder, SQLConf * A special `SparkSession` prepared for testing. 
*/ private[spark] class TestSparkSession(sc: SparkContext) - extends SparkSession(sc, Map.empty) { self => + extends SparkSession(sc) { self => def this(sparkConf: SparkConf) = { this(new SparkContext("local[2]", "test-sql-context", sparkConf.set("spark.sql.testkey", "true"))) diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala index 0a87956e96f19..3769de07d8a37 100644 --- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala +++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/test/TestHive.scala @@ -175,7 +175,7 @@ private[hive] class TestHiveSparkSession( @transient private val existingSharedState: Option[TestHiveSharedState], @transient private val parentSessionState: Option[SessionState], private val loadTestTables: Boolean) - extends SparkSession(sc, Map.empty) with Logging { self => + extends SparkSession(sc) with Logging { self => def this(sc: SparkContext, loadTestTables: Boolean) = { this( From 530f719378ac11af5c88563d7463a57580b618e5 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Mon, 29 Nov 2021 14:23:16 +0800 Subject: [PATCH 4/8] Update SparkSession.scala --- sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala index d7c25292662bc..bd924e79ca258 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -91,6 +91,8 @@ class SparkSession private( // The call site where this SparkSession was constructed. private val creationSite: CallSite = Utils.getCallSite() + private[sql] def this(sc: SparkContext) = this(sc, Map.empty) + /** * Constructor used in Pyspark. Contains explicit application of Spark Session Extensions * which otherwise only occurs during getOrCreate. 
We cannot add this to the default constructor From f6df6a8d4efa1f22dd280396e11b93ddaedd3e68 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Mon, 29 Nov 2021 14:24:44 +0800 Subject: [PATCH 5/8] update --- .../scala/org/apache/spark/sql/internal/SQLConfSuite.scala | 4 ++-- .../test/scala/org/apache/spark/sql/test/TestSQLContext.scala | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala index 3942d2dbefb2e..13f5778617b5c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala @@ -283,7 +283,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { try { sparkContext.conf.set("spark.to.be.or.not.to.be", "my love") sparkContext.conf.set("spark.sql.with.or.without.you", "my love") - val spark = new SparkSession(sparkContext, Map.empty) + val spark = new SparkSession(sparkContext) assert(spark.conf.get("spark.to.be.or.not.to.be") == "my love") assert(spark.conf.get("spark.sql.with.or.without.you") == "my love") } finally { @@ -303,7 +303,7 @@ class SQLConfSuite extends QueryTest with SharedSparkSession { val previousValue = sparkContext.conf.get(GLOBAL_TEMP_DATABASE) try { sparkContext.conf.set(GLOBAL_TEMP_DATABASE, "a") - val newSession = new SparkSession(sparkContext, Map.empty) + val newSession = new SparkSession(sparkContext) assert(newSession.conf.get(GLOBAL_TEMP_DATABASE) == "a") checkAnswer( newSession.sql(s"SET ${GLOBAL_TEMP_DATABASE.key}"), diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala index 8817c758e14aa..47a6f3617da63 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/test/TestSQLContext.scala @@ -24,8 +24,7 @@ import org.apache.spark.sql.internal.{SessionState, SessionStateBuilder, SQLConf /** * A special `SparkSession` prepared for testing. */ -private[spark] class TestSparkSession(sc: SparkContext) - extends SparkSession(sc) { self => +private[spark] class TestSparkSession(sc: SparkContext) extends SparkSession(sc) { self => def this(sparkConf: SparkConf) = { this(new SparkContext("local[2]", "test-sql-context", sparkConf.set("spark.sql.testkey", "true"))) From b33d254c8b7cd892e350058645fb8a4d5365cc23 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Mon, 29 Nov 2021 15:07:52 +0800 Subject: [PATCH 6/8] update --- python/pyspark/sql/tests/test_session.py | 2 +- .../src/main/scala/org/apache/spark/sql/SparkSession.scala | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/python/pyspark/sql/tests/test_session.py b/python/pyspark/sql/tests/test_session.py index 22e1a5ed85daf..bc986933fa23b 100644 --- a/python/pyspark/sql/tests/test_session.py +++ b/python/pyspark/sql/tests/test_session.py @@ -61,7 +61,7 @@ def test_set_jvm_default_session(self): def test_jvm_default_session_already_set(self): # Here, we assume there is the default session already set in JVM. 
- jsession = self.sc._jvm.SparkSession(self.sc._jsc.sc(), self.sc._jvm.PythonUtils.toScalaMap({})) + jsession = self.sc._jvm.SparkSession(self.sc._jsc.sc()) self.sc._jvm.SparkSession.setDefaultSession(jsession) spark = SparkSession.builder.getOrCreate() diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala index bd924e79ca258..d8962a862df79 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -91,8 +91,6 @@ class SparkSession private( // The call site where this SparkSession was constructed. private val creationSite: CallSite = Utils.getCallSite() - private[sql] def this(sc: SparkContext) = this(sc, Map.empty) - /** * Constructor used in Pyspark. Contains explicit application of Spark Session Extensions * which otherwise only occurs during getOrCreate. We cannot add this to the default constructor @@ -106,6 +104,8 @@ class SparkSession private( new SparkSessionExtensions), initialSessionOptions) } + private[sql] def this(sc: SparkContext) = this(sc, Map.empty) + private[sql] val sessionUUID: String = UUID.randomUUID.toString sparkContext.assertNotStopped() From 4598e8b52e361ef10e0ec3057041b10feb01f8ce Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Mon, 29 Nov 2021 15:27:37 +0800 Subject: [PATCH 7/8] update --- python/pyspark/sql/session.py | 4 +++- python/pyspark/sql/tests/test_session.py | 9 +++++++-- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py index 3d0aa0b1d28e8..e33c720833e4a 100644 --- a/python/pyspark/sql/session.py +++ b/python/pyspark/sql/session.py @@ -305,7 +305,9 @@ def __init__( ): jsparkSession = self._jvm.SparkSession.getDefaultSession().get() else: - jsparkSession = self._jvm.SparkSession(self._jsc.sc(), self._jvm.PythonUtils.toScalaMap(options)) + jsparkSession = self._jvm.SparkSession( + self._jsc.sc(), self._jvm.PythonUtils.toScalaMap(options) + ) self._jsparkSession = jsparkSession self._jwrapped = self._jsparkSession.sqlContext() self._wrapped = SQLContext(self._sc, self, self._jwrapped) diff --git a/python/pyspark/sql/tests/test_session.py b/python/pyspark/sql/tests/test_session.py index bc986933fa23b..06771fac896ba 100644 --- a/python/pyspark/sql/tests/test_session.py +++ b/python/pyspark/sql/tests/test_session.py @@ -296,11 +296,16 @@ def test_create_spark_context_with_initial_session_options(self): conf = SparkConf().set("key1", "value1") sc = SparkContext("local[4]", "SessionBuilderTests", conf=conf) session = ( - SparkSession.builder.config("spark.sql.codegen.comments", "true").enableHiveSupport().getOrCreate() + SparkSession.builder.config("spark.sql.codegen.comments", "true") + .enableHiveSupport() + .getOrCreate() ) self.assertEqual(session._jsparkSession.sharedState().conf().get("key1"), "value1") - self.assertEqual(session._jsparkSession.sharedState().conf().get("spark.sql.codegen.comments"), "true") + self.assertEqual( + session._jsparkSession.sharedState().conf().get("spark.sql.codegen.comments"), + "true", + ) self.assertEqual( session._jsparkSession.sharedState().conf().get("spark.sql.catalogImplementation"), "hive", From 21ab18f8278fdab3a6ff5b7328aea6fa10083593 Mon Sep 17 00:00:00 2001 From: Angerszhuuuu Date: Tue, 30 Nov 2021 11:05:37 +0800 Subject: [PATCH 8/8] update --- python/pyspark/sql/session.py | 4 +--- .../main/scala/org/apache/spark/sql/SparkSession.scala | 8 +++++--- 2 files 
changed, 6 insertions(+), 6 deletions(-) diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py index e33c720833e4a..f94b9c2115044 100644 --- a/python/pyspark/sql/session.py +++ b/python/pyspark/sql/session.py @@ -305,9 +305,7 @@ def __init__( ): jsparkSession = self._jvm.SparkSession.getDefaultSession().get() else: - jsparkSession = self._jvm.SparkSession( - self._jsc.sc(), self._jvm.PythonUtils.toScalaMap(options) - ) + jsparkSession = self._jvm.SparkSession(self._jsc.sc(), options) self._jsparkSession = jsparkSession self._jwrapped = self._jsparkSession.sqlContext() self._wrapped = SQLContext(self._sc, self, self._jwrapped) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala index d8962a862df79..df110aa269e7b 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SparkSession.scala @@ -97,14 +97,16 @@ class SparkSession private( * since that would cause every new session to reinvoke Spark Session Extensions on the currently * running extensions. */ - private[sql] def this(sc: SparkContext, initialSessionOptions: Map[String, String]) = { + private[sql] def this( + sc: SparkContext, + initialSessionOptions: java.util.HashMap[String, String]) = { this(sc, None, None, SparkSession.applyExtensions( sc.getConf.get(StaticSQLConf.SPARK_SESSION_EXTENSIONS).getOrElse(Seq.empty), - new SparkSessionExtensions), initialSessionOptions) + new SparkSessionExtensions), initialSessionOptions.asScala.toMap) } - private[sql] def this(sc: SparkContext) = this(sc, Map.empty) + private[sql] def this(sc: SparkContext) = this(sc, new java.util.HashMap[String, String]()) private[sql] val sessionUUID: String = UUID.randomUUID.toString
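
For reference, a minimal usage sketch of the behavior this series ends up with (an illustrative example, not part of the commits; it assumes a PySpark build containing these patches and mirrors test_create_spark_context_with_initial_session_options). After patch 8, options set on the builder before the JVM session exists are handed to the new SparkSession(sc, initialSessionOptions) constructor as a java.util.HashMap, instead of being set on sharedState().conf() one by one after construction:

    # Illustrative sketch only -- assumes a PySpark build that includes
    # these patches; key names and assertions are taken from the updated
    # test_create_spark_context_with_initial_session_options.
    from pyspark import SparkConf, SparkContext
    from pyspark.sql import SparkSession

    # Create the SparkContext first, so the builder cannot fold options into
    # SparkContext creation and must pass them to the JVM SparkSession.
    conf = SparkConf().set("key1", "value1")
    sc = SparkContext("local[4]", "SessionBuilderTests", conf=conf)

    spark = (
        SparkSession.builder
        .config("spark.sql.codegen.comments", "true")  # becomes an initial session option
        .getOrCreate()
    )

    # Both the SparkContext conf and the builder option are visible in the
    # shared state conf, which is what the updated test asserts.
    shared_conf = spark._jsparkSession.sharedState().conf()
    assert shared_conf.get("key1") == "value1"
    assert shared_conf.get("spark.sql.codegen.comments") == "true"

    spark.stop()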