From 3dcb66827e8189b2da4db506ab72ee184c7bde50 Mon Sep 17 00:00:00 2001
From: Hyukjin Kwon
Date: Mon, 10 Jul 2023 13:23:06 +0900
Subject: [PATCH 1/5] followup

---
 .../org/apache/spark/api/python/PythonWorkerFactory.scala | 4 ++--
 python/pyspark/sql/tests/connect/client/test_artifact.py  | 4 ++++
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
index d6dcd906d92fb..1f5c079f99941 100644
--- a/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
+++ b/core/src/main/scala/org/apache/spark/api/python/PythonWorkerFactory.scala
@@ -157,7 +157,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String

       // Create and start the worker
       val pb = new ProcessBuilder(Arrays.asList(pythonExec, "-m", workerModule))
-      val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "deafult")
+      val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "default")
       if (sessionId != "default") {
         pb.directory(new File(SparkFiles.getRootDirectory(), sessionId))
       }
@@ -214,7 +214,7 @@ private[spark] class PythonWorkerFactory(pythonExec: String, envVars: Map[String
         // Create and start the daemon
         val command = Arrays.asList(pythonExec, "-m", daemonModule)
         val pb = new ProcessBuilder(command)
-        val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "deafult")
+        val sessionId = envVars.getOrElse("SPARK_CONNECT_SESSION_UUID", "default")
         if (sessionId != "default") {
           pb.directory(new File(SparkFiles.getRootDirectory(), sessionId))
         }
diff --git a/python/pyspark/sql/tests/connect/client/test_artifact.py b/python/pyspark/sql/tests/connect/client/test_artifact.py
index cbd00acf82902..58768d4009922 100644
--- a/python/pyspark/sql/tests/connect/client/test_artifact.py
+++ b/python/pyspark/sql/tests/connect/client/test_artifact.py
@@ -245,6 +245,7 @@ def func(x):
         spark_session.addArtifacts(pyfile_path, pyfile=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], 10)

+    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
     def test_add_pyfile(self):
         self.check_add_pyfile(self.spark)

@@ -272,6 +273,7 @@ def func(x):
         spark_session.addArtifacts(f"{package_path}.zip", pyfile=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], 5)

+    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
     def test_add_zipped_package(self):
         self.check_add_zipped_package(self.spark)

@@ -303,6 +305,7 @@ def func(x):
         spark_session.addArtifacts(f"{archive_path}.zip#my_files", archive=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], "hello world!")

+    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
     def test_add_archive(self):
         self.check_add_archive(self.spark)

@@ -328,6 +331,7 @@ def func(x):
         spark_session.addArtifacts(file_path, file=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], "Hello world!!")

+    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
     def test_add_file(self):
         self.check_add_file(self.spark)

From 361f6147e8a5be3ee48553d53a5b5db6e584a9cf Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Mon, 10 Jul 2023 13:48:58 +0800
Subject: [PATCH 2/5] Update python/pyspark/sql/tests/connect/client/test_artifact.py

---
 python/pyspark/sql/tests/connect/client/test_artifact.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/pyspark/sql/tests/connect/client/test_artifact.py b/python/pyspark/sql/tests/connect/client/test_artifact.py
index 58768d4009922..5a9852f062f03 100644
--- a/python/pyspark/sql/tests/connect/client/test_artifact.py
+++ b/python/pyspark/sql/tests/connect/client/test_artifact.py
@@ -245,7 +245,7 @@ def func(x):
         spark_session.addArtifacts(pyfile_path, pyfile=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], 10)

-    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
+    @unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
     def test_add_pyfile(self):
         self.check_add_pyfile(self.spark)

From 42a19eac72a6a05b3a8f04d0aad0113f36f89753 Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Mon, 10 Jul 2023 13:49:10 +0800
Subject: [PATCH 3/5] Update python/pyspark/sql/tests/connect/client/test_artifact.py

---
 python/pyspark/sql/tests/connect/client/test_artifact.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/pyspark/sql/tests/connect/client/test_artifact.py b/python/pyspark/sql/tests/connect/client/test_artifact.py
index 5a9852f062f03..e3bbca78e51ca 100644
--- a/python/pyspark/sql/tests/connect/client/test_artifact.py
+++ b/python/pyspark/sql/tests/connect/client/test_artifact.py
@@ -273,7 +273,7 @@ def func(x):
         spark_session.addArtifacts(f"{package_path}.zip", pyfile=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], 5)

-    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
+    @unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
     def test_add_zipped_package(self):
         self.check_add_zipped_package(self.spark)

From 0bd40d420218900f4df248ec913ec6eadb8867ed Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Mon, 10 Jul 2023 13:49:18 +0800
Subject: [PATCH 4/5] Update python/pyspark/sql/tests/connect/client/test_artifact.py

---
 python/pyspark/sql/tests/connect/client/test_artifact.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/pyspark/sql/tests/connect/client/test_artifact.py b/python/pyspark/sql/tests/connect/client/test_artifact.py
index e3bbca78e51ca..6ae1b6dfbe598 100644
--- a/python/pyspark/sql/tests/connect/client/test_artifact.py
+++ b/python/pyspark/sql/tests/connect/client/test_artifact.py
@@ -305,7 +305,7 @@ def func(x):
         spark_session.addArtifacts(f"{archive_path}.zip#my_files", archive=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], "hello world!")

-    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
+    @unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
     def test_add_archive(self):
         self.check_add_archive(self.spark)

From 2ed8ea943cdb536a51813058542c818f9852beee Mon Sep 17 00:00:00 2001
From: Kent Yao
Date: Mon, 10 Jul 2023 13:49:24 +0800
Subject: [PATCH 5/5] Update python/pyspark/sql/tests/connect/client/test_artifact.py

---
 python/pyspark/sql/tests/connect/client/test_artifact.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/pyspark/sql/tests/connect/client/test_artifact.py b/python/pyspark/sql/tests/connect/client/test_artifact.py
index 6ae1b6dfbe598..c685000b5ea49 100644
--- a/python/pyspark/sql/tests/connect/client/test_artifact.py
+++ b/python/pyspark/sql/tests/connect/client/test_artifact.py
@@ -331,7 +331,7 @@ def func(x):
         spark_session.addArtifacts(file_path, file=True)
         self.assertEqual(spark_session.range(1).select(func("id")).first()[0], "Hello world!!")

-    @unittest.skip("SPARK-44348: Reeanble Session-based artifact test cases")
+    @unittest.skip("SPARK-44348: Reenable Session-based artifact test cases")
     def test_add_file(self):
         self.check_add_file(self.spark)