From bb17665955ad536d8c81605da9a59fb94b6e0162 Mon Sep 17 00:00:00 2001 From: Dongjoon Hyun Date: Sun, 8 Dec 2024 01:32:18 +0800 Subject: [PATCH] [SPARK-49249][SQL][FOLLOWUP] Rename `spark.sql.artifact.isolation.(always.apply.classloader -> alwaysApplyClassloader)` ### What changes were proposed in this pull request? This is a follow-up to rename a new configuration to comply with the Apache Spark config namespace. - #48120 ### Why are the changes needed? Currently, the `spark.sql.artifact.isolation.always.apply.classloader` config name introduces the redundant namespaces `spark.sql.artifact.isolation.always.*` and `spark.sql.artifact.isolation.always.apply.*`. ``` - spark.sql.artifact.isolation.always.apply.classloader + spark.sql.artifact.isolation.alwaysApplyClassloader ``` Since we already have `spark.sql.artifact.isolation.enabled`, we had better keep the above in the same namespace, `spark.sql.artifact.isolation.*`. ### Does this PR introduce _any_ user-facing change? No, this is a newly added configuration in Spark 4.0.0. ### How was this patch tested? Pass the CIs. ### Was this patch authored or co-authored using generative AI tooling? No. Closes #49101 from dongjoon-hyun/SPARK-49249. 
Authored-by: Dongjoon Hyun Signed-off-by: yangjie01 --- python/pyspark/sql/connect/session.py | 2 +- .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/pyspark/sql/connect/session.py b/python/pyspark/sql/connect/session.py index 2d544f3f0eca7..7c40f1dd54a2b 100644 --- a/python/pyspark/sql/connect/session.py +++ b/python/pyspark/sql/connect/session.py @@ -1044,7 +1044,7 @@ def _start_connect_server(master: str, opts: Dict[str, Any]) -> None: default_conf = { "spark.plugins": "org.apache.spark.sql.connect.SparkConnectPlugin", "spark.sql.artifact.isolation.enabled": "true", - "spark.sql.artifact.isolation.always.apply.classloader": "true", + "spark.sql.artifact.isolation.alwaysApplyClassloader": "true", } if "SPARK_TESTING" in os.environ: diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index c0d35fa0ce2b4..47b670b730ad5 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -3999,7 +3999,7 @@ object SQLConf { .createWithDefault(true) val ARTIFACTS_SESSION_ISOLATION_ALWAYS_APPLY_CLASSLOADER = - buildConf("spark.sql.artifact.isolation.always.apply.classloader") + buildConf("spark.sql.artifact.isolation.alwaysApplyClassloader") .internal() .doc("When enabled, the classloader holding per-session artifacts will always be applied " + "during SQL executions (useful for Spark Connect). When disabled, the classloader will " +