This is an automated email from the ASF dual-hosted git repository.

haejoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 547661002fb8 [SPARK-50311][PYTHON][FOLLOWUP] Remove @remote_only from 
supported APIs
547661002fb8 is described below

commit 547661002fb8f772de13b048db50dffdc28da676
Author: Haejoon Lee <[email protected]>
AuthorDate: Fri Nov 29 18:00:16 2024 +0900

    [SPARK-50311][PYTHON][FOLLOWUP] Remove @remote_only from supported APIs
    
    ### What changes were proposed in this pull request?
    
    This PR follows up https://github.com/apache/spark/pull/48843 to remove 
the remote_only decorator from supported APIs and updates the test
    
    ### Why are the changes needed?
    
    To check Connect parity properly
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, it's test-only
    
    ### How was this patch tested?
    
    Updated the existing test
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #49012 from itholic/SPARK-50311-followup.
    
    Authored-by: Haejoon Lee <[email protected]>
    Signed-off-by: Haejoon Lee <[email protected]>
---
 python/pyspark/sql/session.py                          | 4 ----
 python/pyspark/sql/tests/test_connect_compatibility.py | 4 ----
 2 files changed, 8 deletions(-)

diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index d19e01eecc89..1ec85e52bbcd 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -2268,7 +2268,6 @@ class SparkSession(SparkConversionMixin):
             messageParameters={"feature": "SparkSession.interruptOperation"},
         )
 
-    @remote_only
     def addTag(self, tag: str) -> None:
         """
         Add a tag to be assigned to all the operations started by this thread 
in this session.
@@ -2293,7 +2292,6 @@ class SparkSession(SparkConversionMixin):
         """
         self._jsparkSession.addTag(tag)
 
-    @remote_only
     def removeTag(self, tag: str) -> None:
         """
         Remove a tag previously added to be assigned to all the operations 
started by this thread in
@@ -2311,7 +2309,6 @@ class SparkSession(SparkConversionMixin):
         """
         self._jsparkSession.removeTag(tag)
 
-    @remote_only
     def getTags(self) -> Set[str]:
         """
         Get the tags that are currently set to be assigned to all the 
operations started by this
@@ -2337,7 +2334,6 @@ class SparkSession(SparkConversionMixin):
 
         return python_set
 
-    @remote_only
     def clearTags(self) -> None:
         """
         Clear the current thread's operation tags.
diff --git a/python/pyspark/sql/tests/test_connect_compatibility.py 
b/python/pyspark/sql/tests/test_connect_compatibility.py
index 3d74e796cd7a..ef83dc3834d0 100644
--- a/python/pyspark/sql/tests/test_connect_compatibility.py
+++ b/python/pyspark/sql/tests/test_connect_compatibility.py
@@ -264,18 +264,14 @@ class ConnectCompatibilityTestsMixin:
         expected_missing_connect_methods = {
             "addArtifact",
             "addArtifacts",
-            "addTag",
             "clearProgressHandlers",
-            "clearTags",
             "copyFromLocalToFs",
-            "getTags",
             "interruptAll",
             "interruptOperation",
             "interruptTag",
             "newSession",
             "registerProgressHandler",
             "removeProgressHandler",
-            "removeTag",
         }
         expected_missing_classic_methods = set()
         self.check_compatibility(


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to