This is an automated email from the ASF dual-hosted git repository.
ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 26e0967505d [SPARK-41629][CONNECT][FOLLOW] Enable access to
SparkSession from Plugin
26e0967505d is described below
commit 26e0967505dac493d9aeac1b791103f10ad3ff18
Author: Martin Grund <[email protected]>
AuthorDate: Sun Jan 22 09:53:09 2023 +0800
[SPARK-41629][CONNECT][FOLLOW] Enable access to SparkSession from Plugin
### What changes were proposed in this pull request?
This patch allows the planner and command plugins for Spark Connect to
access the Spark Session and let other consumers access the configuration
provided by the module. Without access to the Spark session, the plugins cannot
provide their full functionality.
### Why are the changes needed?
Usability
### Does this PR introduce _any_ user-facing change?
Plugins now have access to the Spark session.
### How was this patch tested?
Added an assertion in a unit test verifying that the Spark session is not null.
Closes #39692 from grundprinzip/SPARK-41629-follow.
Authored-by: Martin Grund <[email protected]>
Signed-off-by: Ruifeng Zheng <[email protected]>
---
.../src/main/scala/org/apache/spark/sql/connect/config/Connect.scala | 2 +-
.../org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala | 2 +-
.../spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala | 1 +
3 files changed, 3 insertions(+), 2 deletions(-)
diff --git
a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala
b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala
index b38f0f6f6d1..64b5bd5d813 100644
---
a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala
+++
b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/config/Connect.scala
@@ -20,7 +20,7 @@ import org.apache.spark.internal.config.ConfigBuilder
import org.apache.spark.network.util.ByteUnit
import org.apache.spark.sql.connect.common.config.ConnectCommon
-private[spark] object Connect {
+object Connect {
val CONNECT_GRPC_BINDING_PORT =
ConfigBuilder("spark.connect.grpc.binding.port")
diff --git
a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
index d72aa162132..f65fc2c8d0f 100644
---
a/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
+++
b/connector/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
@@ -57,7 +57,7 @@ final case class InvalidCommandInput(
private val cause: Throwable = null)
extends Exception(message, cause)
-class SparkConnectPlanner(session: SparkSession) {
+class SparkConnectPlanner(val session: SparkSession) {
private lazy val pythonExec =
sys.env.getOrElse("PYSPARK_PYTHON",
sys.env.getOrElse("PYSPARK_DRIVER_PYTHON", "python3"))
diff --git
a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
index 460fd32098c..dfb7f5d0f90 100644
---
a/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
+++
b/connector/connect/server/src/test/scala/org/apache/spark/sql/connect/plugin/SparkConnectPluginRegistrySuite.scala
@@ -89,6 +89,7 @@ class ExampleCommandPlugin extends CommandPlugin {
return None
}
val cmd = command.unpack(classOf[proto.ExamplePluginCommand])
+ assert(planner.session != null)
SparkContext.getActive.get.setLocalProperty("testingProperty",
cmd.getCustomField)
Some()
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]