This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new e9a822bef051 [SPARK-52384][CONNECT] Fix bug: Connect should be case-insensitive for JDBC options
e9a822bef051 is described below

commit e9a822bef05177f6f062b7b031b602e95607a1e7
Author: beliefer <belie...@163.com>
AuthorDate: Wed Jun 4 10:43:26 2025 +0900

    [SPARK-52384][CONNECT] Fix bug: Connect should be case-insensitive for JDBC options
    
    ### What changes were proposed in this pull request?
    This PR aims to fix a bug: Spark Connect should treat JDBC options case-insensitively.
    Please refer to the comment at https://github.com/apache/spark/pull/50059#discussion_r2111071577.
    In fact, the built-in Scala API ensures these option keys are lowercase:
    
https://github.com/apache/spark/blob/b18b956f967038db4b751a3845154f2b1d4f5f79/sql/connect/common/src/main/scala/org/apache/spark/sql/connect/DataFrameReader.scala#L126
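    
    Below is a minimal sketch of the lookup behavior this fix relies on. It is
    illustrative only: it assumes access to Spark's internal
    org.apache.spark.sql.catalyst.util.CaseInsensitiveMap and uses the literal
    keys "url" and "dbtable", which correspond to JDBCOptions.JDBC_URL and
    JDBCOptions.JDBC_TABLE_NAME.
    
        import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
    
        // Option keys as a client might send them, with mixed case.
        val raw = Map("URL" -> "jdbc:h2:mem:testdb", "dbTable" -> "people")
    
        // A plain Map lookup against the lowercase constants misses the keys ...
        raw.contains("url")                  // false
    
        // ... while CaseInsensitiveMap matches them regardless of case.
        val opts = CaseInsensitiveMap(raw)
        opts.contains("url")                 // true
        opts.get("dbtable")                  // Some("people")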
    
    ### Why are the changes needed?
    Fix a bug where Spark Connect did not treat JDBC options case-insensitively.
    
    ### Does this PR introduce _any_ user-facing change?
    'Yes'.
    It restores the original behavior of treating JDBC option keys case-insensitively.
    
    ### How was this patch tested?
    GA (GitHub Actions).
    
    ### Was this patch authored or co-authored using generative AI tooling?
    'No'.
    
    Closes #51068 from beliefer/SPARK-52384.
    
    Authored-by: beliefer <belie...@163.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../apache/spark/sql/connect/planner/SparkConnectPlanner.scala | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
index 5978560c67d4..28fcd09faf28 100644
--- a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
+++ b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala
@@ -1516,20 +1516,20 @@ class SparkConnectPlanner(
 
       case proto.Read.ReadTypeCase.DATA_SOURCE if !rel.getIsStreaming =>
         val reader = session.read
+        val localMap = CaseInsensitiveMap[String](rel.getDataSource.getOptionsMap.asScala.toMap)
         if (rel.getDataSource.getFormat == "jdbc" && rel.getDataSource.getPredicatesCount > 0) {
-          if (!rel.getDataSource.getOptionsMap.containsKey(JDBCOptions.JDBC_URL) ||
-            !rel.getDataSource.getOptionsMap.containsKey(JDBCOptions.JDBC_TABLE_NAME)) {
+          if (!localMap.contains(JDBCOptions.JDBC_URL) ||
+            !localMap.contains(JDBCOptions.JDBC_TABLE_NAME)) {
             throw InvalidInputErrors.invalidJdbcParams()
           }
 
-          val url = rel.getDataSource.getOptionsMap.get(JDBCOptions.JDBC_URL)
-          val table = rel.getDataSource.getOptionsMap.get(JDBCOptions.JDBC_TABLE_NAME)
+          val url = localMap.get(JDBCOptions.JDBC_URL).get
+          val table = localMap.get(JDBCOptions.JDBC_TABLE_NAME).get
           val predicates = rel.getDataSource.getPredicatesList.asScala.toArray
           val properties = new Properties()
           properties.putAll(rel.getDataSource.getOptionsMap)
           reader.jdbc(url, table, predicates, properties).queryExecution.analyzed
         } else if (rel.getDataSource.getPredicatesCount == 0) {
-          val localMap = CaseInsensitiveMap[String](rel.getDataSource.getOptionsMap.asScala.toMap)
           if (rel.getDataSource.hasFormat) {
             reader.format(rel.getDataSource.getFormat)
           }
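
A minimal, hypothetical reproduction of the code path this patch touches,
assuming a Spark Connect session named spark, a reachable JDBC endpoint, and
placeholder values for the URL, table, and partition predicates:

    import java.util.Properties

    val props = new Properties()
    props.put("user", "test")

    // reader.jdbc(url, table, predicates, properties) is the branch that now
    // resolves the "url" and "dbtable" options through CaseInsensitiveMap.
    val partitions = Array("id < 1000", "id >= 1000")
    val df = spark.read.jdbc(
      "jdbc:postgresql://localhost:5432/testdb",
      "public.people",
      partitions,
      props)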


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
