This is an automated email from the ASF dual-hosted git repository. dongjoon pushed a commit to branch branch-4.0 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-4.0 by this push: new b45f188b0e4d [SPARK-52384][CONNECT][4.0] Fix bug Connect should case insensitive for JDBC options b45f188b0e4d is described below commit b45f188b0e4dccc604ac32b7143303a1a7f692c3 Author: beliefer <belie...@163.com> AuthorDate: Thu Jun 5 08:49:19 2025 -0700 [SPARK-52384][CONNECT][4.0] Fix bug Connect should case insensitive for JDBC options ### What changes were proposed in this pull request? This PR is a 4.0 backport related to https://github.com/apache/spark/pull/51068 ### Why are the changes needed? Fix a bug: Connect should be case-insensitive for JDBC options. ### Does this PR introduce _any_ user-facing change? 'Yes'. Restore the original behavior. ### How was this patch tested? GA. ### Was this patch authored or co-authored using generative AI tooling? 'No'. Closes #51095 from beliefer/SPARK-52384_4.0. Authored-by: beliefer <belie...@163.com> Signed-off-by: Dongjoon Hyun <dongj...@apache.org> --- .../apache/spark/sql/connect/planner/SparkConnectPlanner.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala index 16b8c1afe7e2..72415b002401 100644 --- a/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala +++ b/sql/connect/server/src/main/scala/org/apache/spark/sql/connect/planner/SparkConnectPlanner.scala @@ -1384,20 +1384,20 @@ class SparkConnectPlanner( case proto.Read.ReadTypeCase.DATA_SOURCE if !rel.getIsStreaming => val reader = session.read + val localMap = CaseInsensitiveMap[String](rel.getDataSource.getOptionsMap.asScala.toMap) if (rel.getDataSource.getFormat == "jdbc" && rel.getDataSource.getPredicatesCount > 0) { - if (!rel.getDataSource.getOptionsMap.containsKey(JDBCOptions.JDBC_URL) || - 
!rel.getDataSource.getOptionsMap.containsKey(JDBCOptions.JDBC_TABLE_NAME)) { + if (!localMap.contains(JDBCOptions.JDBC_URL) || + !localMap.contains(JDBCOptions.JDBC_TABLE_NAME)) { throw InvalidPlanInput(s"Invalid jdbc params, please specify jdbc url and table.") } - val url = rel.getDataSource.getOptionsMap.get(JDBCOptions.JDBC_URL) - val table = rel.getDataSource.getOptionsMap.get(JDBCOptions.JDBC_TABLE_NAME) + val url = localMap.get(JDBCOptions.JDBC_URL).get + val table = localMap.get(JDBCOptions.JDBC_TABLE_NAME).get val predicates = rel.getDataSource.getPredicatesList.asScala.toArray val properties = new Properties() properties.putAll(rel.getDataSource.getOptionsMap) reader.jdbc(url, table, predicates, properties).queryExecution.analyzed } else if (rel.getDataSource.getPredicatesCount == 0) { - val localMap = CaseInsensitiveMap[String](rel.getDataSource.getOptionsMap.asScala.toMap) if (rel.getDataSource.hasFormat) { reader.format(rel.getDataSource.getFormat) } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org