This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d5b79c0cfdc3 [SPARK-48348][SPARK-48376][FOLLOWUP][SQL] Replace parseScript with runSqlScript in SQL Scripting Interpreter test suite
d5b79c0cfdc3 is described below

commit d5b79c0cfdc330b6442ad5cb94669706b2ad2c83
Author: David Milicevic <[email protected]>
AuthorDate: Fri Sep 6 16:35:12 2024 +0200

    [SPARK-48348][SPARK-48376][FOLLOWUP][SQL] Replace parseScript with runSqlScript in SQL Scripting Interpreter test suite
    
    ### What changes were proposed in this pull request?
    A previous [pull request](https://github.com/apache/spark/pull/47973) introduced new tests to `SqlScriptingInterpreterSuite` (among other suites) in which `parseScript` was accidentally used instead of `runSqlScript`.
    While the same exception is thrown either way (it is raised during the parsing phase), using `parseScript` breaks consistency with the other tests in this suite and requires an unnecessary import, so this patch replaces it.
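    For reference, here is the pattern the suite now uses consistently (a minimal sketch lifted from the hunks below; `runSqlScript` is assumed to be the suite's helper that runs a script end-to-end, so errors raised during parsing still surface through it):
    
    ```scala
    // Run the script through the full interpreter pipeline; the invalid
    // label usage is still detected while the script is being parsed.
    checkError(
      exception = intercept[SqlScriptingException] {
        runSqlScript(sqlScriptText)
      },
      errorClass = "INVALID_LABEL_USAGE.ITERATE_IN_COMPOUND",
      parameters = Map("labelName" -> "LBL"))
    ```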
    
    ### Why are the changes needed?
    The changes are minor; they improve consistency across the SQL scripting test suites.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    This patch only alters existing tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #48016 from davidm-db/interpreter_test_suite_fix.
    
    Authored-by: David Milicevic <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 .../apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala  | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
index 5568f85fc476..6d836884c5d5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala
@@ -20,7 +20,6 @@ package org.apache.spark.sql.scripting
 import org.apache.spark.SparkException
 import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset, QueryTest, Row}
 import org.apache.spark.sql.catalyst.QueryPlanningTracker
-import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parseScript
 import org.apache.spark.sql.exceptions.SqlScriptingException
 import org.apache.spark.sql.test.SharedSparkSession
 
@@ -575,7 +574,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
         |END""".stripMargin
     checkError(
       exception = intercept[SqlScriptingException] {
-        parseScript(sqlScriptText)
+        runSqlScript(sqlScriptText)
       },
       errorClass = "INVALID_LABEL_USAGE.ITERATE_IN_COMPOUND",
       parameters = Map("labelName" -> "LBL"))
@@ -614,7 +613,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
         |END""".stripMargin
     checkError(
       exception = intercept[SqlScriptingException] {
-        parseScript(sqlScriptText)
+        runSqlScript(sqlScriptText)
       },
       errorClass = "INVALID_LABEL_USAGE.DOES_NOT_EXIST",
       parameters = Map("labelName" -> "RANDOMLBL", "statementType" -> "LEAVE"))
@@ -629,7 +628,7 @@ class SqlScriptingInterpreterSuite extends QueryTest with SharedSparkSession {
         |END""".stripMargin
     checkError(
       exception = intercept[SqlScriptingException] {
-        parseScript(sqlScriptText)
+        runSqlScript(sqlScriptText)
       },
       errorClass = "INVALID_LABEL_USAGE.DOES_NOT_EXIST",
      parameters = Map("labelName" -> "RANDOMLBL", "statementType" -> "ITERATE"))


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
