This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new cdc25791f878 [SPARK-52735][SQL] Fix missing error conditions for SQL UDFs
cdc25791f878 is described below

commit cdc25791f8783204e479af21fda5c291b132f851
Author: Allison Wang <allison.w...@databricks.com>
AuthorDate: Thu Jul 10 08:34:17 2025 +0900

    [SPARK-52735][SQL] Fix missing error conditions for SQL UDFs
    
    ### What changes were proposed in this pull request?
    
    This PR adds two missing error conditions for SQL UDFs.
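
    As a quick illustration (taken from the new sql-udf.sql test cases in
    the diff below), the added error conditions cover calling a scalar SQL
    UDF as a table function and calling a table-valued SQL UDF as a scalar
    expression:

        -- Scalar and table-valued SQL UDFs
        CREATE FUNCTION foo3_14a() RETURNS INT RETURN 1;
        CREATE FUNCTION foo3_14b() RETURNS TABLE (a INT) RETURN SELECT 1;

        -- Raises NOT_A_TABLE_FUNCTION: scalar UDF used in the FROM clause
        SELECT * FROM foo3_14a();

        -- Raises NOT_A_SCALAR_FUNCTION: table UDF used as a scalar expression
        SELECT foo3_14b();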
    
    ### Why are the changes needed?
    
    To fix missing error conditions for SQL UDFs.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Added more SQL query tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #51427 from allisonwang-db/spark-52735-fix-err-cls-def.
    
    Authored-by: Allison Wang <allison.w...@databricks.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../src/main/resources/error/error-conditions.json | 12 ++++
 .../sql-tests/analyzer-results/sql-udf.sql.out     | 70 ++++++++++++++++++++++
 .../test/resources/sql-tests/inputs/sql-udf.sql    |  7 +++
 .../resources/sql-tests/results/sql-udf.sql.out    | 60 +++++++++++++++++++
 4 files changed, 149 insertions(+)

diff --git a/common/utils/src/main/resources/error/error-conditions.json b/common/utils/src/main/resources/error/error-conditions.json
index cb7e2faaf2be..dc3b3c6aea55 100644
--- a/common/utils/src/main/resources/error/error-conditions.json
+++ b/common/utils/src/main/resources/error/error-conditions.json
@@ -4411,6 +4411,18 @@
     ],
     "sqlState" : "42809"
   },
+  "NOT_A_SCALAR_FUNCTION" : {
+    "message" : [
+      "<functionName> appears as a scalar expression here, but the function 
was defined as a table function. Please update the query to move the function 
call into the FROM clause, or redefine <functionName> as a scalar function 
instead."
+    ],
+    "sqlState" : "42887"
+  },
+  "NOT_A_TABLE_FUNCTION" : {
+    "message" : [
+      "<functionName> appears as a table function here, but the function was 
defined as a scalar function. Please update the query to move the function call 
outside the FROM clause, or redefine <functionName> as a table function 
instead."
+    ],
+    "sqlState" : "42887"
+  },
   "NOT_NULL_ASSERT_VIOLATION" : {
     "message" : [
       "NULL value appeared in non-nullable field: <walkedTypePath>If the 
schema is inferred from a Scala tuple/case class, or a Java bean, please try to 
use scala.Option[_] or other nullable types (such as java.lang.Integer instead 
of int/scala.Int)."
diff --git a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
index 6130a134adc8..184457328cbe 100644
--- a/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/analyzer-results/sql-udf.sql.out
@@ -4336,6 +4336,76 @@ RESET spark.sql.ansi.enabled
 ResetCommand spark.sql.ansi.enabled
 
 
+-- !query
+CREATE FUNCTION foo3_14a() RETURNS INT RETURN 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo3_14a`"
+  }
+}
+
+
+-- !query
+CREATE FUNCTION foo3_14b() RETURNS TABLE (a INT) RETURN SELECT 1
+-- !query analysis
+org.apache.spark.sql.catalyst.analysis.FunctionAlreadyExistsException
+{
+  "errorClass" : "ROUTINE_ALREADY_EXISTS",
+  "sqlState" : "42723",
+  "messageParameters" : {
+    "existingRoutineType" : "routine",
+    "newRoutineType" : "routine",
+    "routineName" : "`default`.`foo3_14b`"
+  }
+}
+
+
+-- !query
+SELECT * FROM foo3_14a()
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NOT_A_TABLE_FUNCTION",
+  "sqlState" : "42887",
+  "messageParameters" : {
+    "functionName" : "`spark_catalog`.`default`.`foo3_14a`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 15,
+    "stopIndex" : 24,
+    "fragment" : "foo3_14a()"
+  } ]
+}
+
+
+-- !query
+SELECT foo3_14b()
+-- !query analysis
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NOT_A_SCALAR_FUNCTION",
+  "sqlState" : "42887",
+  "messageParameters" : {
+    "functionName" : "`spark_catalog`.`default`.`foo3_14b`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 17,
+    "fragment" : "foo3_14b()"
+  } ]
+}
+
+
 -- !query
 CREATE FUNCTION foo4_0() RETURNS TABLE (x INT) RETURN SELECT 1
 -- !query analysis
diff --git a/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql b/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
index 0c387fa4201a..d83921e13859 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/sql-udf.sql
@@ -788,6 +788,13 @@ SELECT * FROM foo3_3ct();
 SELECT * FROM foo3_3dt();
 RESET spark.sql.ansi.enabled;
 
+-- 3.14 Invalid usage of SQL scalar/table functions in query clauses.
+CREATE FUNCTION foo3_14a() RETURNS INT RETURN 1;
+CREATE FUNCTION foo3_14b() RETURNS TABLE (a INT) RETURN SELECT 1;
+-- Expect error
+SELECT * FROM foo3_14a();
+SELECT foo3_14b();
+
 -- 4. SQL table functions
 CREATE FUNCTION foo4_0() RETURNS TABLE (x INT) RETURN SELECT 1;
 CREATE FUNCTION foo4_1(x INT) RETURNS TABLE (a INT) RETURN SELECT x;
diff --git a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
index 07abc815777d..aa7736892290 100644
--- a/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/sql-udf.sql.out
@@ -3895,6 +3895,66 @@ struct<>
 
 
 
+-- !query
+CREATE FUNCTION foo3_14a() RETURNS INT RETURN 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+CREATE FUNCTION foo3_14b() RETURNS TABLE (a INT) RETURN SELECT 1
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+SELECT * FROM foo3_14a()
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NOT_A_TABLE_FUNCTION",
+  "sqlState" : "42887",
+  "messageParameters" : {
+    "functionName" : "`spark_catalog`.`default`.`foo3_14a`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 15,
+    "stopIndex" : 24,
+    "fragment" : "foo3_14a()"
+  } ]
+}
+
+
+-- !query
+SELECT foo3_14b()
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+{
+  "errorClass" : "NOT_A_SCALAR_FUNCTION",
+  "sqlState" : "42887",
+  "messageParameters" : {
+    "functionName" : "`spark_catalog`.`default`.`foo3_14b`"
+  },
+  "queryContext" : [ {
+    "objectType" : "",
+    "objectName" : "",
+    "startIndex" : 8,
+    "stopIndex" : 17,
+    "fragment" : "foo3_14b()"
+  } ]
+}
+
+
 -- !query
 CREATE FUNCTION foo4_0() RETURNS TABLE (x INT) RETURN SELECT 1
 -- !query schema


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
