This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new bbc80430dfce [SPARK-51563][SQL] Support the fully qualified type name TIME(n) WITHOUT TIME ZONE
bbc80430dfce is described below

commit bbc80430dfce72336c5ff5184f9d3d935c3ca850
Author: Max Gekk <[email protected]>
AuthorDate: Sat Jun 14 16:03:25 2025 +0300

    [SPARK-51563][SQL] Support the fully qualified type name TIME(n) WITHOUT TIME ZONE
    
    ### What changes were proposed in this pull request?
    In this PR, I propose to recognize the optional suffix `WITHOUT TIME ZONE` of the type `TIME(n)`, and to treat `TIME(n) WITHOUT TIME ZONE` and `TIME(n)` as synonyms.
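    
    As a quick illustration (an informal sketch, not code from this patch), both spellings should parse to the same Catalyst type via the parser API used in the tests below:
    ```scala
    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

    // The suffix is purely syntactic: both forms yield TimeType(6).
    assert(
      CatalystSqlParser.parseDataType("TIME(6)") ==
        CatalystSqlParser.parseDataType("TIME(6) WITHOUT TIME ZONE"))
    ```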
    
    ### Why are the changes needed?
    To conform to the ANSI SQL standard.
    
    ### Does this PR introduce _any_ user-facing change?
    No. It just extends the existing syntax.
    
    ### How was this patch tested?
    By running the affected test suites:
    ```
    $ build/sbt "test:testOnly *DataTypeParserSuite"
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z 
keywords.sql"
    $ build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite -- -z 
keywords-enforced.sql"
    ```
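    
    For an interactive sanity check (a sketch mirroring the new `DataTypeParserSuite` cases, not part of the patch itself), the suffix and the precision bounds can be probed from a Scala REPL with the catalyst module on the classpath:
    ```scala
    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser

    // Accepted, per the suite: the suffix with precisions such as 0 and 6.
    println(CatalystSqlParser.parseDataType("time(0) without time zone"))  // TimeType(0)

    // Still rejected, suffix or not: unsupported precisions raise
    // UNSUPPORTED_TIME_PRECISION (see the time(8) test case).
    // CatalystSqlParser.parseDataType("time(8) without time zone")
    ```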
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #51177 from MaxGekk/time-without-timezone.
    
    Authored-by: Max Gekk <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 docs/sql-ref-ansi-compliance.md                    |  1 +
 .../spark/sql/catalyst/parser/SqlBaseLexer.g4      |  1 +
 .../spark/sql/catalyst/parser/SqlBaseParser.g4     |  4 +++
 .../sql/catalyst/parser/DataTypeAstBuilder.scala   | 14 ++++++++--
 .../sql/catalyst/parser/DataTypeParserSuite.scala  | 30 ++++++++++++++++++++++
 .../sql-tests/results/keywords-enforced.sql.out    |  1 +
 .../resources/sql-tests/results/keywords.sql.out   |  1 +
 .../sql-tests/results/nonansi/keywords.sql.out     |  1 +
 .../org/apache/spark/sql/JsonFunctionsSuite.scala  |  2 +-
 .../org/apache/spark/sql/XmlFunctionsSuite.scala   |  2 +-
 .../ThriftServerWithSparkContextSuite.scala        |  2 +-
 11 files changed, 54 insertions(+), 5 deletions(-)

diff --git a/docs/sql-ref-ansi-compliance.md b/docs/sql-ref-ansi-compliance.md
index ef79ef187136..acef717ef40a 100644
--- a/docs/sql-ref-ansi-compliance.md
+++ b/docs/sql-ref-ansi-compliance.md
@@ -794,6 +794,7 @@ Below is a list of all the keywords in Spark SQL.
 |WINDOW|non-reserved|non-reserved|reserved|
 |WITH|reserved|non-reserved|reserved|
 |WITHIN|reserved|non-reserved|reserved|
+|WITHOUT|non-reserved|non-reserved|non-reserved|
 |X|non-reserved|non-reserved|non-reserved|
 |YEAR|non-reserved|non-reserved|non-reserved|
 |YEARS|non-reserved|non-reserved|non-reserved|
diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
index c698f2696eb4..d3a1958f2b47 100644
--- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
+++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
@@ -507,6 +507,7 @@ WHILE: 'WHILE';
 WINDOW: 'WINDOW';
 WITH: 'WITH';
 WITHIN: 'WITHIN';
+WITHOUT: 'WITHOUT';
 YEAR: 'YEAR';
 YEARS: 'YEARS';
 ZONE: 'ZONE';
diff --git a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
index 08f222b2f412..b56a7d39f628 100644
--- a/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+++ b/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
@@ -1343,6 +1343,8 @@ dataType
     | INTERVAL from=(YEAR | MONTH) (TO to=MONTH)?               #yearMonthIntervalDataType
     | INTERVAL from=(DAY | HOUR | MINUTE | SECOND)
       (TO to=(HOUR | MINUTE | SECOND))?                         #dayTimeIntervalDataType
+    | TIME (LEFT_PAREN precision=INTEGER_VALUE RIGHT_PAREN)?
+      (WITHOUT TIME ZONE)?                                      #timeDataType
     | type (LEFT_PAREN INTEGER_VALUE
       (COMMA INTEGER_VALUE)* RIGHT_PAREN)?                      #primitiveDataType
     ;
@@ -2013,6 +2015,7 @@ ansiNonReserved
     | WEEKS
     | WHILE
     | WINDOW
+    | WITHOUT
     | YEAR
     | YEARS
     | ZONE
@@ -2423,6 +2426,7 @@ nonReserved
     | WINDOW
     | WITH
     | WITHIN
+    | WITHOUT
     | YEAR
     | YEARS
     | ZONE
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
index bf9a250d6499..b18b783c7bc8 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/parser/DataTypeAstBuilder.scala
@@ -65,6 +65,18 @@ class DataTypeAstBuilder extends SqlBaseParserBaseVisitor[AnyRef] {
       ctx.parts.asScala.map(_.getText).toSeq
     }
 
+  /**
+   * Resolve/create the TIME primitive type.
+   */
+  override def visitTimeDataType(ctx: TimeDataTypeContext): DataType = withOrigin(ctx) {
+    val precision = if (ctx.precision == null) {
+      TimeType.MICROS_PRECISION
+    } else {
+      ctx.precision.getText.toInt
+    }
+    TimeType(precision)
+  }
+
   /**
    * Resolve/create a primitive type.
    */
@@ -79,8 +91,6 @@ class DataTypeAstBuilder extends SqlBaseParserBaseVisitor[AnyRef] {
       case (FLOAT | REAL, Nil) => FloatType
       case (DOUBLE, Nil) => DoubleType
       case (DATE, Nil) => DateType
-      case (TIME, Nil) => TimeType(TimeType.MICROS_PRECISION)
-      case (TIME, precision :: Nil) => TimeType(precision.getText.toInt)
       case (TIMESTAMP, Nil) => SqlApiConf.get.timestampType
       case (TIMESTAMP_NTZ, Nil) => TimestampNTZType
       case (TIMESTAMP_LTZ, Nil) => TimestampType
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
index 8b61328a0099..bf15e184b650 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
@@ -58,8 +58,11 @@ class DataTypeParserSuite extends SparkFunSuite with SQLHelper {
   checkDataType("deC", DecimalType.USER_DEFAULT)
   checkDataType("DATE", DateType)
   checkDataType("TimE", TimeType())
+  checkDataType("TimE WiTHOUT TiME ZoNE", TimeType())
   checkDataType("time(0)", TimeType(0))
+  checkDataType("time(0) without time zone", TimeType(0))
   checkDataType("TIME(6)", TimeType(6))
+  checkDataType("TIME(6) WITHOUT TIME ZONE", TimeType(6))
   checkDataType("timestamp", TimestampType)
   checkDataType("timestamp_ntz", TimestampNTZType)
   checkDataType("timestamp_ltz", TimestampType)
@@ -183,11 +186,38 @@ class DataTypeParserSuite extends SparkFunSuite with SQLHelper {
       },
       condition = "UNSUPPORTED_TIME_PRECISION",
       parameters = Map("precision" -> "9"))
+    checkError(
+      exception = intercept[SparkException] {
+        CatalystSqlParser.parseDataType("time(8) without time zone")
+      },
+      condition = "UNSUPPORTED_TIME_PRECISION",
+      parameters = Map("precision" -> "8"))
     checkError(
       exception = intercept[ParseException] {
         CatalystSqlParser.parseDataType("time(-1)")
       },
       condition = "PARSE_SYNTAX_ERROR",
       parameters = Map("error" -> "'('", "hint" -> ""))
+    checkError(
+      exception = intercept[ParseException] {
+        CatalystSqlParser.parseDataType("time(-100) WITHOUT TIME ZONE")
+      },
+      condition = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'('", "hint" -> ""))
+  }
+
+  test("invalid TIME suffix") {
+    checkError(
+      exception = intercept[ParseException] {
+        CatalystSqlParser.parseDataType("time(0) WITHOUT TIMEZONE")
+      },
+      condition = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'WITHOUT'", "hint" -> ""))
+    checkError(
+      exception = intercept[ParseException] {
+        CatalystSqlParser.parseDataType("time(0) WITH TIME ZONE")
+      },
+      condition = "PARSE_SYNTAX_ERROR",
+      parameters = Map("error" -> "'WITH'", "hint" -> ""))
   }
 }
diff --git a/sql/core/src/test/resources/sql-tests/results/keywords-enforced.sql.out b/sql/core/src/test/resources/sql-tests/results/keywords-enforced.sql.out
index a72b2c4dc8da..ef17566850e3 100644
--- a/sql/core/src/test/resources/sql-tests/results/keywords-enforced.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/keywords-enforced.sql.out
@@ -390,6 +390,7 @@ WHILE       false
 WINDOW false
 WITH   true
 WITHIN true
+WITHOUT        false
 X      false
 YEAR   false
 YEARS  false
diff --git a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
index 59f860eb7ef4..97309774cc37 100644
--- a/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/keywords.sql.out
@@ -390,6 +390,7 @@ WHILE       false
 WINDOW false
 WITH   false
 WITHIN false
+WITHOUT        false
 X      false
 YEAR   false
 YEARS  false
diff --git a/sql/core/src/test/resources/sql-tests/results/nonansi/keywords.sql.out b/sql/core/src/test/resources/sql-tests/results/nonansi/keywords.sql.out
index 59f860eb7ef4..97309774cc37 100644
--- a/sql/core/src/test/resources/sql-tests/results/nonansi/keywords.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/nonansi/keywords.sql.out
@@ -390,6 +390,7 @@ WHILE       false
 WINDOW false
 WITH   false
 WITHIN false
+WITHOUT        false
 X      false
 YEAR   false
 YEARS  false
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
index ea185b6b4901..68d98cd0d182 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/JsonFunctionsSuite.scala
@@ -559,7 +559,7 @@ class JsonFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = "42601",
       parameters = Map(
         "error" -> "'InvalidType'",
-        "hint" -> ": extra input 'InvalidType'"
+        "hint" -> ""
       ),
       context = ExpectedContext(
         fragment = "from_json(value, 'time InvalidType')",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/XmlFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/XmlFunctionsSuite.scala
index afb0ceac5b50..51509cc5ec3e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/XmlFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/XmlFunctionsSuite.scala
@@ -366,7 +366,7 @@ class XmlFunctionsSuite extends QueryTest with SharedSparkSession {
       sqlState = "42601",
       parameters = Map(
         "error" -> "'InvalidType'",
-        "hint" -> ": extra input 'InvalidType'"
+        "hint" -> ""
       ),
       context = ExpectedContext(
         fragment = "from_xml(value, 'time InvalidType')",
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
index 33ac3ebc6c95..a394295360f5 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
@@ -214,7 +214,7 @@ trait ThriftServerWithSparkContextSuite extends SharedThriftServer {
       val sessionHandle = client.openSession(user, "")
       val infoValue = client.getInfo(sessionHandle, GetInfoType.CLI_ODBC_KEYWORDS)
       // scalastyle:off line.size.limit
-      assert(infoValue.getStringValue == "ADD,AFTER,AGGREGATE,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,ATOMIC,AUTHORIZATION,BEGIN,BETWEEN,BIGINT,BINARY,BINDING,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CALL,CALLED,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLATION,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPENSATION,COMPUTE,CONCATENATE,CONDITION,CONSTRAINT,CONTAINS,CONTINUE,C [...]
+      assert(infoValue.getStringValue == "ADD,AFTER,AGGREGATE,ALL,ALTER,ALWAYS,ANALYZE,AND,ANTI,ANY,ANY_VALUE,ARCHIVE,ARRAY,AS,ASC,AT,ATOMIC,AUTHORIZATION,BEGIN,BETWEEN,BIGINT,BINARY,BINDING,BOOLEAN,BOTH,BUCKET,BUCKETS,BY,BYTE,CACHE,CALL,CALLED,CASCADE,CASE,CAST,CATALOG,CATALOGS,CHANGE,CHAR,CHARACTER,CHECK,CLEAR,CLUSTER,CLUSTERED,CODEGEN,COLLATE,COLLATION,COLLECTION,COLUMN,COLUMNS,COMMENT,COMMIT,COMPACT,COMPACTIONS,COMPENSATION,COMPUTE,CONCATENATE,CONDITION,CONSTRAINT,CONTAINS,CONTINUE,C [...]
       // scalastyle:on line.size.limit
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
