This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2bc9dd3525dc [SPARK-52656][SQL] Fix current_time()
2bc9dd3525dc is described below

commit 2bc9dd3525dc0fb5eeb826f5eccdede8ba276a89
Author: Max Gekk <[email protected]>
AuthorDate: Thu Jul 3 11:20:25 2025 +0200

    [SPARK-52656][SQL] Fix current_time()
    
    ### What changes were proposed in this pull request?
    After switching the internal representation of TIME values from microseconds 
to nanoseconds in the PR https://github.com/apache/spark/pull/51156, the 
`current_time(p)` function returns incorrect results. In this PR, I propose to fix 
two datetime functions, `truncateTimeToPrecision()` and `instantToNanosOfDay()`, 
as well as the expression `CurrentTime`.
    
    ### Why are the changes needed?
    It fixes incorrect behaviour of the `current_time()` function.
    
    ### Does this PR introduce _any_ user-facing change?
    No, the TIME data type and related functions haven't been released yet.
    
    ### How was this patch tested?
    By running new tests:
    ```sh
    $ build/sbt "test:testOnly *DateTimeUtilsSuite"
    $ build/sbt "test:testOnly *ComputeCurrentTimeSuite"
    ```
    and by manual testing:
    ```sql
    spark-sql (default)> SELECT current_time(2);
    00:01:12.24
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #51351 from MaxGekk/fix-truncateTimeMicrosToPrecision.
    
    Authored-by: Max Gekk <[email protected]>
    Signed-off-by: Max Gekk <[email protected]>
---
 .../sql/catalyst/util/SparkDateTimeUtils.scala     | 34 +++++++++++-----------
 .../org/apache/spark/sql/types/TimeType.scala      |  1 +
 .../sql/catalyst/expressions/timeExpressions.scala |  4 +--
 .../sql/catalyst/optimizer/finishAnalysis.scala    |  6 ++--
 .../sql/catalyst/util/DateTimeUtilsSuite.scala     | 20 +++++++++++++
 5 files changed, 43 insertions(+), 22 deletions(-)

diff --git 
a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala
 
b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala
index b88600b8b8af..c6846ef480ba 100644
--- 
a/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala
+++ 
b/sql/api/src/main/scala/org/apache/spark/sql/catalyst/util/SparkDateTimeUtils.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.catalyst.util
 import java.lang.invoke.{MethodHandles, MethodType}
 import java.sql.{Date, Timestamp}
 import java.time.{Instant, LocalDate, LocalDateTime, LocalTime, ZonedDateTime, 
ZoneId, ZoneOffset}
-import java.time.temporal.ChronoField.{MICRO_OF_DAY, NANO_OF_DAY}
+import java.time.temporal.ChronoField.NANO_OF_DAY
 import java.util.TimeZone
 import java.util.concurrent.TimeUnit.{MICROSECONDS, NANOSECONDS}
 import java.util.regex.Pattern
@@ -145,42 +145,42 @@ trait SparkDateTimeUtils {
   }
 
   /**
-   * Gets the number of microseconds since midnight using the given time zone.
+   * Gets the number of nanoseconds since midnight using the given time zone.
    */
-  def instantToMicrosOfDay(instant: Instant, timezone: String): Long = {
-    instantToMicrosOfDay(instant, getZoneId(timezone))
+  def instantToNanosOfDay(instant: Instant, timezone: String): Long = {
+    instantToNanosOfDay(instant, getZoneId(timezone))
   }
 
   /**
-   * Gets the number of microseconds since midnight using the given time zone.
+   * Gets the number of nanoseconds since midnight using the given time zone.
    */
-  def instantToMicrosOfDay(instant: Instant, zoneId: ZoneId): Long = {
+  def instantToNanosOfDay(instant: Instant, zoneId: ZoneId): Long = {
     val localDateTime = LocalDateTime.ofInstant(instant, zoneId)
-    localDateTime.toLocalTime.getLong(MICRO_OF_DAY)
+    localDateTime.toLocalTime.getLong(NANO_OF_DAY)
   }
 
   /**
-   * Truncates a time value (in microseconds) to the specified fractional 
precision `p`.
+   * Truncates a time value (in nanoseconds) to the specified fractional 
precision `p`.
    *
    * For example, if `p = 3`, we keep millisecond resolution and discard any 
digits beyond the
-   * thousand-microsecond place. So a value like `123456` microseconds 
(12:34:56.123456) becomes
+   * thousand-nanosecond place. So a value like `123456` microseconds 
(12:34:56.123456) becomes
    * `123000` microseconds (12:34:56.123).
    *
-   * @param micros
-   *   The original time in microseconds.
+   * @param nanos
+   *   The original time in nanoseconds.
    * @param p
    *   The fractional second precision (range 0 to 6).
    * @return
-   *   The truncated microsecond value, preserving only `p` fractional digits.
+   *   The truncated nanosecond value, preserving only `p` fractional digits.
    */
-  def truncateTimeMicrosToPrecision(micros: Long, p: Int): Long = {
+  def truncateTimeToPrecision(nanos: Long, p: Int): Long = {
     assert(
-      p >= TimeType.MIN_PRECISION && p <= TimeType.MICROS_PRECISION,
+      TimeType.MIN_PRECISION <= p && p <= TimeType.MAX_PRECISION,
       s"Fractional second precision $p out" +
-        s" of range 
[${TimeType.MIN_PRECISION}..${TimeType.MICROS_PRECISION}].")
-    val scale = TimeType.MICROS_PRECISION - p
+        s" of range [${TimeType.MIN_PRECISION}..${TimeType.MAX_PRECISION}].")
+    val scale = TimeType.NANOS_PRECISION - p
     val factor = math.pow(10, scale).toLong
-    (micros / factor) * factor
+    (nanos / factor) * factor
   }
 
   /**
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/types/TimeType.scala 
b/sql/api/src/main/scala/org/apache/spark/sql/types/TimeType.scala
index 216239dca5c1..135ad278438e 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/types/TimeType.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/types/TimeType.scala
@@ -52,6 +52,7 @@ object TimeType {
   val MICROS_PRECISION: Int = 6
   val MAX_PRECISION: Int = MICROS_PRECISION
   val DEFAULT_PRECISION: Int = MICROS_PRECISION
+  val NANOS_PRECISION: Int = 9
 
   def apply(): TimeType = new TimeType(DEFAULT_PRECISION)
 }
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
index 5d1b9d7d3abd..9636c568dac0 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/timeExpressions.scala
@@ -513,8 +513,8 @@ case class CurrentTime(
   override def inputTypes: Seq[AbstractDataType] = Seq(IntegerType)
 
   override def eval(input: InternalRow): Any = {
-    val currentTimeOfDayMicros = 
DateTimeUtils.instantToMicrosOfDay(java.time.Instant.now(), zoneId)
-    DateTimeUtils.truncateTimeMicrosToPrecision(currentTimeOfDayMicros, 
precision)
+    val currentTimeOfDayNanos = 
DateTimeUtils.instantToNanosOfDay(java.time.Instant.now(), zoneId)
+    DateTimeUtils.truncateTimeToPrecision(currentTimeOfDayNanos, precision)
   }
 }
 
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala
index 21e09f2e56d1..c9c26d473b98 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/optimizer/finishAnalysis.scala
@@ -31,7 +31,7 @@ import org.apache.spark.sql.catalyst.trees.TreePattern._
 import org.apache.spark.sql.catalyst.trees.TreePatternBits
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.catalyst.util.DateTimeUtils.{convertSpecialDate, 
convertSpecialTimestamp, convertSpecialTimestampNTZ, instantToMicros, 
localDateTimeToMicros}
-import 
org.apache.spark.sql.catalyst.util.SparkDateTimeUtils.{instantToMicrosOfDay, 
truncateTimeMicrosToPrecision}
+import 
org.apache.spark.sql.catalyst.util.SparkDateTimeUtils.{instantToNanosOfDay, 
truncateTimeToPrecision}
 import org.apache.spark.sql.catalyst.util.TypeUtils.toSQLExpr
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.types._
@@ -114,7 +114,7 @@ object ComputeCurrentTime extends Rule[LogicalPlan] {
     val instant = Instant.now()
     val currentTimestampMicros = instantToMicros(instant)
     val currentTime = Literal.create(currentTimestampMicros, TimestampType)
-    val currentTimeOfDayMicros = instantToMicrosOfDay(instant, 
conf.sessionLocalTimeZone)
+    val currentTimeOfDayNanos = instantToNanosOfDay(instant, 
conf.sessionLocalTimeZone)
     val timezone = Literal.create(conf.sessionLocalTimeZone, StringType)
     val currentDates = collection.mutable.HashMap.empty[ZoneId, Literal]
     val localTimestamps = collection.mutable.HashMap.empty[ZoneId, Literal]
@@ -132,7 +132,7 @@ object ComputeCurrentTime extends Rule[LogicalPlan] {
                 DateTimeUtils.microsToDays(currentTimestampMicros, cd.zoneId), 
DateType)
             })
           case currentTimeType : CurrentTime =>
-            val truncatedTime = 
truncateTimeMicrosToPrecision(currentTimeOfDayMicros,
+            val truncatedTime = truncateTimeToPrecision(currentTimeOfDayNanos,
               currentTimeType.precision)
             Literal.create(truncatedTime, TimeType(currentTimeType.precision))
           case CurrentTimestamp() | Now() => currentTime
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
index 80fc44c9097e..2289f00119ce 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/util/DateTimeUtilsSuite.scala
@@ -1230,4 +1230,24 @@ class DateTimeUtilsSuite extends SparkFunSuite with 
Matchers with SQLHelper {
     }.getMessage
     assert(msg.contains("Invalid value"))
   }
+
+  test("instant to nanos of day") {
+    
assert(instantToNanosOfDay(Instant.parse("1970-01-01T00:00:01.001002003Z"), 
"UTC") ==
+      1001002003)
+    assert(instantToNanosOfDay(Instant.parse("0001-01-01T23:59:59.999999Z"), 
"UTC") ==
+      localTime(23, 59, 59, 999999))
+    assert(instantToNanosOfDay(Instant.parse("2025-07-02T19:24:12Z"),
+      ZoneId.of("America/Los_Angeles")) == localTime(12, 24, 12))
+  }
+
+  test("truncate time to precision") {
+    assert(truncateTimeToPrecision(1234, 0) == 0)
+    assert(truncateTimeToPrecision(1000, 6) == 1000)
+    assert(truncateTimeToPrecision(localTime(0, 0, 0, 999999), 6) == 999999000)
+    assert(truncateTimeToPrecision(localTime(0, 0, 0, 999999), 5) == 999990000)
+    assert(truncateTimeToPrecision(localTime(23, 59, 59, 123000), 2) ==
+      localTime(23, 59, 59, 120000))
+    assert(truncateTimeToPrecision(localTime(23, 59, 59, 987654), 1) ==
+      localTime(23, 59, 59, 900000))
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to