This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1a2977e289ac [SPARK-52881][SQL] Implement the make_time function in Scala
1a2977e289ac is described below

commit 1a2977e289ac509210aaa11d50d5320237b14bd3
Author: Uros Bojanic <uros.boja...@databricks.com>
AuthorDate: Tue Jul 22 10:57:00 2025 +0200

    [SPARK-52881][SQL] Implement the make_time function in Scala
    
    ### What changes were proposed in this pull request?
    Implement the `make_time` function in Scala API.
    
    ### Why are the changes needed?
    Expand API support for the `MakeTime` expression.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, the new function is now available in Scala API.
    
    ### How was this patch tested?
    Added appropriate Scala function tests.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #51573 from uros-db/scala-make_time.
    
    Lead-authored-by: Uros Bojanic <uros.boja...@databricks.com>
    Co-authored-by: Maxim Gekk <max.g...@gmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 python/pyspark/sql/tests/test_functions.py         |  5 +++-
 .../scala/org/apache/spark/sql/functions.scala     | 16 ++++++++++
 .../apache/spark/sql/TimeFunctionsSuiteBase.scala  | 35 ++++++++++++++++++++++
 3 files changed, 55 insertions(+), 1 deletion(-)

diff --git a/python/pyspark/sql/tests/test_functions.py b/python/pyspark/sql/tests/test_functions.py
index e2b3e33756ba..0ce5178c9240 100644
--- a/python/pyspark/sql/tests/test_functions.py
+++ b/python/pyspark/sql/tests/test_functions.py
@@ -81,7 +81,10 @@ class FunctionsTestsMixin:
         missing_in_py = jvm_fn_set.difference(py_fn_set)
 
         # Functions that we expect to be missing in python until they are added to pyspark
-        expected_missing_in_py = set()
+        expected_missing_in_py = set(
+            # TODO(SPARK-52888): Implement the make_time function in Python
+            ["make_time"]
+        )
 
         self.assertEqual(
             expected_missing_in_py, missing_in_py, "Missing functions in pyspark not as expected"
diff --git a/sql/api/src/main/scala/org/apache/spark/sql/functions.scala b/sql/api/src/main/scala/org/apache/spark/sql/functions.scala
index c431b093599e..1d00f91248f8 100644
--- a/sql/api/src/main/scala/org/apache/spark/sql/functions.scala
+++ b/sql/api/src/main/scala/org/apache/spark/sql/functions.scala
@@ -871,6 +871,22 @@ object functions {
   def last_value(e: Column, ignoreNulls: Column): Column =
     Column.fn("last_value", e, ignoreNulls)
 
+  /**
+   * Create time from hour, minute and second fields. For invalid inputs it will throw an error.
+   *
+   * @param hour
+   *   the hour to represent, from 0 to 23
+   * @param minute
+   *   the minute to represent, from 0 to 59
+   * @param second
+   *   the second to represent, from 0 to 59.999999
+   * @group datetime_funcs
+   * @since 4.1.0
+   */
+  def make_time(hour: Column, minute: Column, second: Column): Column = {
+    Column.fn("make_time", hour, minute, second)
+  }
+
   /**
    * Aggregate function: returns the most frequent value in a group.
    *
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/TimeFunctionsSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/TimeFunctionsSuiteBase.scala
index fb80368b3e4f..4702b9d43498 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/TimeFunctionsSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/TimeFunctionsSuiteBase.scala
@@ -28,6 +28,41 @@ import org.apache.spark.sql.types._
 abstract class TimeFunctionsSuiteBase extends QueryTest with SharedSparkSession {
   import testImplicits._
 
+  test("SPARK-52881: make_time function") {
+    // Input data for the function.
+    val schema = StructType(Seq(
+      StructField("hour", IntegerType, nullable = false),
+      StructField("minute", IntegerType, nullable = false),
+      StructField("second", DecimalType(16, 6), nullable = false)
+    ))
+    val data = Seq(
+      Row(0, 0, BigDecimal(0.0)),
+      Row(1, 2, BigDecimal(3.4)),
+      Row(23, 59, BigDecimal(59.999999))
+    )
+    val df = spark.createDataFrame(spark.sparkContext.parallelize(data), schema)
+
+    // Test the function using both `selectExpr` and `select`.
+    val result1 = df.selectExpr(
+      "make_time(hour, minute, second)"
+    )
+    val result2 = df.select(
+      make_time(col("hour"), col("minute"), col("second"))
+    )
+    // Check that both methods produce the same result.
+    checkAnswer(result1, result2)
+
+    // Expected output of the function.
+    val expected = Seq(
+      "00:00:00",
+      "01:02:03.4",
+      "23:59:59.999999"
+    ).toDF("timeString").select(col("timeString").cast("time"))
+    // Check that the results match the expected output.
+    checkAnswer(result1, expected)
+    checkAnswer(result2, expected)
+  }
+
   test("SPARK-52885: hour function") {
     // Input data for the function.
     val schema = StructType(Seq(


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to