This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new f17a8d7f0d86 [SPARK-51557][SQL][TESTS] Add tests for TIME data type in Java API
f17a8d7f0d86 is described below

commit f17a8d7f0d868d188901e20e1bf2ef732a2816bd
Author: Mike Artz <far...@hotmail.com>
AuthorDate: Mon Jul 7 22:49:58 2025 +0200

    [SPARK-51557][SQL][TESTS] Add tests for TIME data type in Java API
    
    ### What changes were proposed in this pull request?
    
    This PR adds tests for the TIME data type in Spark's Java API, covering Dataset and UDF functionality with `java.time.LocalTime`:
      - A Dataset filter operation with `java.time.LocalTime` (there is no similar test for `TimestampType`).
      - UDF registration and execution with `TimeType` in `udf8Test()`, mirroring `udf7Test()` for `TimestampType`.
      - `testLocalTimeEncoder()` already exists, giving encoder-test parity with `TimestampType`; a sketch of that round-trip appears below.
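
    A minimal, illustrative sketch of the `LocalTime` encoder round-trip that the existing test covers, assuming a local SparkSession (the `LocalTimeRoundTrip` class name is only a placeholder):

    ```java
    import java.time.LocalTime;
    import java.util.Arrays;
    import java.util.List;

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Encoder;
    import org.apache.spark.sql.Encoders;
    import org.apache.spark.sql.SparkSession;

    public class LocalTimeRoundTrip {
      public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
            .master("local[1]")
            .appName("LocalTimeRoundTrip")
            .getOrCreate();

        // Encode java.time.LocalTime values into a Dataset and read them back.
        Encoder<LocalTime> encoder = Encoders.LOCALTIME();
        List<LocalTime> data = Arrays.asList(LocalTime.of(9, 30, 45), LocalTime.of(22, 10, 10));
        Dataset<LocalTime> ds = spark.createDataset(data, encoder);

        // The collected values should equal the input.
        System.out.println(data.equals(ds.collectAsList()));

        spark.stop();
      }
    }
    ```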
    
    ### Why are the changes needed?
    
    As part of the TIME data type SPIP (SPARK-51162), we need test coverage in the Java API for Datasets and UDFs.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
      Added new test methods:
      - `JavaDatasetSuite.testLocalTimeFilter` - Tests Dataset filter with `LocalTime`
      - `JavaUDFSuite.udf8Test` - Tests UDF registration and execution with `LocalTime`
    
    ![Screenshot 2025-07-07 at 4 00 58 AM](https://github.com/user-attachments/assets/160f207e-e3e5-45a2-ac5d-35b55a76215e)
    ![Screenshot 2025-07-07 at 3 59 24 AM](https://github.com/user-attachments/assets/c18bf26d-a6ef-4445-95b0-a67566ac9fa0)
    
      I executed the tests locally and also ran `./build/mvn test-compile -pl sql/core` to verify compilation.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #51387 from fartzy/SPARK-51557_Add_tests_for_TIME_data_type_in_Java_API.
    
    Authored-by: Mike Artz <far...@hotmail.com>
    Signed-off-by: Max Gekk <max.g...@gmail.com>
---
 .../test/org/apache/spark/sql/JavaDatasetSuite.java  | 20 ++++++++++++++++++++
 .../java/test/org/apache/spark/sql/JavaUDFSuite.java | 11 +++++++++++
 2 files changed, 31 insertions(+)

diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
index 692b5c0ebc3a..06df8df13216 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
@@ -812,6 +812,26 @@ public class JavaDatasetSuite implements Serializable {
     Assertions.assertEquals(data, ds.collectAsList());
   }
 
+  @Test
+  public void testLocalTimeFilter() {
+    Encoder<LocalTime> encoder = Encoders.LOCALTIME();
+    List<LocalTime> data = Arrays.asList(
+      LocalTime.of(9, 30, 45),
+      LocalTime.of(14, 10, 10),
+      LocalTime.of(22, 10, 10)
+    );
+    Dataset<LocalTime> ds = spark.createDataset(data, encoder);
+
+    Dataset<LocalTime> filtered = ds.filter(
+      (FilterFunction<LocalTime>) time -> time.isAfter(LocalTime.of(12, 0, 0))
+    );
+    List<LocalTime> expectedFiltered = Arrays.asList(
+      LocalTime.of(14, 10, 10),
+      LocalTime.of(22, 10, 10)
+    );
+    Assertions.assertEquals(expectedFiltered, filtered.collectAsList());
+  }
+
   public static class KryoSerializable {
     String value;
 
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaUDFSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaUDFSuite.java
index c1f48a922b72..732824995514 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaUDFSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaUDFSuite.java
@@ -19,6 +19,7 @@ package test.org.apache.spark.sql;
 
 import java.io.Serializable;
 import java.time.LocalDate;
+import java.time.LocalTime;
 import java.util.List;
 
 import org.apache.spark.sql.catalyst.FunctionIdentifier;
@@ -34,6 +35,7 @@ import org.apache.spark.sql.Row;
 import org.apache.spark.sql.classic.SparkSession;
 import org.apache.spark.sql.api.java.UDF2;
 import org.apache.spark.sql.types.DataTypes;
+import org.apache.spark.sql.types.TimeType;
 
 // The test suite itself is Serializable so that anonymous Function implementations can be
 // serialized, as an alternative to converting these anonymous classes to static inner classes;
@@ -136,6 +138,15 @@ public class JavaUDFSuite implements Serializable {
     }
   }
 
+  @Test
+  public void udf8Test() {
+    spark.udf().register(
+        "plusTwoHours",
+        (java.time.LocalTime lt) -> lt.plusHours(2), new TimeType(6));
+    Row result = spark.sql("SELECT plusTwoHours(TIME '09:10:10')").head();
+    Assertions.assertEquals(LocalTime.of(11, 10, 10), result.get(0));
+  }
+
   @Test
   public void sourceTest() {
     spark.udf().register("stringLengthTest", (String str) -> str.length(), 
DataTypes.IntegerType);

