This is an automated email from the ASF dual-hosted git repository.
ruifengz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 470d622a926b [SPARK-53112][PYTHON][TESTS][FOLLOW-UP] Remove some tests
for Non-ANSI
470d622a926b is described below
commit 470d622a926b77023c3b4bb5144ef88197964f8f
Author: Ruifeng Zheng <[email protected]>
AuthorDate: Fri Sep 26 19:59:33 2025 +0800
[SPARK-53112][PYTHON][TESTS][FOLLOW-UP] Remove some tests for Non-ANSI
### What changes were proposed in this pull request?
Remove some newly added tests that fail when ANSI mode is disabled (non-ANSI).
see https://github.com/apache/spark/actions/runs/18025276597/job/51291154866
```
======================================================================
FAIL [1.563s]: test_make_timestamp_ntz
(pyspark.sql.tests.test_functions.FunctionsTests.test_make_timestamp_ntz)
Comprehensive test cases for make_timestamp_ntz with various arguments and
edge cases.
----------------------------------------------------------------------
Traceback (most recent call last):
File "/__w/spark/spark/python/pyspark/sql/tests/test_functions.py", line
999, in test_make_timestamp_ntz
with self.assertRaises(Exception):
AssertionError: Exception not raised
----------------------------------------------------------------------
```
### Why are the changes needed?
To make the non-ANSI scheduled CI job pass.
### Does this PR introduce _any_ user-facing change?
no
### How was this patch tested?
Manually checked.
### Was this patch authored or co-authored using generative AI tooling?
no
Closes #52466 from zhengruifeng/remove_ansi_test.
Authored-by: Ruifeng Zheng <[email protected]>
Signed-off-by: Ruifeng Zheng <[email protected]>
---
python/pyspark/sql/tests/test_functions.py | 56 ------------------------------
1 file changed, 56 deletions(-)
diff --git a/python/pyspark/sql/tests/test_functions.py
b/python/pyspark/sql/tests/test_functions.py
index e39609ac615d..91e519c6f8c7 100644
--- a/python/pyspark/sql/tests/test_functions.py
+++ b/python/pyspark/sql/tests/test_functions.py
@@ -995,62 +995,6 @@ class FunctionsTestsMixin:
with self.assertRaises(Exception):
F.make_timestamp_ntz(date=df_dt.date)
- # Test 17: Invalid data types - should raise exception for invalid
string to int cast
- with self.assertRaises(Exception):
- self.spark.range(1).select(
- F.make_timestamp_ntz(
- F.lit("invalid"), F.lit(5), F.lit(22), F.lit(10),
F.lit(30), F.lit(0)
- )
- ).collect()
-
- # Test 18: Out of range values (month=13) - should raise exception for
invalid date
- df_invalid = self.spark.createDataFrame(
- [(2024, 13, 22, 10, 30, 0)], ["year", "month", "day", "hour",
"minute", "second"]
- )
- with self.assertRaises(Exception):
- df_invalid.select(
- F.make_timestamp_ntz(
- df_invalid.year,
- df_invalid.month,
- df_invalid.day,
- df_invalid.hour,
- df_invalid.minute,
- df_invalid.second,
- )
- ).collect()
-
- # Test 19: Out of range values (hour=25) - should raise exception for
invalid time
- df_invalid_hour = self.spark.createDataFrame(
- [(2024, 5, 22, 25, 30, 0)], ["year", "month", "day", "hour",
"minute", "second"]
- )
- with self.assertRaises(Exception):
- df_invalid_hour.select(
- F.make_timestamp_ntz(
- df_invalid_hour.year,
- df_invalid_hour.month,
- df_invalid_hour.day,
- df_invalid_hour.hour,
- df_invalid_hour.minute,
- df_invalid_hour.second,
- )
- ).collect()
-
- # Test 20: February 29 in non-leap year
- df_non_leap = self.spark.createDataFrame(
- [(2023, 2, 29, 0, 0, 0)], ["year", "month", "day", "hour",
"minute", "second"]
- )
- with self.assertRaises(Exception): # Should raise runtime exception
for invalid date
- df_non_leap.select(
- F.make_timestamp_ntz(
- df_non_leap.year,
- df_non_leap.month,
- df_non_leap.day,
- df_non_leap.hour,
- df_non_leap.minute,
- df_non_leap.second,
- )
- ).collect()
-
def test_make_date(self):
# SPARK-36554: expose make_date expression
df = self.spark.createDataFrame([(2020, 6, 26)], ["Y", "M", "D"])
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]