This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new aed5d110c2aa [SPARK-50731][SQL][TESTS] Mark `*TransformWithStateSuite` 
as `SlowSQLTest`
aed5d110c2aa is described below

commit aed5d110c2aabe8e6176c64f93d4a91bf07acc62
Author: Dongjoon Hyun <[email protected]>
AuthorDate: Mon Jan 6 11:30:26 2025 +0800

    [SPARK-50731][SQL][TESTS] Mark `*TransformWithStateSuite` as `SlowSQLTest`
    
    ### What changes were proposed in this pull request?
    
    This PR aims to mark `*TransformWithStateSuite` as `SlowSQLTest`. For 
example, `StateDataSourceTransformWithStateSuite` took almost 2 minutes.
    ```
    54950   2025-01-05T19:48:22.9043468Z [info] 
StateDataSourceTransformWithStateSuite:
    ...
    56139   2025-01-05T19:50:15.1404546Z [info] BloomFilterAggregateQuerySuite:
    ```
    
    ### Why are the changes needed?
    
    To balance the test pipelines by moving newly added long-running test 
suites.
    
    Currently,
    - https://github.com/apache/spark/actions/runs/12613864071/job/35152182685 
(`sql - other tests` took 1h 38 mins)
    - https://github.com/apache/spark/actions/runs/12613864071/job/35152182636 
(`sql - slow tests` took 47 mins)
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, this is a test-only change.
    
    ### How was this patch tested?
    
    Manually checked the CI logs.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #49361 from dongjoon-hyun/SPARK-50731.
    
    Authored-by: Dongjoon Hyun <[email protected]>
    Signed-off-by: Kent Yao <[email protected]>
---
 .../datasources/v2/state/StateDataSourceTransformWithStateSuite.scala   | 2 ++
 .../scala/org/apache/spark/sql/streaming/TransformWithStateSuite.scala  | 2 ++
 2 files changed, 4 insertions(+)

diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/state/StateDataSourceTransformWithStateSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/state/StateDataSourceTransformWithStateSuite.scala
index b402344a7317..fe224608fffd 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/state/StateDataSourceTransformWithStateSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/v2/state/StateDataSourceTransformWithStateSuite.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.functions.{col, explode, 
timestamp_seconds}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.streaming.{InputMapRow, ListState, MapInputEvent, 
MapOutputEvent, MapStateTTLProcessor, MaxEventTimeStatefulProcessor, 
OutputMode, RunningCountStatefulProcessor, 
RunningCountStatefulProcessorWithProcTimeTimerUpdates, StatefulProcessor, 
StateStoreMetricsTest, TestMapStateProcessor, TimeMode, TimerValues, 
TransformWithStateSuiteUtils, Trigger, TTLConfig, ValueState}
 import org.apache.spark.sql.streaming.util.StreamManualClock
+import org.apache.spark.tags.SlowSQLTest
 import org.apache.spark.util.Utils
 
 /** Stateful processor of single value state var with non-primitive type */
@@ -125,6 +126,7 @@ class SessionGroupsStatefulProcessorWithTTL extends
 /**
  * Test suite to verify integration of state data source reader with the 
transformWithState operator
  */
+@SlowSQLTest
 class StateDataSourceTransformWithStateSuite extends StateStoreMetricsTest
   with AlsoTestWithRocksDBFeatures with AlsoTestWithEncodingTypes {
 
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TransformWithStateSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TransformWithStateSuite.scala
index 601699c8320f..97dad5fe78a1 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TransformWithStateSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TransformWithStateSuite.scala
@@ -38,6 +38,7 @@ import org.apache.spark.sql.functions.timestamp_seconds
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.streaming.util.StreamManualClock
 import org.apache.spark.sql.types.{IntegerType, LongType, StringType, 
StructType}
+import org.apache.spark.tags.SlowSQLTest
 
 object TransformWithStateSuiteUtils {
   val NUM_SHUFFLE_PARTITIONS = 5
@@ -441,6 +442,7 @@ class SleepingTimerProcessor extends 
StatefulProcessor[String, String, String] {
 /**
  * Class that adds tests for transformWithState stateful streaming operator
  */
+@SlowSQLTest
 class TransformWithStateSuite extends StateStoreMetricsTest
   with AlsoTestWithRocksDBFeatures with AlsoTestWithEncodingTypes {
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to