This is an automated email from the ASF dual-hosted git repository.
gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new e312318f5dea [SPARK-53621][CORE][FOLLOWUP] Adding Comments And
Enriching Tests of Continue Handler
e312318f5dea is described below
commit e312318f5dea183c29ba8fecef2677010b9f76cf
Author: Teodor Djelic <[email protected]>
AuthorDate: Tue Oct 7 16:09:29 2025 -0700
[SPARK-53621][CORE][FOLLOWUP] Adding Comments And Enriching Tests of
Continue Handler
### What changes were proposed in this pull request?
This is a follow-up PR with the following changes:
- Additional comments to further explain the implementation
- Expanded the existing tests to increase code coverage by adding
branches to conditional statements.
- Removed duplicate tests.
### Why are the changes needed?
- Parts of the initial PR were left unclear and undocumented; this PR
helps describe what the code does.
- Better test coverage is needed to ensure maintaining correctness.
- A duplicate of an existing test has no reason to exist.
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
This change only introduces test changes and comments, so it was tested
only by running the tests.
### Was this patch authored or co-authored using generative AI tooling?
No.
Closes #52508 from
TeodorDjelic/continue-handler-enriching-tests-and-adding-comments.
Authored-by: Teodor Djelic <[email protected]>
Signed-off-by: Gengliang Wang <[email protected]>
---
.../sql/scripting/SqlScriptingExecution.scala | 4 +-
.../scripting/SqlScriptingExecutionContext.scala | 3 +-
.../sql/scripting/SqlScriptingExecutionNode.scala | 9 +++
.../sql/scripting/SqlScriptingInterpreter.scala | 8 +--
.../sql/scripting/SqlScriptingExecutionSuite.scala | 76 ++++------------------
5 files changed, 32 insertions(+), 68 deletions(-)
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala
index 096ad11dd065..2a849aa2d604 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala
@@ -122,7 +122,7 @@ class SqlScriptingExecution(
context.frames.remove(context.frames.size - 1)
- // If the last frame is a handler, set leave statement to be the next
one in the
+ // If the last frame is an EXIT handler, set leave statement to be the
next one in the
// innermost scope that should be exited.
if (lastFrame.frameType == SqlScriptingFrameType.EXIT_HANDLER
&& context.frames.nonEmpty) {
@@ -136,6 +136,8 @@ class SqlScriptingExecution(
injectLeaveStatement(context.frames.last.executionPlan,
lastFrame.scopeLabel.get)
}
+ // If the last frame is a CONTINUE handler, leave the handler without
injecting anything, but
+ // skip the conditional statement if the exception originated from its
conditional expression.
if (lastFrame.frameType == SqlScriptingFrameType.CONTINUE_HANDLER
&& context.frames.nonEmpty) {
// Remove the scope if handler is executed.
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala
index 08ba54e6e4e4..c91cbe02bccc 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala
@@ -94,7 +94,8 @@ object SqlScriptingFrameType extends Enumeration {
* @param executionPlan CompoundBody which need to be executed.
* @param frameType Type of the frame.
* @param scopeLabel Label of the scope where handler is defined.
- * Available only for frameType = HANDLER.
+ * Available only for frameType = EXIT_HANDLER, frameType =
CONTINUE_HANDLER
+ * and frameType = SQL_STORED_PROCEDURE.
*/
class SqlScriptingExecutionFrame(
val executionPlan: CompoundBodyExec,
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala
index 598e379c73ac..aa2c2f405021 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala
@@ -110,6 +110,15 @@ trait NonLeafStatementExec extends CompoundStatementExec {
* Conditional node in the execution tree. It is a conditional non-leaf node.
*/
trait ConditionalStatementExec extends NonLeafStatementExec {
+ /**
+ * Interrupted flag indicates if the statement has been interrupted, and is
used
+ * for skipping the execution of the conditional statements, by setting the
hasNext
+ * to be false.
+ * Interrupt is issued by the CONTINUE HANDLER when the conditional
statement's
+ * conditional expression throws an exception, and is issued by the Leave
Statement
+ * when the ForStatementExec executes the Leave Statement injected by the
EXIT
+ * HANDLER.
+ */
protected[scripting] var interrupted: Boolean = false
}
diff --git
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala
index eebdb681f62c..e58e4b258307 100644
---
a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala
+++
b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala
@@ -87,11 +87,11 @@ case class SqlScriptingInterpreter(session: SparkSession) {
args,
context)
- // Scope label should be Some(compoundBody.label.get) for both handler
types
val handlerExec = new ExceptionHandlerExec(
- handlerBodyExec,
- handler.handlerType,
- Some(compoundBody.label.get))
+ body = handlerBodyExec,
+ handlerType = handler.handlerType,
+ // Used as a parent label of where the handler is declared (label of
compoundBody used).
+ scopeLabel = Some(compoundBody.label.get))
// For each condition handler is defined for, add corresponding key
value pair
// to the conditionHandlerMap.
diff --git
a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala
b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala
index 04b59e2f0817..b89c2d2a73fa 100644
---
a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala
+++
b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala
@@ -402,68 +402,6 @@ class SqlScriptingExecutionSuite extends QueryTest with
SharedSparkSession {
verifySqlScriptResult(sqlScript, expected = expected)
}
- test("handler - exit resolve in the same block when if condition fails") {
- val sqlScript =
- """
- |BEGIN
- | DECLARE VARIABLE flag INT = -1;
- | scope_to_exit: BEGIN
- | DECLARE EXIT HANDLER FOR SQLSTATE '22012'
- | BEGIN
- | SELECT flag;
- | SET flag = 1;
- | END;
- | SELECT 2;
- | SELECT 3;
- | IF 1 > 1/0 THEN
- | SELECT 10;
- | END IF;
- | SELECT 4;
- | SELECT 5;
- | END;
- | SELECT flag;
- |END
- |""".stripMargin
- val expected = Seq(
- Seq(Row(2)), // select
- Seq(Row(3)), // select
- Seq(Row(-1)), // select flag
- Seq(Row(1)) // select flag from the outer body
- )
- verifySqlScriptResult(sqlScript, expected = expected)
- }
-
- test("continue handler - continue after the if statement when if condition
fails") {
- val sqlScript =
- """
- |BEGIN
- | DECLARE VARIABLE flag INT = -1;
- | DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO
- | BEGIN
- | SELECT flag;
- | SET flag = 1;
- | END;
- | SELECT 2;
- | SELECT 3;
- | IF (1 > 1/0) THEN
- | SELECT 4;
- | END IF;
- | SELECT 5;
- | SELECT 6;
- | SELECT flag;
- |END
- |""".stripMargin
- val expected = Seq(
- Seq(Row(2)), // select
- Seq(Row(3)), // select
- Seq(Row(-1)), // select flag
- Seq(Row(5)), // select
- Seq(Row(6)), // select
- Seq(Row(1)) // select flag from the outer body
- )
- verifySqlScriptResult(sqlScript, expected = expected)
- }
-
test("handler - exit resolve in outer block") {
val sqlScript =
"""
@@ -956,6 +894,10 @@ class SqlScriptingExecutionSuite extends QueryTest with
SharedSparkSession {
| END;
| IF 1 > 1/0 THEN
| SELECT 10;
+ | ELSEIF (1 = 1) THEN
+ | SELECT 11;
+ | ELSE
+ | SELECT 12;
| END IF;
| SELECT 4;
| SELECT 5;
@@ -1012,6 +954,8 @@ class SqlScriptingExecutionSuite extends QueryTest with
SharedSparkSession {
| END;
| CASE 1/0
| WHEN flag THEN SELECT 10;
+ | WHEN 1 THEN SELECT 11;
+ | ELSE SELECT 12;
| END CASE;
| SELECT 4;
| SELECT 5;
@@ -1068,6 +1012,9 @@ class SqlScriptingExecutionSuite extends QueryTest with
SharedSparkSession {
| END;
| CASE flag
| WHEN 1/0 THEN SELECT 10;
+ | WHEN -1 THEN SELECT 11;
+ | WHEN 1 THEN SELECT 12;
+ | ELSE SELECT 13;
| END CASE;
| SELECT 4;
| SELECT 5;
@@ -1124,6 +1071,9 @@ class SqlScriptingExecutionSuite extends QueryTest with
SharedSparkSession {
| END;
| CASE flag
| WHEN 'teststr' THEN SELECT 10;
+ | WHEN -1 THEN SELECT 11;
+ | WHEN 1 THEN SELECT 12;
+ | ELSE SELECT 13;
| END CASE;
| SELECT 4;
| SELECT 5;
@@ -1180,6 +1130,8 @@ class SqlScriptingExecutionSuite extends QueryTest with
SharedSparkSession {
| END;
| CASE
| WHEN flag = 1/0 THEN SELECT 10;
+ | WHEN 1 = 1 THEN SELECT 11;
+ | ELSE SELECT 12;
| END CASE;
| SELECT 4;
| SELECT 5;
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]