From 587c17e2f44cae4b28bcb393955b63f0e7754633 Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Thu, 2 Oct 2025 17:33:14 +0200 Subject: [PATCH 1/4] Added comments and enriched tests; --- .../sql/scripting/SqlScriptingExecution.scala | 4 +- .../SqlScriptingExecutionContext.scala | 3 +- .../scripting/SqlScriptingExecutionNode.scala | 5 ++ .../scripting/SqlScriptingInterpreter.scala | 8 +- .../SqlScriptingExecutionSuite.scala | 76 ++++--------------- 5 files changed, 28 insertions(+), 68 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala index 096ad11dd0657..30400e3527780 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala @@ -122,7 +122,7 @@ class SqlScriptingExecution( context.frames.remove(context.frames.size - 1) - // If the last frame is a handler, set leave statement to be the next one in the + // If the last frame is an EXIT handler, set leave statement to be the next one in the // innermost scope that should be exited. if (lastFrame.frameType == SqlScriptingFrameType.EXIT_HANDLER && context.frames.nonEmpty) { @@ -136,6 +136,8 @@ class SqlScriptingExecution( injectLeaveStatement(context.frames.last.executionPlan, lastFrame.scopeLabel.get) } + // If the last frame is an CONTINUE handler, leave the handler without injecting anyting, + // but skip the conditional statement if the exception originated from a conditional if (lastFrame.frameType == SqlScriptingFrameType.CONTINUE_HANDLER && context.frames.nonEmpty) { // Remove the scope if handler is executed. diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala index 08ba54e6e4e4d..c3f5c47f1b0d5 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala @@ -94,7 +94,8 @@ object SqlScriptingFrameType extends Enumeration { * @param executionPlan CompoundBody which need to be executed. * @param frameType Type of the frame. * @param scopeLabel Label of the scope where handler is defined. - * Available only for frameType = HANDLER. + * Available only for frameType = EXIT_HANDLER, frameType = CONTINUE_HANDLER + * and frameType = SQL_STORED_PROCEDURE */ class SqlScriptingExecutionFrame( val executionPlan: CompoundBodyExec, diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala index 598e379c73ac9..f914b85195c01 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala @@ -110,6 +110,11 @@ trait NonLeafStatementExec extends CompoundStatementExec { * Conditional node in the execution tree. It is a conditional non-leaf node. 
*/ trait ConditionalStatementExec extends NonLeafStatementExec { + /** + * Used by CONTINUE HANDLER for all ConditionalStatementExec and by EXIT HANDLER for + * ForStatementExec to flag the statement as interrupted and subsequently force the + * hasNext method to return false (used for skipping the conditional statement). + */ protected[scripting] var interrupted: Boolean = false } diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala index eebdb681f62c1..caf6bfaf9bb71 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala @@ -87,11 +87,11 @@ case class SqlScriptingInterpreter(session: SparkSession) { args, context) - // Scope label should be Some(compoundBody.label.get) for both handler types val handlerExec = new ExceptionHandlerExec( - handlerBodyExec, - handler.handlerType, - Some(compoundBody.label.get)) + body = handlerBodyExec, + handlerType = handler.handlerType, + // Used as a parent label of where the handler is declared (label of compoundBody used) + scopeLabel = Some(compoundBody.label.get)) // For each condition handler is defined for, add corresponding key value pair // to the conditionHandlerMap. diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala index 04b59e2f08173..b89c2d2a73faa 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionSuite.scala @@ -402,68 +402,6 @@ class SqlScriptingExecutionSuite extends QueryTest with SharedSparkSession { verifySqlScriptResult(sqlScript, expected = expected) } - test("handler - exit resolve in the same block when if condition fails") { - val sqlScript = - """ - |BEGIN - | DECLARE VARIABLE flag INT = -1; - | scope_to_exit: BEGIN - | DECLARE EXIT HANDLER FOR SQLSTATE '22012' - | BEGIN - | SELECT flag; - | SET flag = 1; - | END; - | SELECT 2; - | SELECT 3; - | IF 1 > 1/0 THEN - | SELECT 10; - | END IF; - | SELECT 4; - | SELECT 5; - | END; - | SELECT flag; - |END - |""".stripMargin - val expected = Seq( - Seq(Row(2)), // select - Seq(Row(3)), // select - Seq(Row(-1)), // select flag - Seq(Row(1)) // select flag from the outer body - ) - verifySqlScriptResult(sqlScript, expected = expected) - } - - test("continue handler - continue after the if statement when if condition fails") { - val sqlScript = - """ - |BEGIN - | DECLARE VARIABLE flag INT = -1; - | DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO - | BEGIN - | SELECT flag; - | SET flag = 1; - | END; - | SELECT 2; - | SELECT 3; - | IF (1 > 1/0) THEN - | SELECT 4; - | END IF; - | SELECT 5; - | SELECT 6; - | SELECT flag; - |END - |""".stripMargin - val expected = Seq( - Seq(Row(2)), // select - Seq(Row(3)), // select - Seq(Row(-1)), // select flag - Seq(Row(5)), // select - Seq(Row(6)), // select - Seq(Row(1)) // select flag from the outer body - ) - verifySqlScriptResult(sqlScript, expected = expected) - } - test("handler - exit resolve in outer block") { val sqlScript = """ @@ -956,6 +894,10 @@ class SqlScriptingExecutionSuite extends QueryTest with SharedSparkSession { | END; | IF 1 > 1/0 THEN | SELECT 10; + | ELSEIF (1 = 1) THEN + | SELECT 11; + | ELSE + | SELECT 12; | 
END IF; | SELECT 4; | SELECT 5; @@ -1012,6 +954,8 @@ class SqlScriptingExecutionSuite extends QueryTest with SharedSparkSession { | END; | CASE 1/0 | WHEN flag THEN SELECT 10; + | WHEN 1 THEN SELECT 11; + | ELSE SELECT 12; | END CASE; | SELECT 4; | SELECT 5; @@ -1068,6 +1012,9 @@ class SqlScriptingExecutionSuite extends QueryTest with SharedSparkSession { | END; | CASE flag | WHEN 1/0 THEN SELECT 10; + | WHEN -1 THEN SELECT 11; + | WHEN 1 THEN SELECT 12; + | ELSE SELECT 13; | END CASE; | SELECT 4; | SELECT 5; @@ -1124,6 +1071,9 @@ class SqlScriptingExecutionSuite extends QueryTest with SharedSparkSession { | END; | CASE flag | WHEN 'teststr' THEN SELECT 10; + | WHEN -1 THEN SELECT 11; + | WHEN 1 THEN SELECT 12; + | ELSE SELECT 13; | END CASE; | SELECT 4; | SELECT 5; @@ -1180,6 +1130,8 @@ class SqlScriptingExecutionSuite extends QueryTest with SharedSparkSession { | END; | CASE | WHEN flag = 1/0 THEN SELECT 10; + | WHEN 1 = 1 THEN SELECT 11; + | ELSE SELECT 12; | END CASE; | SELECT 4; | SELECT 5; From 139693273006b1307e56ce60ca04d0b1d5f9f111 Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 3 Oct 2025 10:15:24 +0200 Subject: [PATCH 2/4] Comment fix; Co-authored-by: David Milicevic <163021185+davidm-db@users.noreply.github.com> --- .../org/apache/spark/sql/scripting/SqlScriptingExecution.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala index 30400e3527780..8a79e32a0f86e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala @@ -136,7 +136,7 @@ class SqlScriptingExecution( injectLeaveStatement(context.frames.last.executionPlan, lastFrame.scopeLabel.get) } - // If the last frame is an CONTINUE handler, leave the handler without injecting anyting, + // If the last frame is a CONTINUE handler, leave the handler without injecting anyting, // but skip the conditional statement if the exception originated from a conditional if (lastFrame.frameType == SqlScriptingFrameType.CONTINUE_HANDLER && context.frames.nonEmpty) { From d09b0f28a03070f635d6f60ec5355e95015b68bc Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 3 Oct 2025 10:23:02 +0200 Subject: [PATCH 3/4] Fixed up punctuation and reiterated some comments; --- .../apache/spark/sql/scripting/SqlScriptingExecution.scala | 4 ++-- .../spark/sql/scripting/SqlScriptingExecutionContext.scala | 2 +- .../apache/spark/sql/scripting/SqlScriptingInterpreter.scala | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala index 8a79e32a0f86e..2a849aa2d6040 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecution.scala @@ -136,8 +136,8 @@ class SqlScriptingExecution( injectLeaveStatement(context.frames.last.executionPlan, lastFrame.scopeLabel.get) } - // If the last frame is a CONTINUE handler, leave the handler without injecting anyting, - // but skip the conditional statement if the exception originated from a conditional + // If the last 
frame is a CONTINUE handler, leave the handler without injecting anything, but + // skip the conditional statement if the exception originated from its conditional expression. if (lastFrame.frameType == SqlScriptingFrameType.CONTINUE_HANDLER && context.frames.nonEmpty) { // Remove the scope if handler is executed. diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala index c3f5c47f1b0d5..c91cbe02bccc8 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionContext.scala @@ -95,7 +95,7 @@ object SqlScriptingFrameType extends Enumeration { * @param frameType Type of the frame. * @param scopeLabel Label of the scope where handler is defined. * Available only for frameType = EXIT_HANDLER, frameType = CONTINUE_HANDLER - * and frameType = SQL_STORED_PROCEDURE + * and frameType = SQL_STORED_PROCEDURE. */ class SqlScriptingExecutionFrame( val executionPlan: CompoundBodyExec, diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala index caf6bfaf9bb71..e58e4b258307e 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreter.scala @@ -90,7 +90,7 @@ case class SqlScriptingInterpreter(session: SparkSession) { val handlerExec = new ExceptionHandlerExec( body = handlerBodyExec, handlerType = handler.handlerType, - // Used as a parent label of where the handler is declared (label of compoundBody used) + // Used as a parent label of where the handler is declared (label of compoundBody used). scopeLabel = Some(compoundBody.label.get)) // For each condition handler is defined for, add corresponding key value pair From 143de1f9e785655183748f7d3fb4e4502492863a Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Mon, 6 Oct 2025 10:34:27 +0200 Subject: [PATCH 4/4] Changed the interrupted flag comment; --- .../sql/scripting/SqlScriptingExecutionNode.scala | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala index f914b85195c01..aa2c2f405021a 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/scripting/SqlScriptingExecutionNode.scala @@ -111,9 +111,13 @@ trait NonLeafStatementExec extends CompoundStatementExec { */ trait ConditionalStatementExec extends NonLeafStatementExec { /** - * Used by CONTINUE HANDLER for all ConditionalStatementExec and by EXIT HANDLER for - * ForStatementExec to flag the statement as interrupted and subsequently force the - * hasNext method to return false (used for skipping the conditional statement). + * Interrupted flag indicates if the statement has been interrupted, and is used + * for skipping the execution of the conditional statements, by setting the hasNext + * to be false. 
+   * The interrupt is issued by the CONTINUE HANDLER when the conditional statement's
+   * condition expression throws an exception, and by ForStatementExec when it
+   * executes the LEAVE statement injected by the EXIT HANDLER, so that the rest
+   * of the statement is skipped.
    */
   protected[scripting] var interrupted: Boolean = false
 }
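
For context on the CONTINUE handler semantics that the enriched tests above exercise, here is a minimal sketch in the style of SqlScriptingExecutionSuite. It assumes the suite's existing verifySqlScriptResult helper and SharedSparkSession setup; the test name and the exact script are illustrative rather than part of the patch, and mirror the removed single-branch test together with the ELSE branches added above.

  test("continue handler - IF with ELSE is skipped when its condition fails") {
    val sqlScript =
      """
        |BEGIN
        |  DECLARE VARIABLE flag INT = -1;
        |  DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO
        |  BEGIN
        |    SELECT flag;
        |    SET flag = 1;
        |  END;
        |  SELECT 2;
        |  IF 1 > 1/0 THEN
        |    SELECT 10;
        |  ELSE
        |    SELECT 11;
        |  END IF;
        |  SELECT 3;
        |  SELECT flag;
        |END
        |""".stripMargin
    val expected = Seq(
      Seq(Row(2)),  // select before the failing IF condition
      Seq(Row(-1)), // SELECT flag inside the handler body
      Seq(Row(3)),  // execution resumes after the interrupted IF; both branches are skipped
      Seq(Row(1))   // flag was set to 1 by the handler
    )
    verifySqlScriptResult(sqlScript, expected = expected)
  }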