Commit e50e077

test fix: org.apache.spark.sql.comet.ParquetEncryptionITCase.SPARK-37117
1 parent 70163ec commit e50e077

File tree

1 file changed: +24 -0 lines changed


spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala

Lines changed: 24 additions & 0 deletions
@@ -528,6 +528,30 @@ abstract class CometTestBase
     }
   }
 
+  /**
+   * Override waitForTasksToFinish to ensure SparkContext is active before checking tasks. This
+   * fixes the issue where waitForTasksToFinish returns -1 when SparkContext is not active.
+   */
+  override protected def waitForTasksToFinish(): Unit = {
+    // Ensure SparkContext is active before checking tasks
+    // The parent implementation uses SparkContext.getActive.map(_.activeTasks).getOrElse(-1)
+    // If SparkContext is not active, it returns -1 which causes the assertion to fail.
+    // We ensure we have an active SparkContext before calling the parent method.
+    if (SparkContext.getActive.isEmpty) {
+      // Ensure we have a SparkContext from the spark session
+      if (_spark != null) {
+        // SparkContext from spark session should already be active
+        // but if not, getOrCreate will make it active
+        val _ = _spark.sparkContext
+      } else {
+        // Fallback to sparkContext which will get or create one
+        val _ = sparkContext
+      }
+    }
+    // Now call parent implementation which should find an active SparkContext
+    super.waitForTasksToFinish()
+  }
+
   protected def readResourceParquetFile(name: String): DataFrame = {
     spark.read.parquet(getResourceParquetFilePath(name))
   }
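
For context, the inline comments describe the failure mode: the parent helper reads the active-task count via SparkContext.getActive.map(_.activeTasks).getOrElse(-1), so when no SparkContext is active the count comes back as the -1 sentinel rather than 0 and the check can never pass. The snippet below is a minimal, standalone Scala sketch of that sentinel behaviour, not Spark or Comet source; ActiveTaskSentinelSketch and activeTasks are hypothetical names used only for illustration.

// Standalone sketch (hypothetical names): an Option lookup that falls back to -1
// can never satisfy an "== 0" no-active-tasks check, which is why the override
// above first makes sure a SparkContext is active.
object ActiveTaskSentinelSketch {
  // Stand-in for SparkContext.getActive.map(_.activeTasks):
  // Some(n) models an active context with n running tasks; None models no active context.
  def activeTasks(active: Option[Int]): Int = active.getOrElse(-1)

  def main(args: Array[String]): Unit = {
    assert(activeTasks(Some(0)) == 0) // active and idle: the parent's check would pass
    println(activeTasks(None))        // prints -1: an "== 0" assertion would never pass
  }
}

Ensuring an active SparkContext before delegating, as the override does, keeps the lookup on the Some branch so the parent method observes a real task count.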

0 commit comments