@@ -2211,6 +2211,8 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   test("get_struct_field - select primitive fields") {
+    val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
+    assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
     withTempPath { dir =>
       // create input file with Comet disabled
       withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
@@ -2225,7 +2227,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       val df = spark.read.parquet(dir.toString()).select("nested1.id")
       // Comet's original scan does not support structs.
       // The plan will have a Comet Scan only if scan impl is native_full or native_recordbatch
-      if (!CometConf.COMET_NATIVE_SCAN_IMPL.get().equals(CometConf.SCAN_NATIVE_COMET)) {
+      if (!scanImpl.equals(CometConf.SCAN_NATIVE_COMET)) {
         checkSparkAnswerAndOperator(df)
       } else {
         checkSparkAnswer(df)
@@ -2234,6 +2236,8 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   test("get_struct_field - select subset of struct") {
+    val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
+    assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
     withTempPath { dir =>
       // create input file with Comet disabled
       withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
@@ -2255,7 +2259,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       val df = spark.read.parquet(dir.toString())
       // Comet's original scan does not support structs.
       // The plan will have a Comet Scan only if scan impl is native_full or native_recordbatch
-      if (!CometConf.COMET_NATIVE_SCAN_IMPL.get().equals(CometConf.SCAN_NATIVE_COMET)) {
+      if (scanImpl != CometConf.SCAN_NATIVE_COMET) {
         checkSparkAnswerAndOperator(df.select("nested1.id"))
         checkSparkAnswerAndOperator(df.select("nested1.nested2"))
         checkSparkAnswerAndOperator(df.select("nested1.nested2.id"))
@@ -2270,6 +2274,8 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
   }
 
   test("get_struct_field - read entire struct") {
+    val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
+    assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
     withTempPath { dir =>
       // create input file with Comet disabled
       withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
@@ -2291,7 +2297,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
       val df = spark.read.parquet(dir.toString()).select("nested1.id")
       // Comet's original scan does not support structs.
       // The plan will have a Comet Scan only if scan impl is native_full or native_recordbatch
-      if (!CometConf.COMET_NATIVE_SCAN_IMPL.get().equals(CometConf.SCAN_NATIVE_COMET)) {
+      if (scanImpl != CometConf.SCAN_NATIVE_COMET) {
         checkSparkAnswerAndOperator(df)
       } else {
         checkSparkAnswer(df)
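
The same two-line guard is pasted verbatim at the top of all three tests. A minimal refactoring sketch, not part of this change: a hypothetical assumeStructScanSupported() helper on the suite, assuming the scan-impl conf value is a plain String (as the equality checks above imply) and that ScalaTest's assume is in scope, as it already is in these tests:

    // Hypothetical helper (illustration only): reads the configured native scan impl,
    // cancels the test when auto scan selection is combined with Spark 4.0+,
    // and returns the impl so callers can still branch on SCAN_NATIVE_COMET.
    private def assumeStructScanSupported(): String = {
      val scanImpl = CometConf.COMET_NATIVE_SCAN_IMPL.get()
      assume(!(scanImpl == CometConf.SCAN_AUTO && CometSparkSessionExtensions.isSpark40Plus))
      scanImpl
    }

Each test body would then start with val scanImpl = assumeStructScanSupported() instead of duplicating the guard.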