@@ -1229,7 +1229,7 @@ object QueryPlanSerde extends Logging with CometExprShim {
12291229 //
12301230 // Reflection Strategy (for Iceberg 1.5.x - 1.10.x compatibility):
12311231 // - SparkInputPartition: package-private Spark class, use getDeclaredMethod + setAccessible
1232- // - Iceberg API methods: use Class.forName() on public interfaces, then getMethod()
1232+ // - Iceberg API methods: load public interfaces by name, then use getMethod()
12331233 // (avoids IllegalAccessException on package-private implementation classes like
12341234 // BaseFileScanTask$SplitScanTask in Iceberg 1.5.x)
12351235 var actualNumPartitions = 0
@@ -1253,8 +1253,9 @@ object QueryPlanSerde extends Logging with CometExprShim {
12531253
12541254 // Get the task group and extract tasks
12551255 try {
1256- // SparkInputPartition is package-private, so we need getDeclaredMethod + setAccessible
1257- // This is different from Iceberg API classes which have public interfaces
1256+ // SparkInputPartition is package-private, so we need
1257+ // getDeclaredMethod + setAccessible. This is different from
1258+ // Iceberg API classes which have public interfaces
12581259 val taskGroupMethod = inputPartClass.getDeclaredMethod("taskGroup")
12591260 taskGroupMethod.setAccessible(true)
12601261 val taskGroup = taskGroupMethod.invoke(inputPartition)
@@ -1271,7 +1272,8 @@ object QueryPlanSerde extends Logging with CometExprShim {
12711272 try {
12721273 val taskBuilder = OperatorOuterClass.IcebergFileScanTask.newBuilder()
12731274
1274- // Load interface classes to avoid IllegalAccessException on package-private implementations
1275+ // Load interface classes to avoid IllegalAccessException on
1276+ // package-private implementations
12751277 // scalastyle:off classforname
12761278 val contentScanTaskClass =
12771279 Class.forName("org.apache.iceberg.ContentScanTask")
@@ -1316,8 +1318,9 @@ object QueryPlanSerde extends Logging with CometExprShim {
13161318 val schemaJson = toJsonMethod.invoke(null, schema).asInstanceOf[String]
13171319 taskBuilder.setSchemaJson(schemaJson)
13181320
1319- // Extract field IDs from the REQUIRED output schema, not the full task schema.
1320- // This ensures we only project the columns actually needed by the query
1321+ // Extract field IDs from the REQUIRED output schema, not the full
1322+ // task schema. This ensures we only project the columns actually
1323+ // needed by the query
13211324 val columnsMethod = schema.getClass.getMethod(" columns" )
13221325 val columns =
13231326 columnsMethod.invoke(schema).asInstanceOf[java.util.List[_]]
@@ -1456,16 +1459,18 @@ object QueryPlanSerde extends Logging with CometExprShim {
14561459 // - File partition: date = '2024-06-15'
14571460 // - Residual: status = 'active' (date condition proven true by partition)
14581461 //
1459- // This residual is what should be applied during Parquet row-group scanning.
1462+ // This residual is what should be applied during Parquet row-group
1463+ // scanning.
14601464 try {
14611465 val residualMethod = contentScanTaskClass.getMethod(" residual" )
14621466 val residualExpr = residualMethod.invoke(task)
14631467
14641468 val catalystExpr = convertIcebergExpression(residualExpr, scan.output)
14651469
14661470 // Serialize to protobuf WITHOUT binding to indices.
1467- // Iceberg residuals are already unbound (name-based), so we keep them
1468- // unbound in the protobuf to avoid unnecessary index->name resolution in Rust
1471+ // Iceberg residuals are already unbound (name-based), so we keep
1472+ // them unbound in the protobuf to avoid unnecessary index->name
1473+ // resolution in Rust
14691474 catalystExpr
14701475 .flatMap { expr =>
14711476 exprToProto(expr, scan.output, binding = false)
0 commit comments