
Commit d52d593

andygrove and claude committed
Remove unnecessary comments that describe what code does
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent f3480b5

2 files changed: 0 additions, 6 deletions

spark/src/main/scala/org/apache/comet/parquet/CometParquetFileFormat.scala
0 additions, 5 deletions

@@ -133,13 +133,8 @@ class CometParquetFileFormat(session: SparkSession)
       isCaseSensitive,
       datetimeRebaseSpec)
 
-    // We still need the predicate in the conf to allow us to generate row indexes based on
-    // the actual row groups read
     val pushed = if (parquetFilterPushDown) {
       filters
-        // Collects all converted Parquet filter predicates. Notice that not all predicates
-        // can be converted (`ParquetFilters.createFilter` returns an `Option`). That's why
-        // a `flatMap` is used here.
         .flatMap(parquetFilters.createFilter)
         .reduceOption(FilterApi.and)
     } else {
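
The deleted comments described a real pattern worth noting: `ParquetFilters.createFilter` returns an `Option`, so `flatMap` drops the filters that cannot be converted to Parquet predicates, and `reduceOption(FilterApi.and)` ANDs whatever survives, yielding `None` when nothing converts. A minimal self-contained sketch of that pattern, using toy stand-in types rather than Spark's or Parquet's real ones:

// A sketch of the pattern the deleted comments described, with toy types.
object FilterPushDownSketch {
  // Hypothetical stand-ins for Spark source filters and Parquet predicates.
  sealed trait SourceFilter
  case class GreaterThan(col: String, value: Int) extends SourceFilter
  case class Unsupported(expr: String) extends SourceFilter

  sealed trait Predicate
  case class Gt(col: String, value: Int) extends Predicate
  case class And(left: Predicate, right: Predicate) extends Predicate

  // Mirrors the shape of ParquetFilters.createFilter: not every source
  // filter is convertible, so the result is an Option.
  def createFilter(f: SourceFilter): Option[Predicate] = f match {
    case GreaterThan(c, v) => Some(Gt(c, v))
    case Unsupported(_)    => None // cannot be pushed down
  }

  def main(args: Array[String]): Unit = {
    val filters = Seq(GreaterThan("a", 1), Unsupported("udf(a)"), GreaterThan("b", 2))
    // flatMap keeps only the convertible predicates; reduceOption ANDs them
    // and yields None when the filtered collection is empty.
    val pushed: Option[Predicate] = filters.flatMap(createFilter).reduceOption(And(_, _))
    println(pushed) // Some(And(Gt(a,1),Gt(b,2)))
  }
}

The advantage of `reduceOption` over `reduce` here is that an empty collection of convertible predicates produces `None` instead of throwing.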

spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
0 additions, 1 deletion

@@ -1837,7 +1837,6 @@ class ParquetReadV2Suite extends ParquetReadSuite with AdaptiveSparkPlanHelper {
     val scans = collect(r.filter(f).queryExecution.executedPlan) { case p: CometBatchScanExec =>
       p.scan
     }
-    // V2 Parquet scan acceleration is no longer active
     assert(scans.isEmpty)
   }
 }
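
The deleted test comment explained the assertion's intent: collecting `CometBatchScanExec` nodes from the executed plan is expected to find none, meaning the V2 Parquet scan is not accelerated by Comet. A minimal sketch of the collect-then-assert idiom on a toy plan tree (the node types and `collect` helper below are hypothetical stand-ins, not Spark's `SparkPlan` hierarchy or `AdaptiveSparkPlanHelper`):

// A sketch of collecting matching nodes from a plan-like tree.
object CollectSketch {
  // Toy plan nodes standing in for Spark's physical plan hierarchy.
  sealed trait Plan { def children: Seq[Plan] }
  case class Filter(child: Plan) extends Plan { def children: Seq[Plan] = Seq(child) }
  case class BatchScan(table: String) extends Plan { def children: Seq[Plan] = Nil }
  case class RowScan(table: String) extends Plan { def children: Seq[Plan] = Nil }

  // Pre-order traversal that applies a partial function and gathers matches.
  def collect[A](plan: Plan)(pf: PartialFunction[Plan, A]): Seq[A] =
    pf.lift(plan).toSeq ++ plan.children.flatMap(collect(_)(pf))

  def main(args: Array[String]): Unit = {
    val plan = Filter(RowScan("t"))
    val scans = collect(plan) { case s: BatchScan => s.table }
    // No batch scans in this plan, mirroring the test's expectation.
    assert(scans.isEmpty)
    println(s"batch scans found: ${scans.size}")
  }
}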
