
Commit 040c661

Author: xiaohongbo
Message: clean code format
Parent: af2f6f1

File tree

2 files changed (+2 lines, -4 lines)


paimon-core/src/main/java/org/apache/paimon/table/source/DataTableBatchScan.java
(1 addition, 2 deletions)

@@ -135,6 +135,7 @@ private Optional<StartingScanner.Result> applyPushDownLimit() {
             return Optional.of(result);
         }
 
+        long scannedRowCount = 0;
         SnapshotReader.Plan plan = ((ScannedResult) result).plan();
         List<Split> planSplits = plan.splits();
         if (planSplits.stream().anyMatch(s -> !(s instanceof DataSplit))) {
@@ -143,7 +144,6 @@ private Optional<StartingScanner.Result> applyPushDownLimit() {
         @SuppressWarnings("unchecked")
         List<DataSplit> splits = (List<DataSplit>) (List<?>) planSplits;
 
-        long scannedRowCount = 0;
         LOG.info("Applying limit pushdown. Original splits count: {}", splits.size());
         if (splits.isEmpty()) {
             return Optional.of(result);
@@ -200,7 +200,6 @@ private Optional<StartingScanner.Result> applyPushDownTopN() {
 
         SnapshotReader.Plan plan = ((ScannedResult) result).plan();
        List<Split> planSplits = plan.splits();
-        // TopN pushdown only supports DataSplit. Skip for IncrementalSplit.
         if (planSplits.stream().anyMatch(s -> !(s instanceof DataSplit))) {
             return Optional.of(result);
         }
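For context: `scannedRowCount` feeds the accumulation loop that decides how many splits a LIMIT query actually needs to scan; moving its declaration above the `instanceof` guard is purely a formatting change. Below is a minimal, self-contained sketch of that accumulation pattern; `RowCountedSplit` and `pickSplitsUpToLimit` are hypothetical stand-ins for illustration, not Paimon API.

import java.util.ArrayList;
import java.util.List;

public class LimitPushDownSketch {

    // Hypothetical stand-in for a data split that knows its raw row count;
    // not the actual Paimon DataSplit API.
    record RowCountedSplit(String name, long rowCount) {}

    // Keep taking splits until their combined row count covers the limit,
    // so the remaining splits never need to be scanned.
    static List<RowCountedSplit> pickSplitsUpToLimit(List<RowCountedSplit> splits, long limit) {
        List<RowCountedSplit> picked = new ArrayList<>();
        long scannedRowCount = 0; // declared up front, mirroring the moved declaration
        for (RowCountedSplit split : splits) {
            picked.add(split);
            scannedRowCount += split.rowCount();
            if (scannedRowCount >= limit) {
                break; // enough rows to satisfy LIMIT; prune the rest
            }
        }
        return picked;
    }

    public static void main(String[] args) {
        List<RowCountedSplit> splits = List.of(
                new RowCountedSplit("split-0", 100),
                new RowCountedSplit("split-1", 100),
                new RowCountedSplit("split-2", 100));
        // LIMIT 150 is covered by the first two splits; split-2 is pruned.
        System.out.println(pickSplitsUpToLimit(splits, 150));
    }
}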

paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/sql/TableValuedFunctionsTest.scala
(1 addition, 2 deletions)

@@ -344,8 +344,7 @@ class TableValuedFunctionsTest extends PaimonHiveTestBase {
     }
   }
 
-  test(
-    "incremental query by tag with LIMIT - no ClassCastException (IncrementalSplit handled in applyPushDownLimit)") {
+  test("incremental query by tag with LIMIT") {
     sql("use paimon")
     withTable("t") {
       spark.sql("""
