Commit 96628b8

fix compile
1 parent a7119be commit 96628b8

5 files changed: +13 additions, -12 deletions

kernel/kernel-api/src/main/java/io/delta/kernel/Scan.java

Lines changed: 1 addition & 1 deletion
@@ -200,7 +200,7 @@ public FilteredColumnarBatch next() {
         // Transform physical variant columns (struct of binaries) into logical variant
         // columns.
         nextDataBatch = VariantUtils.withVariantColumns(
-            tableClient.getExpressionHandler(),
+            engine.getExpressionHandler(),
             nextDataBatch
         );

kernel/kernel-api/src/main/java/io/delta/kernel/internal/util/VariantUtils.java

Lines changed: 1 addition & 1 deletion
@@ -18,9 +18,9 @@

 import java.util.Arrays;

-import io.delta.kernel.client.ExpressionHandler;
 import io.delta.kernel.data.ColumnVector;
 import io.delta.kernel.data.ColumnarBatch;
+import io.delta.kernel.engine.ExpressionHandler;
 import io.delta.kernel.expressions.*;
 import io.delta.kernel.types.*;
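Both hunks above track the Kernel API rename from TableClient (package io.delta.kernel.client) to Engine (package io.delta.kernel.engine): the expression handler used to decode variant columns is now looked up on the Engine. A short Scala sketch of the renamed entry point; DefaultEngine.create is assumed to be the kernel-defaults factory that replaced DefaultTableClient.create and is not part of this commit:

    import org.apache.hadoop.conf.Configuration
    import io.delta.kernel.defaults.engine.DefaultEngine
    import io.delta.kernel.engine.{Engine, ExpressionHandler}

    // Build an Engine (formerly TableClient) and obtain the handler that
    // VariantUtils.withVariantColumns now receives from its caller.
    val engine: Engine = DefaultEngine.create(new Configuration())
    val exprHandler: ExpressionHandler = engine.getExpressionHandler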

kernel/kernel-defaults/src/main/java/io/delta/kernel/defaults/internal/expressions/DefaultExpressionEvaluator.java

Lines changed: 1 addition & 0 deletions
@@ -294,6 +294,7 @@ ExpressionTransformResult visitLike(final Predicate like) {
         children.stream().map(e -> e.outputType).collect(toList()));

     return new ExpressionTransformResult(transformedExpression, BooleanType.BOOLEAN);
+  }

   ExpressionTransformResult visitVariantCoalesce(ScalarExpression variantCoalesce) {
     checkArgument(

kernel/kernel-defaults/src/test/scala/io/delta/kernel/defaults/DeltaTableWritesSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -508,7 +508,7 @@ class DeltaTableWritesSuite extends DeltaTableWriteSuiteBase with ParquetSuiteBa
     val parquetAllTypes = goldenTablePath("parquet-all-types")
     val schema = removeUnsupportedTypes(tableSchema(parquetAllTypes))

-    val data = readTableUsingKernel(engine, parquetAllTypes, schema).to[Seq]
+    val data = readTableUsingKernel(engine, parquetAllTypes, schema).toSeq
     val dataWithPartInfo = Seq(Map.empty[String, Literal] -> data)

     appendData(engine, tblPath, isNewTable = true, schema, Seq.empty, dataWithPartInfo)
@@ -551,7 +551,7 @@ class DeltaTableWritesSuite extends DeltaTableWriteSuiteBase with ParquetSuiteBa
       "timestampType"
     )
     val casePreservingPartCols =
-      casePreservingPartitionColNames(schema, partCols.asJava).asScala.to[Seq]
+      casePreservingPartitionColNames(schema, partCols.asJava).asScala.toSeq

     // get the partition values from the data batch at the given rowId
     def getPartitionValues(batch: ColumnarBatch, rowId: Int): Map[String, Literal] = {
@@ -584,7 +584,7 @@ class DeltaTableWritesSuite extends DeltaTableWriteSuiteBase with ParquetSuiteBa
       }.toMap
     }

-    val data = readTableUsingKernel(engine, parquetAllTypes, schema).to[Seq]
+    val data = readTableUsingKernel(engine, parquetAllTypes, schema).toSeq

     // From the above table read data, convert each row as a new batch with partition info
     // Take the values of the partitionCols from the data and create a new batch with the
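The .to[Seq] changes above are the usual Scala 2.12/2.13 cross-build fix: the CanBuildFrom-based to[C[_]] overload was removed in 2.13, while .toSeq compiles on both. A tiny standalone sketch with hypothetical values, not code from this suite:

    // Cross-version-safe conversion to Seq.
    val rows: Iterator[Int] = Iterator(1, 2, 3)
    val asSeq: Seq[Int] = rows.toSeq   // compiles on Scala 2.12 and 2.13
    // rows.to[Seq]                    // 2.12-only syntax; fails to compile on 2.13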

kernel/kernel-defaults/src/test/scala/io/delta/kernel/defaults/ScanSuite.scala

Lines changed: 7 additions & 7 deletions
@@ -35,7 +35,7 @@ import org.scalatest.funsuite.AnyFunSuite
 import io.delta.kernel.Scan
 import io.delta.kernel.data.{ColumnarBatch, ColumnVector, FilteredColumnarBatch, Row}
 import io.delta.kernel.defaults.utils.TestRow
-import io.delta.kernel.engine.{JsonHandler, ParquetHandler, TableClient}
+import io.delta.kernel.engine.{Engine, JsonHandler, ParquetHandler}
 import io.delta.kernel.expressions.{AlwaysFalse, AlwaysTrue, And, Column, Or, Predicate, ScalarExpression}
 import io.delta.kernel.expressions.Literal._
 import io.delta.kernel.types.StructType
@@ -1611,11 +1611,11 @@ class ScanSuite extends AnyFunSuite with TestUtils with ExpressionTestUtils with
     val kernelSchema = tableSchema(path)

     val snapshot = latestSnapshot(path)
-    val scan = snapshot.getScanBuilder(defaultTableClient).build()
-    val scanState = scan.getScanState(defaultTableClient)
+    val scan = snapshot.getScanBuilder(defaultEngine).build()
+    val scanState = scan.getScanState(defaultEngine)
     val physicalReadSchema =
-      ScanStateRow.getPhysicalDataReadSchema(defaultTableClient, scanState)
-    val scanFilesIter = scan.getScanFiles(defaultTableClient)
+      ScanStateRow.getPhysicalDataReadSchema(defaultEngine, scanState)
+    val scanFilesIter = scan.getScanFiles(defaultEngine)

     val readRows = ArrayBuffer[Row]()
     while (scanFilesIter.hasNext()) {
@@ -1625,13 +1625,13 @@ class ScanSuite extends AnyFunSuite with TestUtils with ExpressionTestUtils with
         val scanFileRow = scanFileRows.next()
         val fileStatus = InternalScanFileUtils.getAddFileStatus(scanFileRow)

-        val physicalDataIter = defaultTableClient.getParquetHandler.readParquetFiles(
+        val physicalDataIter = defaultEngine.getParquetHandler.readParquetFiles(
           singletonCloseableIterator(fileStatus),
           physicalReadSchema,
           Optional.empty())

        val transformedRowsIter = Scan.transformPhysicalData(
-         defaultTableClient,
+         defaultEngine,
          scanState,
          scanFileRow,
          physicalDataIter
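For reference, the calls touched above make up the full engine-based read path in this test. A condensed Scala sketch of that flow; path, defaultEngine, latestSnapshot, and singletonCloseableIterator are helpers from the suite's test utilities and are assumed here rather than defined by this commit:

    import java.util.Optional
    import scala.collection.mutable.ArrayBuffer
    import io.delta.kernel.Scan
    import io.delta.kernel.data.Row
    // Internal helper classes used by the test; import paths assumed here.
    import io.delta.kernel.internal.InternalScanFileUtils
    import io.delta.kernel.internal.data.ScanStateRow

    val snapshot = latestSnapshot(path)                 // test helper (assumed)
    val scan = snapshot.getScanBuilder(defaultEngine).build()
    val scanState = scan.getScanState(defaultEngine)
    val physicalReadSchema =
      ScanStateRow.getPhysicalDataReadSchema(defaultEngine, scanState)

    val readRows = ArrayBuffer[Row]()
    val scanFilesIter = scan.getScanFiles(defaultEngine)
    while (scanFilesIter.hasNext()) {
      val scanFileRows = scanFilesIter.next().getRows()
      while (scanFileRows.hasNext()) {
        val scanFileRow = scanFileRows.next()
        val fileStatus = InternalScanFileUtils.getAddFileStatus(scanFileRow)

        // Read the physical Parquet data, then let the kernel map it to logical rows.
        val physicalDataIter = defaultEngine.getParquetHandler.readParquetFiles(
          singletonCloseableIterator(fileStatus), physicalReadSchema, Optional.empty())
        val transformedRowsIter = Scan.transformPhysicalData(
          defaultEngine, scanState, scanFileRow, physicalDataIter)

        while (transformedRowsIter.hasNext()) {
          val rowsIter = transformedRowsIter.next().getRows()
          while (rowsIter.hasNext()) { readRows += rowsIter.next() }
        }
      }
    }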
