
Commit 203cffa

fix compile
1 parent 7edcad7 commit 203cffa


5 files changed: +13, -12 lines changed


kernel/kernel-api/src/main/java/io/delta/kernel/Scan.java

Lines changed: 1 addition & 1 deletion
@@ -206,7 +206,7 @@ public FilteredColumnarBatch next() {
                 // Transform physical variant columns (struct of binaries) into logical variant
                 // columns.
                 nextDataBatch = VariantUtils.withVariantColumns(
-                    tableClient.getExpressionHandler(),
+                    engine.getExpressionHandler(),
                     nextDataBatch
                 );

kernel/kernel-api/src/main/java/io/delta/kernel/internal/util/VariantUtils.java

Lines changed: 1 addition & 1 deletion
@@ -18,9 +18,9 @@

 import java.util.Arrays;

-import io.delta.kernel.client.ExpressionHandler;
 import io.delta.kernel.data.ColumnVector;
 import io.delta.kernel.data.ColumnarBatch;
+import io.delta.kernel.engine.ExpressionHandler;
 import io.delta.kernel.expressions.*;
 import io.delta.kernel.types.*;
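
Both kernel-api hunks above are mechanical fallout from the TableClient-to-Engine rename: ExpressionHandler now lives in the io.delta.kernel.engine package, and the variant transformation reads its handler off the engine parameter instead of the old tableClient. A minimal sketch of the resulting call pattern, assuming the DefaultEngine factory from the kernel-defaults module (the setup itself is not part of this diff):

    import org.apache.hadoop.conf.Configuration

    import io.delta.kernel.defaults.engine.DefaultEngine
    import io.delta.kernel.engine.{Engine, ExpressionHandler}

    // Assumed setup: the default Engine implementation backed by a Hadoop Configuration.
    val engine: Engine = DefaultEngine.create(new Configuration())

    // The handler that VariantUtils.withVariantColumns(...) now receives.
    val exprHandler: ExpressionHandler = engine.getExpressionHandler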

kernel/kernel-defaults/src/main/java/io/delta/kernel/defaults/internal/expressions/DefaultExpressionEvaluator.java

Lines changed: 1 addition & 0 deletions
@@ -294,6 +294,7 @@ ExpressionTransformResult visitLike(final Predicate like) {
           children.stream().map(e -> e.outputType).collect(toList()));

       return new ExpressionTransformResult(transformedExpression, BooleanType.BOOLEAN);
+    }

     ExpressionTransformResult visitVariantCoalesce(ScalarExpression variantCoalesce) {
       checkArgument(

kernel/kernel-defaults/src/test/scala/io/delta/kernel/defaults/DeltaTableWritesSuite.scala

Lines changed: 3 additions & 3 deletions
@@ -627,7 +627,7 @@ class DeltaTableWritesSuite extends DeltaTableWriteSuiteBase with ParquetSuiteBa
     val parquetAllTypes = goldenTablePath("parquet-all-types")
     val schema = removeUnsupportedTypes(tableSchema(parquetAllTypes))

-    val data = readTableUsingKernel(engine, parquetAllTypes, schema).to[Seq]
+    val data = readTableUsingKernel(engine, parquetAllTypes, schema).toSeq
     val dataWithPartInfo = Seq(Map.empty[String, Literal] -> data)

     appendData(engine, tblPath, isNewTable = true, schema, Seq.empty, dataWithPartInfo)
@@ -670,7 +670,7 @@ class DeltaTableWritesSuite extends DeltaTableWriteSuiteBase with ParquetSuiteBa
       "timestampType"
     )
     val casePreservingPartCols =
-      casePreservingPartitionColNames(schema, partCols.asJava).asScala.to[Seq]
+      casePreservingPartitionColNames(schema, partCols.asJava).asScala.toSeq

     // get the partition values from the data batch at the given rowId
     def getPartitionValues(batch: ColumnarBatch, rowId: Int): Map[String, Literal] = {
@@ -703,7 +703,7 @@ class DeltaTableWritesSuite extends DeltaTableWriteSuiteBase with ParquetSuiteBa
     }.toMap
   }

-    val data = readTableUsingKernel(engine, parquetAllTypes, schema).to[Seq]
+    val data = readTableUsingKernel(engine, parquetAllTypes, schema).toSeq

     // From the above table read data, convert each row as a new batch with partition info
     // Take the values of the partitionCols from the data and create a new batch with the
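
The three replacements above swap the collection conversion .to[Seq] for .toSeq. The type-parameterized to[C] relies on Scala 2.12's CanBuildFrom and no longer compiles on Scala 2.13, while .toSeq works on both; presumably that is the compile failure this part of the commit addresses. A small standalone illustration with placeholder data in place of the suite's readTableUsingKernel helper:

    // Placeholder data standing in for the rows read from the golden table.
    val rows: Iterator[Int] = Iterator(1, 2, 3)

    // 2.12-only form (fails to compile on Scala 2.13):
    //   val data = rows.to[Seq]
    // Cross-version form used by the fix:
    val data: Seq[Int] = rows.toSeq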

kernel/kernel-defaults/src/test/scala/io/delta/kernel/defaults/ScanSuite.scala

Lines changed: 7 additions & 7 deletions
@@ -35,7 +35,7 @@ import org.scalatest.funsuite.AnyFunSuite
 import io.delta.kernel.Scan
 import io.delta.kernel.data.{ColumnarBatch, ColumnVector, FilteredColumnarBatch, Row}
 import io.delta.kernel.defaults.utils.TestRow
-import io.delta.kernel.engine.{JsonHandler, ParquetHandler, TableClient}
+import io.delta.kernel.engine.{Engine, JsonHandler, ParquetHandler}
 import io.delta.kernel.expressions.{AlwaysFalse, AlwaysTrue, And, Column, Or, Predicate, ScalarExpression}
 import io.delta.kernel.expressions.Literal._
 import io.delta.kernel.types.StructType
@@ -1611,11 +1611,11 @@ class ScanSuite extends AnyFunSuite with TestUtils with ExpressionTestUtils with
     val kernelSchema = tableSchema(path)

     val snapshot = latestSnapshot(path)
-    val scan = snapshot.getScanBuilder(defaultTableClient).build()
-    val scanState = scan.getScanState(defaultTableClient)
+    val scan = snapshot.getScanBuilder(defaultEngine).build()
+    val scanState = scan.getScanState(defaultEngine)
     val physicalReadSchema =
-      ScanStateRow.getPhysicalDataReadSchema(defaultTableClient, scanState)
-    val scanFilesIter = scan.getScanFiles(defaultTableClient)
+      ScanStateRow.getPhysicalDataReadSchema(defaultEngine, scanState)
+    val scanFilesIter = scan.getScanFiles(defaultEngine)

     val readRows = ArrayBuffer[Row]()
     while (scanFilesIter.hasNext()) {
@@ -1625,13 +1625,13 @@ class ScanSuite extends AnyFunSuite with TestUtils with ExpressionTestUtils with
         val scanFileRow = scanFileRows.next()
         val fileStatus = InternalScanFileUtils.getAddFileStatus(scanFileRow)

-        val physicalDataIter = defaultTableClient.getParquetHandler.readParquetFiles(
+        val physicalDataIter = defaultEngine.getParquetHandler.readParquetFiles(
           singletonCloseableIterator(fileStatus),
           physicalReadSchema,
           Optional.empty())

        val transformedRowsIter = Scan.transformPhysicalData(
-          defaultTableClient,
+          defaultEngine,
          scanState,
          scanFileRow,
          physicalDataIter
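
The defaultEngine value used throughout this hunk replaces the old defaultTableClient helper and presumably comes from the TestUtils mixin shown in the hunk header; it is not defined in this diff. A sketch of how such a helper could be wired up, assuming the same DefaultEngine factory as above (trait and value names here are illustrative only):

    import org.apache.hadoop.conf.Configuration

    import io.delta.kernel.defaults.engine.DefaultEngine
    import io.delta.kernel.engine.Engine

    trait KernelEngineHelper {
      // Illustrative stand-in for the suite's defaultEngine: one shared Engine
      // instance that the scan-building, scan-state and Parquet-read calls all use.
      lazy val defaultEngine: Engine = DefaultEngine.create(new Configuration())
    }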
