Commit 5d0a584

[hotfix] Fix compile problem of InternalRow#getMap() interface in Spark connector
Parent: b306ed0
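
The compile problem appears to be the usual consequence of an interface gaining a method: once the Fluss InternalRow and InternalArray interfaces declare getMap(pos: Int): InternalMap, every concrete implementation must override it or the connector module stops compiling. A minimal sketch of that failure mode, using hypothetical trait and class names in place of the Fluss interfaces:

// Hypothetical stand-ins for the Fluss interfaces; the real InternalMap
// return type is elided to AnyRef to keep the sketch self-contained.
trait RowLike {
  def getInt(pos: Int): Int
  def getMap(pos: Int): AnyRef // newly added interface method
}

class RowWrapper extends RowLike {
  override def getInt(pos: Int): Int = 0

  // Omitting this override reproduces the kind of error the hotfix addresses:
  //   "class RowWrapper needs to be abstract, since method getMap is not defined"
  override def getMap(pos: Int): AnyRef =
    throw new UnsupportedOperationException()
}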

2 files changed: +12 -5 lines

fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussArray.scala

Lines changed: 7 additions & 3 deletions

@@ -17,10 +17,9 @@

 package org.apache.fluss.spark.row

-import org.apache.fluss.row.{BinaryString, Decimal, InternalArray => FlussInternalArray, InternalRow => FlussInternalRow, TimestampLtz, TimestampNtz}
-
+import org.apache.fluss.row.{BinaryString, Decimal, InternalMap, TimestampLtz, TimestampNtz, InternalArray => FlussInternalArray, InternalRow => FlussInternalRow}
 import org.apache.spark.sql.catalyst.util.{ArrayData => SparkArrayData}
-import org.apache.spark.sql.types.{ArrayType => SparkArrayType, DataType => SparkDataType, StructType}
+import org.apache.spark.sql.types.{StructType, ArrayType => SparkArrayType, DataType => SparkDataType}

 /** Wraps a Spark [[SparkArrayData]] as a Fluss [[FlussInternalArray]]. */
 class SparkAsFlussArray(arrayData: SparkArrayData, elementType: SparkDataType)
@@ -129,4 +128,9 @@ class SparkAsFlussArray(arrayData: SparkArrayData, elementType: SparkDataType)
   override def getRow(pos: Int, numFields: Int): FlussInternalRow =
     new SparkAsFlussRow(elementType.asInstanceOf[StructType])
       .replace(arrayData.getStruct(pos, numFields))
+
+  /** Returns the map value at the given position. */
+  override def getMap(pos: Int): InternalMap = {
+    throw new UnsupportedOperationException()
+  }
 }
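
For a sense of the runtime behavior, here is a minimal, hypothetical usage sketch of the new override. It assumes Spark's GenericArrayData as the backing array and is not part of the commit:

import org.apache.spark.sql.catalyst.util.GenericArrayData
import org.apache.spark.sql.types.IntegerType

import org.apache.fluss.spark.row.SparkAsFlussArray

// Wrap a Spark array of ints as a Fluss InternalArray.
val array = new SparkAsFlussArray(new GenericArrayData(Array(1, 2, 3)), IntegerType)

// The new override satisfies the interface, but MAP elements are not supported yet:
array.getMap(0) // throws UnsupportedOperationException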

fluss-spark/fluss-spark-common/src/main/scala/org/apache/fluss/spark/row/SparkAsFlussRow.scala

Lines changed: 5 additions & 2 deletions

@@ -17,8 +17,7 @@

 package org.apache.fluss.spark.row

-import org.apache.fluss.row.{BinaryString, Decimal, InternalRow => FlussInternalRow, TimestampLtz, TimestampNtz}
-
+import org.apache.fluss.row.{BinaryString, Decimal, InternalMap, TimestampLtz, TimestampNtz, InternalRow => FlussInternalRow}
 import org.apache.spark.sql.catalyst.{InternalRow => SparkInternalRow}
 import org.apache.spark.sql.types.StructType

@@ -125,4 +124,8 @@ class SparkAsFlussRow(schema: StructType) extends FlussInternalRow with Serializ
     new SparkAsFlussRow(schema.fields(pos).dataType.asInstanceOf[StructType])
       .replace(row.getStruct(pos, numFields))

+  /** Returns the map value at the given position. */
+  override def getMap(pos: Int): InternalMap = {
+    throw new UnsupportedOperationException()
+  }
 }
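
The row-side override behaves the same way. A minimal, hypothetical sketch (not part of the commit), assuming Spark's InternalRow.fromSeq to build the wrapped row:

import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.types.{IntegerType, StructField, StructType}

import org.apache.fluss.spark.row.SparkAsFlussRow

// Wrap a single-field Spark row as a Fluss InternalRow.
val schema = StructType(Seq(StructField("id", IntegerType)))
val row = new SparkAsFlussRow(schema).replace(InternalRow.fromSeq(Seq(1)))

// Compiles against the updated interface; MAP fields are not handled yet:
row.getMap(0) // throws UnsupportedOperationException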
