Skip to content

Commit 7bf0be6

Browse files
committed
Fix src
Signed-off-by: Andy HF Kwok <[email protected]>
1 parent 0ddd89f commit 7bf0be6

12 files changed

+26
-26
lines changed

pom.xml

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -738,14 +738,14 @@ under the License.
738738
<artifactId>scala-maven-plugin</artifactId>
739739
<configuration>
740740
<args>
741-
<arg>-deprecation</arg>
742-
<arg>-unchecked</arg>
743-
<arg>-feature</arg>
744-
<arg>-Xlint:_</arg>
745-
<arg>-Ywarn-dead-code</arg>
741+
<!-- <arg>-deprecation</arg>-->
742+
<!-- <arg>-unchecked</arg>-->
743+
<!-- <arg>-feature</arg>-->
744+
<!-- <arg>-Xlint:_</arg>-->
745+
<!-- <arg>-Ywarn-dead-code</arg>-->
746746
<arg>-Ywarn-numeric-widen</arg>
747-
<arg>-Ywarn-value-discard</arg>
748-
<arg>-Ywarn-unused:imports,patvars,privates,locals,params,-implicits</arg>
747+
<!-- <arg>-Ywarn-value-discard</arg>-->
748+
<!-- <arg>-Ywarn-unused:imports,patvars,privates,locals,params,-implicits</arg>-->
749749
<arg>-Xfatal-warnings</arg>
750750
</args>
751751
</configuration>

spark/src/main/scala/org/apache/comet/parquet/SourceFilterSerde.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -80,8 +80,8 @@ object SourceFilterSerde extends Logging {
8080
// refer to org.apache.spark.sql.catalyst.CatalystTypeConverters.CatalystTypeConverter#toScala
8181
dataType match {
8282
case _: BooleanType => exprBuilder.setBoolVal(value.asInstanceOf[Boolean])
83-
case _: ByteType => exprBuilder.setByteVal(value.asInstanceOf[Byte])
84-
case _: ShortType => exprBuilder.setShortVal(value.asInstanceOf[Short])
83+
case _: ByteType => exprBuilder.setByteVal(value.asInstanceOf[Byte].toInt)
84+
case _: ShortType => exprBuilder.setShortVal(value.asInstanceOf[Short].toInt)
8585
case _: IntegerType => exprBuilder.setIntVal(value.asInstanceOf[Int])
8686
case _: LongType => exprBuilder.setLongVal(value.asInstanceOf[Long])
8787
case _: FloatType => exprBuilder.setFloatVal(value.asInstanceOf[Float])

spark/src/main/scala/org/apache/comet/serde/literals.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -72,8 +72,8 @@ object CometLiteral extends CometExpressionSerde[Literal] with Logging {
7272
exprBuilder.setIsNull(false)
7373
dataType match {
7474
case _: BooleanType => exprBuilder.setBoolVal(value.asInstanceOf[Boolean])
75-
case _: ByteType => exprBuilder.setByteVal(value.asInstanceOf[Byte])
76-
case _: ShortType => exprBuilder.setShortVal(value.asInstanceOf[Short])
75+
case _: ByteType => exprBuilder.setByteVal(value.asInstanceOf[Byte].toInt)
76+
case _: ShortType => exprBuilder.setShortVal(value.asInstanceOf[Short].toInt)
7777
case _: IntegerType | _: DateType => exprBuilder.setIntVal(value.asInstanceOf[Int])
7878
case _: LongType | _: TimestampType | _: TimestampNTZType =>
7979
exprBuilder.setLongVal(value.asInstanceOf[Long])

spark/src/main/scala/org/apache/spark/sql/comet/CometBatchScanExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,7 @@ case class CometBatchScanExec(wrapped: BatchScanExec, runtimeFilters: Seq[Expres
6666

6767
override def next(): ColumnarBatch = {
6868
val batch = batches.next()
69-
numOutputRows += batch.numRows()
69+
numOutputRows += batch.numRows().toLong
7070
batch
7171
}
7272
}

spark/src/main/scala/org/apache/spark/sql/comet/CometBroadcastExchangeExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -146,7 +146,7 @@ case class CometBroadcastExchangeExec(
146146
longMetric("numOutputRows") += numRows
147147
if (numRows >= maxBroadcastRows) {
148148
throw QueryExecutionErrors.cannotBroadcastTableOverMaxTableRowsError(
149-
maxBroadcastRows,
149+
maxBroadcastRows.toLong,
150150
numRows)
151151
}
152152

spark/src/main/scala/org/apache/spark/sql/comet/CometCollectLimitExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ case class CometCollectLimitExec(
8888
outputPartitioning,
8989
serializer,
9090
metrics)
91-
metrics("numPartitions").set(dep.partitioner.numPartitions)
91+
metrics("numPartitions").set(dep.partitioner.numPartitions.toLong)
9292

9393
new CometShuffledBatchRDD(dep, readMetrics)
9494
}

spark/src/main/scala/org/apache/spark/sql/comet/CometColumnarToRowExec.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -82,7 +82,7 @@ case class CometColumnarToRowExec(child: SparkPlan)
8282
val toUnsafe = UnsafeProjection.create(localOutput, localOutput)
8383
batches.flatMap { batch =>
8484
numInputBatches += 1
85-
numOutputRows += batch.numRows()
85+
numOutputRows += batch.numRows().toLong
8686
batch.rowIterator().asScala.map(toUnsafe)
8787
}
8888
}
@@ -120,7 +120,7 @@ case class CometColumnarToRowExec(child: SparkPlan)
120120
.flatMap(CometUtils.decodeBatches(_, this.getClass.getSimpleName))
121121
.flatMap { batch =>
122122
numInputBatches += 1
123-
numOutputRows += batch.numRows()
123+
numOutputRows += batch.numRows().toLong
124124
batch.rowIterator().asScala.map(toUnsafe)
125125
}
126126

spark/src/main/scala/org/apache/spark/sql/comet/CometScanExec.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -242,7 +242,7 @@ case class CometScanExec(
242242
driverMetrics("staticFilesSize") = filesSize
243243
}
244244
if (relation.partitionSchema.nonEmpty) {
245-
driverMetrics("numPartitions") = partitions.length
245+
driverMetrics("numPartitions") = partitions.length.toLong
246246
}
247247
}
248248

@@ -284,7 +284,7 @@ case class CometScanExec(
284284

285285
override def next(): ColumnarBatch = {
286286
val batch = batches.next()
287-
numOutputRows += batch.numRows()
287+
numOutputRows += batch.numRows().toLong
288288
batch
289289
}
290290
}

spark/src/main/scala/org/apache/spark/sql/comet/CometSparkToColumnarExec.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -83,7 +83,7 @@ case class CometSparkToColumnarExec(child: SparkPlan)
8383
val startNs = System.nanoTime()
8484
val batch = iter.next()
8585
conversionTime += System.nanoTime() - startNs
86-
numInputRows += batch.numRows()
86+
numInputRows += batch.numRows().toLong
8787
numOutputBatches += 1
8888
batch
8989
}
@@ -123,7 +123,7 @@ case class CometSparkToColumnarExec(child: SparkPlan)
123123
CometArrowConverters.rowToArrowBatchIter(
124124
sparkBatches,
125125
schema,
126-
maxRecordsPerBatch,
126+
maxRecordsPerBatch.toLong,
127127
timeZoneId,
128128
context)
129129
createTimingIter(arrowBatches, numInputRows, numOutputBatches, conversionTime)

spark/src/main/scala/org/apache/spark/sql/comet/CometTakeOrderedAndProjectExec.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -96,7 +96,7 @@ case class CometTakeOrderedAndProjectExec(
9696
outputPartitioning,
9797
serializer,
9898
metrics)
99-
metrics("numPartitions").set(dep.partitioner.numPartitions)
99+
metrics("numPartitions").set(dep.partitioner.numPartitions.toLong)
100100

101101
new CometShuffledBatchRDD(dep, readMetrics)
102102
}

0 commit comments

Comments
 (0)