
Commit fbca163

chore: fix pr_build*.yml (#2434)
* chore: fix pr_build*.yml
* clippy
* chore: fix pr_build*.yml
1 parent f1fb980 commit fbca163

4 files changed: +9 -7 lines changed

.github/workflows/pr_build_linux.yml

Lines changed: 2 additions & 2 deletions
@@ -151,7 +151,7 @@ jobs:
               org.apache.comet.objectstore.NativeConfigSuite
           - name: "sql"
             value: |
-              ${{ matrix.profile.maven_opts != 'Spark 3.4, JDK 11, Scala 2.12' && 'org.apache.spark.sql.CometToPrettyStringSuite' || ''}}
+              org.apache.spark.sql.CometToPrettyStringSuite
       fail-fast: false
     name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}]
     runs-on: ${{ matrix.os }}
@@ -171,7 +171,7 @@
         uses: ./.github/actions/java-test
         with:
           artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
-          suites: ${{ matrix.suite.value }}
+          suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }}
           maven_opts: ${{ matrix.profile.maven_opts }}
           scan_impl: ${{ matrix.profile.scan_impl }}
           upload-test-reports: true

.github/workflows/pr_build_macos.yml

Lines changed: 2 additions & 2 deletions
@@ -116,7 +116,7 @@ jobs:
               org.apache.comet.objectstore.NativeConfigSuite
           - name: "sql"
             value: |
-              ${{ matrix.profile.maven_opts != 'Spark 3.4, JDK 11, Scala 2.12' && 'org.apache.spark.sql.CometToPrettyStringSuite' || ''}}
+              org.apache.spark.sql.CometToPrettyStringSuite
       fail-fast: false
     name: ${{ matrix.os }}/${{ matrix.profile.name }} [${{ matrix.suite.name }}]
     runs-on: ${{ matrix.os }}
@@ -133,5 +133,5 @@
         uses: ./.github/actions/java-test
         with:
           artifact_name: ${{ matrix.os }}-${{ matrix.profile.name }}-${{ matrix.suite.name }}-${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}
-          suites: ${{ matrix.suite.value }}
+          suites: ${{ matrix.suite.name == 'sql' && matrix.profile.name == 'Spark 3.4, JDK 11, Scala 2.12' && '' || matrix.suite.value }}
           maven_opts: ${{ matrix.profile.maven_opts }}
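Both workflow files lean on the same GitHub Actions expression idiom: && and || return one of their operands rather than a strict boolean, so "cond && value || fallback" stands in for a ternary operator. Below is a hypothetical fragment (not part of this commit) showing the pattern; per the expression rules, a falsy middle operand (false, 0, or an empty string) falls through to the fallback.

    # Hypothetical workflow, only to illustrate the && / || selection pattern
    # used by the suites expression above; nothing here comes from the commit.
    on: push
    jobs:
      demo:
        runs-on: ubuntu-latest
        steps:
          - name: Pick a label with the && / || pattern
            run: echo "${{ github.event_name == 'push' && 'push build' || 'other build' }}"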

native/spark-expr/src/conversion_funcs/cast.rs

Lines changed: 2 additions & 2 deletions
@@ -248,7 +248,7 @@ fn can_cast_from_string(to_type: &DataType, options: &SparkCastOptions) -> bool
     }
 }
 
-fn can_cast_to_string(from_type: &DataType, options: &SparkCastOptions) -> bool {
+fn can_cast_to_string(from_type: &DataType, _options: &SparkCastOptions) -> bool {
     use DataType::*;
     match from_type {
         Boolean | Int8 | Int16 | Int32 | Int64 | Date32 | Date64 | Timestamp(_, _) => true,
@@ -267,7 +267,7 @@ fn can_cast_to_string(from_type: &DataType, options: &SparkCastOptions) -> bool
         Binary => true,
         Struct(fields) => fields
             .iter()
-            .all(|f| can_cast_to_string(f.data_type(), options)),
+            .all(|f| can_cast_to_string(f.data_type(), _options)),
         _ => false,
     }
 }
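In cast.rs, the options argument of can_cast_to_string is now only forwarded to the recursive call in the Struct arm, so the commit renames it to _options; this is presumably the "clippy" part of the commit message, since a leading underscore is the conventional way to mark a parameter as intentionally unused or only used in recursion. A minimal, standalone sketch of that convention follows (the names are illustrative, not from the Comet codebase).

    // Illustrative only: the underscore prefix tells rustc the parameter is
    // intentionally ignored, so the unused_variables lint (and a CI build run
    // with -D warnings) no longer flags it while the signature stays unchanged.
    #![deny(unused_variables)]

    fn is_supported(kind: u8, _options: &str) -> bool {
        // _options is accepted for signature compatibility but not read here.
        kind < 8
    }

    fn main() {
        assert!(is_supported(3, "ignored"));
    }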

spark/src/main/scala/org/apache/comet/serde/literals.scala

Lines changed: 3 additions & 1 deletion
@@ -132,7 +132,9 @@ object CometLiteral extends CometExpressionSerde[Literal] with Logging {
       case ByteType =>
         array.foreach(v => {
           val casted = v.asInstanceOf[lang.Byte]
-          listLiteralBuilder.addByteValues(casted.intValue())
+          listLiteralBuilder.addByteValues(
+            if (casted != null) casted.intValue()
+            else null.asInstanceOf[Integer])
           listLiteralBuilder.addNullMask(casted != null)
         })
       case ShortType =>
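The literals.scala change is a null-safety fix: v.asInstanceOf[lang.Byte] yields a boxed java.lang.Byte, which is null when the literal array contains a null element, and calling intValue() on it throws a NullPointerException before the null mask is even written. A simplified, standalone sketch of the boxed-null handling follows (not the Comet builder API).

    // Simplified sketch, not the Comet builder API: unboxing a null
    // java.lang.Byte throws, so the null case is short-circuited first and
    // recorded in a separate null mask instead.
    object NullByteSketch {
      def main(args: Array[String]): Unit = {
        val values: Seq[java.lang.Byte] = Seq(java.lang.Byte.valueOf(1.toByte), null)

        val boxedInts: Seq[java.lang.Integer] = values.map { casted =>
          if (casted != null) java.lang.Integer.valueOf(casted.intValue())
          else null.asInstanceOf[java.lang.Integer]
        }
        val nullMask: Seq[Boolean] = values.map(_ != null)

        println(boxedInts) // List(1, null)
        println(nullMask)  // List(true, false)
      }
    }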
