2626 steps :
2727 - uses : actions/checkout@v4
2828 - uses : actions/setup-java@v4
29- with : { distribution: temurin, java-version: ${{ env.JAVA_VERSION }} }
29+ with :
30+ distribution : temurin
31+ java-version : ${{ env.JAVA_VERSION }}
3032 - uses : sbt/setup-sbt@v1
3133 - uses : actions/cache@v4
3234 with :
4951 steps :
5052 - uses : actions/checkout@v4
5153 - uses : actions/setup-java@v4
52- with : { distribution: temurin, java-version: ${{ env.JAVA_VERSION }} }
54+ with :
55+ distribution : temurin
56+ java-version : ${{ env.JAVA_VERSION }}
5357 - uses : sbt/setup-sbt@v1
5458 - uses : actions/cache@v4
5559 with :
@@ -81,15 +85,17 @@ jobs:
8185 fail-fast : false
8286 matrix :
8387 include :
84- # Legal pairs only. Spark 3.4 ships Scala 2.12 artifacts, not 2.13.
88+ # Legal Spark–Scala pairs only
8589 - { scala: "2.13.14", spark: "3.5.1" }
8690 - { scala: "2.12.18", spark: "3.4.3" }
8791 - { scala: "2.12.18", spark: "3.5.1" }
8892 name : Test (Scala ${{ matrix.scala }}, Spark ${{ matrix.spark }})
8993 steps :
9094 - uses : actions/checkout@v4
9195 - uses : actions/setup-java@v4
92- with : { distribution: temurin, java-version: ${{ env.JAVA_VERSION }} }
96+ with :
97+ distribution : temurin
98+ java-version : ${{ env.JAVA_VERSION }}
9399 - uses : sbt/setup-sbt@v1
94100 - uses : actions/cache@v4
95101 with :
@@ -110,7 +116,9 @@ jobs:
110116 steps :
111117 - uses : actions/checkout@v4
112118 - uses : actions/setup-java@v4
113- with : { distribution: temurin, java-version: ${{ env.JAVA_VERSION }} }
119+ with :
120+ distribution : temurin
121+ java-version : ${{ env.JAVA_VERSION }}
114122 - uses : sbt/setup-sbt@v1
115123 - uses : actions/cache@v4
116124 with :
@@ -137,17 +145,23 @@ jobs:
137145 steps :
138146 - uses : actions/checkout@v4
139147 - uses : actions/download-artifact@v4
140- with : { name: jars, path: jars }
148+ with :
149+ name : jars
150+ path : jars
141151 - uses : actions/setup-python@v5
142- with : { python-version: "3.11" }
152+ with :
153+ python-version : "3.11"
143154 - uses : actions/setup-java@v4
144- with : { distribution: temurin, java-version: ${{ env.JAVA_VERSION }} }
155+ with :
156+ distribution : temurin
157+ java-version : ${{ env.JAVA_VERSION }}
145158 - name : Install PySpark
146159 run : |
147160 python -m pip install --upgrade pip
148161 pip install pyspark==${{ env.SPARK_35 }}
149162 - name : Run smoke test with 2.12 JAR
150- env : { PYTHONUNBUFFERED: "1" }
163+ env :
164+ PYTHONUNBUFFERED : "1"
151165 run : |
152166 JAR_212=$(ls jars/target/scala-2.12/*.jar 2>/dev/null | head -n1 || true)
153167 if [ -z "$JAR_212" ]; then
@@ -164,7 +178,9 @@ jobs:
164178 steps :
165179 - uses : actions/checkout@v4
166180 - uses : actions/setup-java@v4
167- with : { distribution: temurin, java-version: ${{ env.JAVA_VERSION }} }
181+ with :
182+ distribution : temurin
183+ java-version : ${{ env.JAVA_VERSION }}
168184 - uses : sbt/setup-sbt@v1
169185 - uses : actions/cache@v4
170186 with :
@@ -178,15 +194,14 @@ jobs:
178194 - name : Run scoverage
179195 run : |
180196 sbt ++${{ env.SCALA_212 }} -Dspark.version=${{ env.SPARK_35 }} coverage test coverageReport
181- # Optional perf sanity line for log scraping:
182197 echo "perf_sanity_seconds=$(grep -hR --only-matching -E 'perf_sanity_seconds=[0-9.]+' target || true)"
183198 - name : Upload coverage artifact
184199 uses : actions/upload-artifact@v4
185200 with :
186201 name : scoverage-report
187202 path : target/**/scoverage-report/*
188203 retention-days : 7
189- - name : Upload to Codecov (optional)
204+ - name : Upload to Codecov
190205 uses : codecov/codecov-action@v4
191206 with :
192207 files : ./target/scala-2.12/scoverage-report/scoverage.xml
0 commit comments