@@ -35,3 +35,193 @@ jobs:
       - name: Check Style
         run: sbt ++${{ env.SCALA_213 }} scalastyle
 
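+  # The jobs below assume a workflow-level env block defined above this hunk.
+  # A minimal sketch (values illustrative, inferred from the matrix and pins used below):
+  #   env:
+  #     SCALA_212: 2.12.18
+  #     SCALA_213: 2.13.14
+  #     JAVA_VERSION: '17'
+  #     DEFAULT_SPARK: 3.5.1
+  #     PYSPARK_PIN: 3.5.1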
+  # Job 1: Build once (produce 2.12 & 2.13 jars for reuse)
+  build:
+    runs-on: ubuntu-latest
+    needs: lint
+    name: Build JARs (2.12 & 2.13)
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin
+          java-version: ${{ env.JAVA_VERSION }}
+          cache: sbt
+      - uses: sbt/setup-sbt@v1
+      - name: Package 2.12 (Spark ${{ env.DEFAULT_SPARK }})
+        run: sbt ++${{ env.SCALA_212 }} -Dspark.version=${{ env.DEFAULT_SPARK }} clean package
+      - name: Package 2.13 (Spark ${{ env.DEFAULT_SPARK }})
+        # No `clean` here: it would wipe target/ and delete the 2.12 jar built above.
+        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} package
+      - name: Upload JARs
+        uses: actions/upload-artifact@v4
+        with:
+          name: jars
+          path: |
+            target/scala-2.12/*.jar
+            target/scala-2.13/*.jar
+          if-no-files-found: error
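+  # Note: upload-artifact@v4 roots the archive at the least common ancestor of
+  # the search paths (target/ here), so jobs downloading `jars` see scala-2.12/
+  # and scala-2.13/ subdirectories rather than bare jar files.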
+
+  # Job 2: Core JVM tests across matrix (re-compiles per combo; acceptable)
+  test-jvm:
+    runs-on: ubuntu-latest
+    needs: build
+    strategy:
+      fail-fast: false
+      matrix:
+        scala-version: ['2.13.14', '2.12.18']
+        spark-version: ['3.4.3', '3.5.1']
+        include:
+          # Force Java 17 for Spark 3.5/3.4
+          - java-version: '17'
+        exclude:
+          # Spark 3.4 typically ships with Scala 2.12 artifacts
+          - scala-version: '2.13.14'
+            spark-version: '3.4.3'
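+        # Note: an include entry whose keys all lie outside the matrix axes
+        # (java-version above) is merged into every combination, which is what
+        # pins Java 17 across the whole matrix.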
+    name: Test (Scala ${{ matrix.scala-version }}, Spark ${{ matrix.spark-version }})
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin
+          java-version: ${{ matrix.java-version }}
+          cache: sbt
+      - uses: sbt/setup-sbt@v1
+      - name: Run All JVM Tests
+        run: sbt ++${{ matrix.scala-version }} -Dspark.version=${{ matrix.spark-version }} test
+      - name: Preserve test reports
+        if: always()
+        run: |
+          mkdir -p artifacts/test-reports/${{ matrix.scala-version }}_${{ matrix.spark-version }}
+          (test -d target/test-reports && cp -r target/test-reports/* artifacts/test-reports/${{ matrix.scala-version }}_${{ matrix.spark-version }}/) || true
+      - uses: actions/upload-artifact@v4
+        if: always()
+        with:
+          name: test-reports-${{ matrix.scala-version }}-${{ matrix.spark-version }}
+          path: artifacts/test-reports/${{ matrix.scala-version }}_${{ matrix.spark-version }}
+
+  # Job 3: Python smoke test (reuses the jar built once; exercises a
+  # non-squared-Euclidean divergence)
+  test-python:
+    runs-on: ubuntu-latest
+    needs: build
+    name: Python Smoke Test (PySpark ${{ env.PYSPARK_PIN }})
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin
+          java-version: ${{ env.JAVA_VERSION }}
+      - name: Download JARs
+        uses: actions/download-artifact@v4
+        with:
+          name: jars
+          path: jars
+      - name: Install PySpark
+        run: python -m pip install --upgrade pip && pip install pyspark==${{ env.PYSPARK_PIN }}
+      - name: Run smoke test (local[*], non-squared-Euclidean)
+        run: |
+          # The artifact keeps the scala-2.12/ and scala-2.13/ subdirectories,
+          # so search recursively instead of globbing jars/*.jar directly.
+          JAR_212=$(find jars -path '*scala-2.12*' -name '*.jar' | head -n1)
+          test -f "$JAR_212"
+          spark-submit --jars "$JAR_212" python/smoke_test.py
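+      # python/smoke_test.py is not shown in this diff; it is assumed to start a
+      # local[*] SparkSession, fit a small model using a non-squared-Euclidean
+      # divergence from the attached jar, and exit non-zero on failure.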
+
+  # Job 4: Run examples via runMain with assertions
+  examples-run:
+    runs-on: ubuntu-latest
+    needs: build
+    name: Examples (runMain)
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin
+          java-version: ${{ env.JAVA_VERSION }}
+          cache: sbt
+      - uses: sbt/setup-sbt@v1
+      - name: Run examples via runMain
+        run: |
+          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.BisectingExample"
+          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.XMeansExample"
+          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.SoftKMeansExample"
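+      # (Optional) a single sbt session could run all three mains and avoid
+      # paying JVM startup three times, e.g.:
+      #   sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} \
+      #     "runMain examples.BisectingExample" "runMain examples.XMeansExample" \
+      #     "runMain examples.SoftKMeansExample"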
+
+  # Job 5: Cross-version persistence check (save on 3.4, load on 3.5; and reverse)
+  persistence-cross:
+    runs-on: ubuntu-latest
+    needs: build
+    name: Persistence Cross-Version (3.4 ↔ 3.5)
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin
+          java-version: ${{ env.JAVA_VERSION }}
+          cache: sbt
+      - uses: sbt/setup-sbt@v1
+
+      - name: Save with Spark 3.4.x
+        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=3.4.3 "runMain examples.PersistenceRoundTrip save ./tmp_model_34"
+
+      - name: Load with Spark 3.5.x
+        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.PersistenceRoundTrip load ./tmp_model_34"
+
+      - name: Save with Spark 3.5.x
+        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.PersistenceRoundTrip save ./tmp_model_35"
+
+      - name: Load with Spark 3.4.x
+        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=3.4.3 "runMain examples.PersistenceRoundTrip load ./tmp_model_35"
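+      # Assumed CLI contract for examples.PersistenceRoundTrip (not shown in this
+      # diff): "save <path>" fits a small model and writes it; "load <path>" reads
+      # it back and fails the step (non-zero exit) if its assertions do not hold.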
+
+  # Job 6: Perf sanity (SE and non-SE); logs perf_sanity_seconds=...
+  perf-sanity:
+    runs-on: ubuntu-latest
+    needs: build
+    name: Perf Sanity
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin
+          java-version: ${{ env.JAVA_VERSION }}
+          cache: sbt
+      - uses: sbt/setup-sbt@v1
+      - name: Run Perf Sanity (SE & KL)
+        run: |
+          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "testOnly *PerfSanitySuite"
+      - name: Surface perf metrics
+        run: |
+          # Check both common report locations; this step is best-effort.
+          grep -rE 'perf_sanity_seconds=' target/test-reports target/scala-2.13/test-reports 2>/dev/null || true
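+      # perf_sanity_seconds= is assumed to be written by PerfSanitySuite itself;
+      # the `|| true` keeps this step green even when no report carries the
+      # marker, so this surfaces timings rather than gating on them.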
+
+  # Job 7: Coverage (unchanged, runs once)
+  coverage:
+    runs-on: ubuntu-latest
+    needs: build
+    name: Code Coverage
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-java@v4
+        with:
+          distribution: temurin
+          java-version: ${{ env.JAVA_VERSION }}
+          cache: sbt
+      - uses: sbt/setup-sbt@v1
+      - name: Generate Coverage Report
+        run: sbt ++${{ env.SCALA_212 }} -Dspark.version=${{ env.DEFAULT_SPARK }} coverage test coverageReport
+      - name: Upload coverage to Codecov
+        uses: codecov/codecov-action@v4
+        with:
+          files: ./target/scala-2.12/scoverage-report/scoverage.xml
+          fail_ci_if_error: false
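+      # The report path assumes scoverage's default output location for the
+      # 2.12 build; codecov-action@v4 takes the plural `files:` input, with the
+      # singular `file:` kept only as a deprecated alias.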
+
+  # Final gate: only reports success if all validations pass
+  release-ready:
+    runs-on: ubuntu-latest
+    needs:
+      - lint
+      - test-jvm
+      - test-python
+      - examples-run
+      - persistence-cross
+      - perf-sanity
+      - coverage
+    steps:
+      - run: echo "All validations passed."