
Commit 66f5b33

derrickburns and claude committed
test: simplify CI workflow to minimal version for debugging
Strip down to a single echo job to identify what's causing the GitHub Actions rejection.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
1 parent ec4f497 commit 66f5b33
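
For orientation, the workflow this commit leaves behind is sketched below. The diff's hunk starts at line 20 of the file, so the header (the name:, on: triggers, and the other env entries such as JAVA_VERSION and SCALA_213 that the deleted jobs reference) is not visible in this commit; the header lines in the sketch are placeholders, and only the SBT_OPTS entry and the jobs: section are taken verbatim from the diff.

# Sketch of .github/workflows/ci.yml after this commit; the header is assumed,
# since the hunk only shows lines 20 and onward.
name: CI                    # placeholder: the real workflow name is outside the hunk
on: [push, pull_request]    # placeholder: the real triggers are outside the hunk

env:
  SBT_OPTS: "-Xms2g -Xmx4g -XX:+UseG1GC -Dsbt.log.noformat=true"

jobs:
  # Job 0: Simple test to verify workflow is accepted
  test-minimal:
    runs-on: ubuntu-latest
    name: Minimal Test
    steps:
      - uses: actions/checkout@v4
      - name: Echo test
        run: echo "CI workflow is running successfully"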

File tree

1 file changed (+5 −203 lines)


.github/workflows/ci.yml

Lines changed: 5 additions & 203 deletions
@@ -20,210 +20,12 @@ env:
   SBT_OPTS: "-Xms2g -Xmx4g -XX:+UseG1GC -Dsbt.log.noformat=true"
 
 jobs:
-  # Job 0: Linting and Code Style (fast gate)
-  lint:
+  # Job 0: Simple test to verify workflow is accepted
+  test-minimal:
     runs-on: ubuntu-latest
-    name: Lint & Style Check
+    name: Minimal Test
     steps:
       - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ env.JAVA_VERSION }}
-          cache: sbt
-      - uses: sbt/setup-sbt@v1
-      - name: Check Formatting and Style
-        run: |
-          sbt ++${{ env.SCALA_213 }} scalafmtCheckAll
-          sbt ++${{ env.SCALA_213 }} scalastyle
+      - name: Echo test
+        run: echo "CI workflow is running successfully"
 
-  # Job 1: Build once (produce 2.12 & 2.13 jars for reuse)
-  build:
-    runs-on: ubuntu-latest
-    needs: lint
-    name: Build JARs (2.12 & 2.13)
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ env.JAVA_VERSION }}
-          cache: sbt
-      - uses: sbt/setup-sbt@v1
-      - name: Package 2.12 (Spark ${{ env.DEFAULT_SPARK }})
-        run: sbt ++${{ env.SCALA_212 }} -Dspark.version=${{ env.DEFAULT_SPARK }} clean package
-      - name: Package 2.13 (Spark ${{ env.DEFAULT_SPARK }})
-        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} clean package
-      - name: Upload JARs
-        uses: actions/upload-artifact@v4
-        with:
-          name: jars
-          path: |
-            target/scala-2.12/*.jar
-            target/scala-2.13/*.jar
-          if-no-files-found: error
-
-  # Job 2: Core JVM tests across matrix (re-compiles per combo; ok)
-  test-jvm:
-    runs-on: ubuntu-latest
-    needs: build
-    strategy:
-      fail-fast: false
-      matrix:
-        scala-version: [ '2.13.14', '2.12.18' ]
-        spark-version: [ '3.4.3', '3.5.1' ]
-        include:
-          # Force Java 17 for Spark 3.5/3.4
-          - java-version: '17'
-        exclude:
-          # Spark 3.4 typically ships with Scala 2.12 artifacts
-          - scala-version: '2.13.14'
-            spark-version: '3.4.3'
-    name: Test (Scala ${{ matrix.scala-version }}, Spark ${{ matrix.spark-version }})
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ matrix.java-version }}
-          cache: sbt
-      - uses: sbt/setup-sbt@v1
-      - name: Run All JVM Tests
-        run: sbt ++${{ matrix.scala-version }} -Dspark.version=${{ matrix.spark-version }} test
-      - name: Preserve test reports
-        if: always()
-        run: |
-          mkdir -p artifacts/test-reports/${{ matrix.scala-version }}_${{ matrix.spark-version }}
-          (test -d target/test-reports && cp -r target/test-reports/* artifacts/test-reports/${{ matrix.scala-version }}_${{ matrix.spark-version }}/) || true
-      - uses: actions/upload-artifact@v4
-        if: always()
-        with:
-          name: test-reports-${{ matrix.scala-version }}-${{ matrix.spark-version }}
-          path: artifacts/test-reports/${{ matrix.scala-version }}_${{ matrix.spark-version }}
-
-  # Job 3: Python smoke test (downloads jar built once; uses non-SE divergence)
-  test-python:
-    runs-on: ubuntu-latest
-    needs: build
-    name: Python Smoke Test (PySpark ${{ env.PYSPARK_PIN }})
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
-        with:
-          python-version: "3.11"
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ env.JAVA_VERSION }}
-      - name: Download JARs
-        uses: actions/download-artifact@v4
-        with:
-          name: jars
-          path: jars
-      - name: Install PySpark
-        run: python -m pip install --upgrade pip && pip install pyspark==${{ env.PYSPARK_PIN }}
-      - name: Run smoke (local[*], non-SE)
-        run: |
-          JAR_212=$(ls jars/*scala-2.12*.jar | head -n1)
-          test -f "$JAR_212"
-          spark-submit --jars "$JAR_212" python/smoke_test.py
-
-  # Job 4: Run examples via runMain with assertions
-  examples-run:
-    runs-on: ubuntu-latest
-    needs: build
-    name: Examples (runMain)
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ env.JAVA_VERSION }}
-          cache: sbt
-      - uses: sbt/setup-sbt@v1
-      - name: Run examples via runMain
-        run: |
-          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.BisectingExample"
-          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.XMeansExample"
-          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.SoftKMeansExample"
-
-  # Job 5: Cross-version persistence check (save on 3.4, load on 3.5; and reverse)
-  persistence-cross:
-    runs-on: ubuntu-latest
-    needs: build
-    name: Persistence Cross-Version (3.4 ↔ 3.5)
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ env.JAVA_VERSION }}
-          cache: sbt
-      - uses: sbt/setup-sbt@v1
-
-      - name: Save with Spark 3.4.x
-        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=3.4.3 "runMain examples.PersistenceRoundTrip save ./tmp_model_34"
-
-      - name: Load with Spark 3.5.x
-        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.PersistenceRoundTrip load ./tmp_model_34"
-
-      - name: Save with Spark 3.5.x
-        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "runMain examples.PersistenceRoundTrip save ./tmp_model_35"
-
-      - name: Load with Spark 3.4.x
-        run: sbt ++${{ env.SCALA_213 }} -Dspark.version=3.4.3 "runMain examples.PersistenceRoundTrip load ./tmp_model_35"
-
-  # Job 6: Perf sanity (SE and non-SE) — logs perf_sanity_seconds=...
-  perf-sanity:
-    runs-on: ubuntu-latest
-    needs: build
-    name: Perf Sanity
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ env.JAVA_VERSION }}
-          cache: sbt
-      - uses: sbt/setup-sbt@v1
-      - name: Run Perf Sanity (SE & KL)
-        run: |
-          sbt ++${{ env.SCALA_213 }} -Dspark.version=${{ env.DEFAULT_SPARK }} "testOnly *PerfSanitySuite"
-      - name: Surface perf metrics
-        run: |
-          grep -E 'perf_sanity_seconds=' target/scala-2.13/test-reports/* || true
-
-  # Job 7: Coverage (unchanged, runs once)
-  coverage:
-    runs-on: ubuntu-latest
-    needs: build
-    name: Code Coverage
-    steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-java@v4
-        with:
-          distribution: temurin
-          java-version: ${{ env.JAVA_VERSION }}
-          cache: sbt
-      - uses: sbt/setup-sbt@v1
-      - name: Generate Coverage Report
-        run: sbt ++${{ env.SCALA_212 }} -Dspark.version=${{ env.DEFAULT_SPARK }} coverage test coverageReport
-      - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v4
-        with:
-          file: ./target/scala-2.12/scoverage-report/scoverage.xml
-          fail_ci_if_error: false
-
-  # Final gate: only reports success if all validations pass
-  release-ready:
-    runs-on: ubuntu-latest
-    needs:
-      - lint
-      - test-jvm
-      - test-python
-      - examples-run
-      - persistence-cross
-      - perf-sanity
-      - coverage
-    steps:
-      - run: echo "All validations passed."
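
If GitHub Actions accepts this minimal workflow, the natural next bisection step is to restore the deleted jobs one at a time until the rejection reappears. A sketch of the first restoration, re-adding the lint job verbatim from the deleted section above; this assumes the JAVA_VERSION and SCALA_213 env vars are still defined in the file's unchanged header:

jobs:
  test-minimal:
    runs-on: ubuntu-latest
    name: Minimal Test
    steps:
      - uses: actions/checkout@v4
      - name: Echo test
        run: echo "CI workflow is running successfully"

  # Restored verbatim from the deleted section; if the workflow is rejected
  # again after this change, the problem is somewhere in this job.
  lint:
    runs-on: ubuntu-latest
    name: Lint & Style Check
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-java@v4
        with:
          distribution: temurin
          java-version: ${{ env.JAVA_VERSION }}
          cache: sbt
      - uses: sbt/setup-sbt@v1
      - name: Check Formatting and Style
        run: |
          sbt ++${{ env.SCALA_213 }} scalafmtCheckAll
          sbt ++${{ env.SCALA_213 }} scalastyle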

0 commit comments
