name: Build and Test

on:
  pull_request:
    branches: ['**']
  push:
    branches: ['master', 'develop', 'release/*', 'spark-3.2']
    tags: ['v*']
  release:
    types: [published]

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
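      # A full clone (fetch-depth: 0) pulls all history and tags, which is
      # typically needed when the build derives its version from git metadata.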
      - uses: actions/checkout@v2
        with:
          fetch-depth: 0
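      # coursier/cache-action caches the Coursier, Ivy, and sbt artifact
      # directories between runs to speed up dependency resolution.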
      - uses: coursier/cache-action@v6
      - name: Setup JDK
        uses: actions/setup-java@v3
        with:
          distribution: temurin
          java-version: 8
          cache: sbt

      # Do just the compilation stage to minimize sbt memory footprint
      - name: Compile
        run: sbt -v -batch compile test:compile it:compile

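      # Each module's test suite runs in its own sbt invocation, presumably to
      # keep the sbt/JVM memory footprint small (same concern as the compile step above).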
      - name: Core tests
        run: sbt -batch core/test

      - name: Datasource tests
        run: sbt -batch datasource/test

      - name: Experimental tests
        run: sbt -batch experimental/test

      # # TODO: Update python build to be PEP 517 compatible
      # - name: Install Conda dependencies
      #   run: |
      #     # $CONDA_DIR is an environment variable pointing to the root of the miniconda directory
      #     $CONDA_DIR/bin/conda install -c conda-forge --yes --file pyrasterframes/src/main/python/requirements-condaforge.txt
      # - name: Create PyRasterFrames package
      #   run: sbt -v -batch pyrasterframes/package
      # - name: Python tests
      #   run: sbt -batch pyrasterframes/test

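      # On failure, gather anything useful for post-mortem debugging (core dumps,
      # module logs, JVM perf data, and .hprof heap dumps) into one directory.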
      - name: Collect artifacts
        if: ${{ failure() }}
        run: |
          mkdir -p /tmp/core_dumps
          ls -lh /tmp
          cp core.* *.hs /tmp/core_dumps/ 2> /dev/null || true
          cp ./core/*.log /tmp/core_dumps/ 2> /dev/null || true
          cp -r /tmp/hsperfdata* /tmp/*.hprof /tmp/core_dumps/ 2> /dev/null || true
          cp repo/core/core/* /tmp/core_dumps/ 2> /dev/null || true

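      # Publish the collected files as a downloadable workflow artifact.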
      - name: Upload core dumps
        if: ${{ failure() }}
        uses: actions/upload-artifact@v2
        with:
          name: core-dumps
          path: /tmp/core_dumps