[dependabot] Bump the actions group across 1 directory with 7 updates #203
Workflow file for this run
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
name: spark

on:
  push:
    branches:
      - main
      - '[0-9]+.[0-9]+.x'
    tags:
      - '**'
  pull_request:

# Restrict the default GITHUB_TOKEN to read-only repository contents.
permissions: # added using https://github.com/step-security/secure-repo
  contents: read

env:
  JAVA_VERSION: 17
  MAVEN_CLI_OPTS: -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false --batch-mode -Dlicense.skip=true
  MAVEN_COMPILE_SPARK_ARGS: clean install -Dmaven.test.skip -Dmaven.assembly.skip=true -Dmaven.source.skip
  MAVEN_TEST_SPARK_ARGS: test failsafe:integration-test failsafe:verify -Dmaven.main.skip -Dtest.fork.count=2

jobs:
  integration-tests:
    name: integration-tests / ${{ matrix.projects.name }} / ${{ matrix.scala-version }}
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        scala-version: [ "2.12" ] # note: spark 3.x doesn't have dockers for scala 2.13
        projects:
          - name: accumulo
            list: geomesa-accumulo/geomesa-accumulo-spark
          - name: fs
            list: geomesa-fs/geomesa-fs-spark
          - name: gt
            list: geomesa-gt/geomesa-gt-spark
          # TODO we don't have any tests for hbase spark runtime
          - name: hbase
            list: geomesa-hbase/geomesa-hbase-spark
          - name: pyspark
            # note: we're sneaking in a profile to the project list arg here
            list: geomesa-spark/geomesa_pyspark -Ppython
    steps:
      - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
      - uses: actions/setup-java@be666c2fcd27ec809703dec50e508c2fdc7f6654 # v5.2.0
        with:
          distribution: temurin
          java-version: "${{ env.JAVA_VERSION }}"
      # Cache the local Maven repository, keyed per matrix cell so jobs don't clobber each other.
      - uses: actions/cache@cdf6c1fa76f9f475f3d7449005a359c84ca0f306 # v5.0.3
        with:
          key: ${{ hashFiles('**/pom.xml') }}-spark-integration-tests-${{ matrix.scala-version }}-${{ matrix.projects.name }}
          path: ~/.m2/repository/
      # Python toolchain is only needed for the pyspark matrix entry.
      - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
        if: matrix.projects.name=='pyspark'
        with:
          python-version: '3.10'
          cache: 'pip'
          cache-dependency-path: geomesa-spark/geomesa_pyspark/src/test/python/requirements.txt
      - name: Install python dependencies
        if: matrix.projects.name=='pyspark'
        run: pip install -r geomesa-spark/geomesa_pyspark/src/test/python/requirements.txt
      - name: Set Scala version
        run: ./build/scripts/change-scala-version.sh ${{ matrix.scala-version }}
      # Build is allowed to fail once; the retry step below recovers from transient
      # Maven Central download errors by resuming from the failed module.
      - name: Compile
        id: compile
        continue-on-error: true
        run: |
          set -o pipefail
          mvn $MAVEN_COMPILE_SPARK_ARGS $MAVEN_CLI_OPTS -am -pl ${{ matrix.projects.list }} | tee -a build.log
      - name: Compile (retry)
        if: steps.compile.outcome=='failure'
        run: |
          set -o pipefail
          # retry if the failure was due to transient download errors from maven central
          if grep -q -e 'Could not transfer artifact' -e 'Failed to read artifact descriptor' build.log; then
            RESUME_FROM="$({ grep --text 'mvn <args> -rf ' build.log || test $? = 1; } | tail -n1 | sed 's/.*-rf/-rf/')"
            mvn $MAVEN_COMPILE_SPARK_ARGS $MAVEN_CLI_OPTS -am -pl ${{ matrix.projects.list }} $RESUME_FROM | tee -a build.log
          else
            exit 1
          fi
      - name: Run Spark tests
        run: mvn $MAVEN_TEST_SPARK_ARGS $MAVEN_CLI_OPTS -pl ${{ matrix.projects.list }}
      # Runs on success or failure (but not cancellation) so cached ~/.m2 never
      # contains stale locally-built geomesa artifacts.
      - name: Remove geomesa artifacts
        if: success() || failure()
        run: rm -rf ~/.m2/repository/org/locationtech/geomesa