# (Pasted from the GitHub Actions UI — "Release" workflow, run #47, "Workflow file
# for this run". Preserved here as a comment so it does not break YAML parsing.)
# This workflow builds a release version of Pathling and deploys it to Maven Central and PyPI.
name: Release

# This workflow is only run when a release is published, or when triggered manually.
on:
  release:
    types: [published]
  workflow_dispatch:

env:
  # The add-exports and add-opens flags are required for Java 17.
  MAVEN_OPTS: --add-exports=java.base/sun.nio.ch=ALL-UNNAMED --add-opens=java.base/java.net=ALL-UNNAMED

permissions:
  id-token: write
  contents: write # Required to upload release assets.
jobs:
  release-maven:
    name: Release to Maven Central
    environment: maven-central
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
        with:
          # Full history so that version/tag information is available to the build.
          fetch-depth: 0
          submodules: recursive
      - name: Set up JDK
        uses: actions/setup-java@v4
        with:
          # Quoted so the version is read as a string, not a number.
          java-version: "17"
          distribution: "zulu"
      - name: Set up Python 3.8
        uses: actions/setup-python@v4
        id: python-install
        with:
          # Quoted to avoid YAML float coercion (e.g. 3.10 would parse as 3.1).
          python-version: "3.8"
      - name: Set up R
        uses: r-lib/actions/setup-r@v2
        with:
          r-version: "4.1.3"
          use-public-rspm: true
      - name: Set up Pandoc
        uses: r-lib/actions/setup-pandoc@v2
      - name: Cache local Maven repository
        uses: actions/cache@v4
        with:
          path: ~/.m2/repository
          key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
          restore-keys: |-
            ${{ runner.os }}-maven-
      - name: Cache Python packages
        uses: actions/cache@v4
        with:
          path: /home/runner/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('lib/python/requirements/dev.txt', 'lib/python/requirements/package.txt') }}
      - name: Cache R packages
        uses: actions/cache@v4
        with:
          path: ${{ runner.temp }}/Library
          key: r-packages-${{ runner.os }}-${{ hashFiles('lib/R/DESCRIPTION.src') }}
          restore-keys: r-packages-${{ runner.os }}-
      - name: Cache SonarQube packages
        uses: actions/cache@v4
        with:
          path: ~/.sonar/cache
          key: ${{ runner.os }}-sonar
          restore-keys: ${{ runner.os }}-sonar
      - name: Install TinyTeX and libcurl
        # These are required for building the R package documentation.
        run: |
          wget -qO- "https://yihui.org/tinytex/install-bin-unix.sh" | sh
          echo "$HOME/bin" >> $GITHUB_PATH
          sudo apt-get install -y libcurl4-openssl-dev
      - name: Install GPG key
        run: |
          cat <(echo -e "${{ secrets.GPG_KEY }}") | gpg --batch --import
          gpg --list-secret-keys --keyid-format LONG
      - name: Configure Maven settings
        uses: s4u/maven-settings-action@v3.1.0
        with:
          servers: |
            [{
              "id": "central",
              "username": "${{ secrets.OSSRH_USERNAME }}",
              "password": "${{ secrets.OSSRH_PASSWORD }}"
            }]
      # Release won't be possible if there are outstanding vulnerabilities of medium severity or
      # higher as reported by Trivy.
      - name: Run security scan
        uses: aquasecurity/trivy-action@0.32.0
        with:
          scan-type: repo
          severity: "MEDIUM,HIGH,CRITICAL"
          scan-ref: .
          format: sarif
          output: trivy-results.sarif
          skip-files: examples/**/*,**/target/**/*,sql-on-fhir/sof-js/package-lock.json,licenses/**/*,site/package-lock.json
          # Upon release, the databases will be updated and scanned to make sure nothing has crept
          # in since the last daily update.
          cache: false
      - name: Run deploy goal
        env:
          PYSPARK_PYTHON: ${{ steps.python-install.outputs.python-path }}
          PYSPARK_DRIVER_PYTHON: ${{ steps.python-install.outputs.python-path }}
          # Quoted — unquoted "yes" is a YAML 1.1 boolean on some parsers.
          R_KEEP_PKG_SOURCE: "yes"
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
          MAVEN_GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
        run: |
          mvn --batch-mode deploy \
            org.sonarsource.scanner.maven:sonar-maven-plugin:sonar \
            -Dsonar.projectKey=aehrc_pathling -Dsonar.organization=aehrc \
            -Dsonar.host.url=https://sonarcloud.io \
            -Dsonar.sarifReportPaths=trivy-results.sarif \
            -pl '!benchmark' -Pdocs,mavenRelease
        timeout-minutes: 60
      - name: Save test reports
        uses: actions/upload-artifact@v4
        with:
          name: surefire-reports
          path: "**/surefire-reports/"
      - name: Save coverage reports
        uses: actions/upload-artifact@v4
        with:
          name: coverage-reports
          path: |
            **/jacoco.xml
            **/target/site/jacoco
            **/target/site/jacoco-aggregate
            lib/python/**/coverage.xml
      - name: Save built JARs
        uses: actions/upload-artifact@v4
        with:
          name: jars
          path: |
            utilities/target/utilities-*.jar
            encoders/target/encoders-*.jar
            terminology/target/terminology-*.jar
            fhirpath/target/fhirpath-*.jar
            library-api/target/library-api-*.jar
            library-runtime/target/library-runtime-*.jar
            lib/python/target/python-*.jar
            lib/R/target/r-*.jar
          if-no-files-found: "error"
      - name: Save Python wheel
        uses: actions/upload-artifact@v4
        with:
          name: python-wheel
          path: lib/python/target/py-dist/pathling-*.whl
      - name: Save R package
        uses: actions/upload-artifact@v4
        with:
          name: r-package
          path: lib/R/target/pathling_*.tar.gz
      - name: Save site
        uses: actions/upload-artifact@v4
        with:
          name: site
          path: site/target/site/
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::865780493209:role/PathlingBenchmarkUpload
          aws-region: ap-southeast-2
      - name: Upload SQL on FHIR test report to S3
        run: aws s3 cp fhirpath/target/fhir-view-compliance-test.json s3://pathling-benchmark/test-reports/${{ github.ref }}/sof-test-results.json
      - name: Upload release assets
        run: |
          gh release upload ${{ github.ref_name }} \
            --clobber \
            library-runtime/target/library-runtime-*.jar \
            lib/python/target/py-dist/pathling-*.whl \
            lib/R/target/pathling_*.tar.gz
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
release-pypi:
name: Release to PyPI
environment: pypi
runs-on: ubuntu-latest
needs: release-maven
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
# This is required so that git-commit-id-plugin can find the latest tag.
fetch-depth: 0
submodules: recursive
- name: Set up JDK
uses: actions/setup-java@v4
with:
java-version: 17
distribution: "zulu"
- name: Set up Python 3.8
uses: actions/setup-python@v4
id: python-install
with:
python-version: 3.8
- name: Set up R
uses: r-lib/actions/setup-r@v2
with:
r-version: "4.1.3"
use-public-rspm: true
- name: Set up Pandoc
uses: r-lib/actions/setup-pandoc@v2
- name: Cache local Maven repository
uses: actions/cache@v4
with:
path: ~/.m2/repository
key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Cache Python packages
uses: actions/cache@v4
with:
path: /home/runner/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('lib/python/requirements/dev.txt', 'lib/python/requirements/package.txt') }}
- name: Cache R packages
uses: actions/cache@v4
with:
path: ${{ runner.temp }}/Library
key: r-packages-${{ runner.os }}-${{ hashFiles('lib/R/DESCRIPTION.src') }}
restore-keys: r-packages-${{ runner.os }}-
- name: Install TinyTeX and libcurl
# These are required for building the R package documentation.
run: |
wget -qO- "https://yihui.org/tinytex/install-bin-unix.sh" | sh
echo "$HOME/bin" >> $GITHUB_PATH
sudo apt-get install -y libcurl4-openssl-dev
- name: Run deploy goal
env:
PYSPARK_PYTHON: ${{ steps.python-install.outputs.python-path }}
PYSPARK_DRIVER_PYTHON: ${{ steps.python-install.outputs.python-path }}
TWINE_USERNAME: ${{ secrets.TWINE_USERNAME }}
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
R_KEEP_PKG_SOURCE: yes
run: |
mvn --batch-mode deploy \
-pl lib/python -am \
-DskipTests -PpythonRelease
timeout-minutes: 30
upload-to-dap:
name: Upload source code to CSIRO DAP
environment: csiro-dap
runs-on: ubuntu-latest
needs: [release-maven, release-pypi]
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Download source code ZIP from release
run: |
# Extract version from tag (remove 'v' prefix if present)
VERSION="${{ github.ref_name }}"
VERSION="${VERSION#v}" # Remove leading 'v' if present
# Download the automatic source code archive from GitHub
curl -L -o "pathling-${VERSION}.zip" \
"https://api.github.com/repos/${{ github.repository }}/zipball/${{ github.ref_name }}"
# Verify that the ZIP file was downloaded
if [ ! -f "pathling-${VERSION}.zip" ]; then
echo "Error: Failed to download source code archive for ${{ github.ref_name }}"
exit 1
fi
echo "Downloaded source code archive: pathling-${VERSION}.zip"
echo "File size: $(ls -lh pathling-${VERSION}.zip | awk '{print $5}')"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload to CSIRO DAP
run: |
# Get the data collection ID for csiro:49524
echo "Getting collection information for csiro:49524..."
COLLECTION_RESPONSE=$(curl -s -u "${{ secrets.DAP_USERNAME }}:${{ secrets.DAP_PASSWORD }}" \
"https://data.csiro.au/dap/api/v2/my-collections")
# Find the collection with PID csiro:49524
COLLECTION_DATA=$(echo "$COLLECTION_RESPONSE" | jq -r ".[] | select(.dataCollectionPid == \"csiro:49524\")")
COLLECTION_ID=$(echo "$COLLECTION_DATA" | jq -r ".dataCollectionId")
COLLECTION_STATUS=$(echo "$COLLECTION_DATA" | jq -r ".status")
if [ "$COLLECTION_ID" = "null" ] || [ -z "$COLLECTION_ID" ]; then
echo "Collection csiro:49524 not found or not accessible"
exit 1
fi
echo "Found collection ID: $COLLECTION_ID (Status: $COLLECTION_STATUS)"
# Create a new version of the collection (or use existing draft)
if [ "$COLLECTION_STATUS" = "Draft" ]; then
echo "Using existing draft version..."
NEW_COLLECTION_ID=$COLLECTION_ID
else
echo "Creating new version of collection..."
UPDATE_RESPONSE=$(curl -s -X POST -u "${{ secrets.DAP_USERNAME }}:${{ secrets.DAP_PASSWORD }}" \
"https://data.csiro.au/dap/api/v2/collections/$COLLECTION_ID/update")
NEW_COLLECTION_ID=$(echo "$UPDATE_RESPONSE" | jq -r '.dataCollectionId')
if [ "$NEW_COLLECTION_ID" = "null" ] || [ -z "$NEW_COLLECTION_ID" ]; then
echo "Failed to create new version of collection"
echo "Response: $UPDATE_RESPONSE"
exit 1
fi
echo "Created new collection version with ID: $NEW_COLLECTION_ID"
fi
# Unlock files for modification
echo "Unlocking files for modification..."
curl -s -X POST -u "${{ secrets.DAP_USERNAME }}:${{ secrets.DAP_PASSWORD }}" \
"https://data.csiro.au/dap/api/v2/collections/$NEW_COLLECTION_ID/files/unlock"
# Wait for files to be unlocked
echo "Waiting for files to be unlocked..."
while true; do
sleep 10
FILE_STATE_RESPONSE=$(curl -s -u "${{ secrets.DAP_USERNAME }}:${{ secrets.DAP_PASSWORD }}" \
"https://data.csiro.au/dap/api/v2/collections/$NEW_COLLECTION_ID/files/fileState")
echo "File state: $FILE_STATE_RESPONSE"
FILE_STATE=$(echo "$FILE_STATE_RESPONSE" | jq -r '.value // .')
if [ "$FILE_STATE" = "unlocked" ]; then
break
fi
if [ "$FILE_STATE" = "error" ]; then
echo "Error unlocking files"
exit 1
fi
done
# Upload the source code zip file
echo "Uploading source code zip file..."
ZIP_FILE=$(ls *.zip | head -n 1)
if [ -z "$ZIP_FILE" ]; then
echo "No zip file found to upload"
exit 1
fi
echo "Uploading file: $ZIP_FILE"
# Get S3 credentials
echo "Getting S3 credentials..."
S3_CREDS=$(curl -s -u "${{ secrets.DAP_USERNAME }}:${{ secrets.DAP_PASSWORD }}" \
"https://data.csiro.au/dap/api/v2/collections/$NEW_COLLECTION_ID/files/s3")
S3_ACCESS_KEY=$(echo "$S3_CREDS" | jq -r '.accessKey')
S3_SECRET_KEY=$(echo "$S3_CREDS" | jq -r '.secretAccessKey')
S3_BUCKET=$(echo "$S3_CREDS" | jq -r '.bucket')
S3_PATH=$(echo "$S3_CREDS" | jq -r '.remoteDirectory')
S3_ENDPOINT=$(echo "$S3_CREDS" | jq -r '.endPointUrl')
echo "S3 Endpoint: $S3_ENDPOINT"
echo "S3 Bucket: $S3_BUCKET"
echo "S3 Path: $S3_PATH"
# Upload using AWS CLI
echo "Uploading to S3..."
# Remove bucket name from path if it's duplicated
S3_PATH_CLEAN=$(echo "$S3_PATH" | sed "s|^$S3_BUCKET/||")
AWS_ACCESS_KEY_ID="$S3_ACCESS_KEY" AWS_SECRET_ACCESS_KEY="$S3_SECRET_KEY" \
aws s3 cp "$ZIP_FILE" "s3://$S3_BUCKET/${S3_PATH_CLEAN}$ZIP_FILE" \
--endpoint-url "$S3_ENDPOINT"
echo "File uploaded successfully"
# Validate the collection
echo "Validating collection..."
VALIDATION_RESPONSE=$(curl -s -u "${{ secrets.DAP_USERNAME }}:${{ secrets.DAP_PASSWORD }}" \
"https://data.csiro.au/dap/api/v2/collections/$NEW_COLLECTION_ID/validate")
echo "Validation response: $VALIDATION_RESPONSE"
# Check if validation passed
VALIDATION_ERRORS=$(echo "$VALIDATION_RESPONSE" | jq -r '.errors // []')
if [ "$VALIDATION_ERRORS" != "[]" ] && [ "$VALIDATION_ERRORS" != "null" ]; then
echo "Validation errors found: $VALIDATION_ERRORS"
exit 1
fi
# Submit collection for publication (bypass approval if possible)
echo "Submitting collection for publication..."
SUBMIT_PAYLOAD='{"approver": null, "businessUnit": null, "notesToApprover": null, "supportingFilesForApprover": []}'
curl -s -X POST -u "${{ secrets.DAP_USERNAME }}:${{ secrets.DAP_PASSWORD }}" \
-H "Content-Type: application/json" \
-d "$SUBMIT_PAYLOAD" \
"https://data.csiro.au/dap/api/v2/collections/$NEW_COLLECTION_ID/submit"
echo "Collection submitted for publication"
timeout-minutes: 20