Update documentation for installation and config #87

name: lint_and_test
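# Trigger on pushes to main/production and on pull requests, but only when files
# under src/, tests/, or the workflow definitions themselves change.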
on:
  push:
    branches:
      - main
      - production
    paths:
      - 'src/**'
      - 'tests/**'
      - '.github/workflows/**'
  pull_request:
    branches:
      - main
      - dev
      - staging
      - production
    paths:
      - 'src/**'
      - 'tests/**'
      - '.github/workflows/**'
  pull_request_target:
    types: [ready_for_review]
    paths:
      - 'src/**'
      - 'tests/**'
      - '.github/workflows/**'
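# Cancel any in-progress run for the same workflow and ref when a new one starts.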
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true
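# Restrict the default GITHUB_TOKEN to read-only repository access.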
permissions:
  contents: read
jobs:
  lint:
    name: ruff
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
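      # Keep the CI ruff version in sync with the pin in pyproject.toml.
      # Assumes a pin of the form "ruff==X.Y.Z" somewhere in the dependency
      # tables, e.g. dev = ["ruff==0.6.8", ...] (version shown is hypothetical).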
      - name: Extract ruff version from pyproject.toml
        run: |
          RUFF_VERSION=$(grep -E 'ruff.*=' pyproject.toml | sed -E 's/.*"ruff==([^"]+)".*/\1/')
          echo "RUFF_VERSION=${RUFF_VERSION}" >> "$GITHUB_ENV"
          # Echo the shell variable directly; env vars written to $GITHUB_ENV only
          # become available as ${{ env.RUFF_VERSION }} in *subsequent* steps.
          echo "Extracted Ruff version: ${RUFF_VERSION}"
      - name: Install ruff
        run: pip install ruff==${{ env.RUFF_VERSION }}
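      # `ruff format --diff` exits non-zero if any file would be reformatted;
      # `ruff check` lints the src and tests trees.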
      - name: ruff format (check code formatting)
        run: ruff format --diff
      - name: ruff check (lint code base)
        run: ruff check src tests
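  # Full test suite with coverage; only runs after the lint job succeeds.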
  test_github:
    name: pytest (github)
    runs-on: ubuntu-latest
    timeout-minutes: 45
    needs: lint
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - name: Install system dependencies
        run: |
          # Check if rsync is already installed
          if ! command -v rsync &> /dev/null; then
            echo "Installing rsync..."
            sudo apt-get update
            sudo apt-get install -y rsync
          else
            echo "rsync is already available"
          fi
      - name: Setup uv
        uses: astral-sh/setup-uv@v4
        with:
          version: "latest"
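      # Editable install with the ml, openbabel, and dev extras; --system targets
      # the runner's Python directly instead of requiring a virtual environment.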
      - name: Install dependencies with uv
        run: |
          uv pip install -e ".[ml,openbabel,dev]" --system
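      # Prepare local test data before running pytest (the PDB/CCD mirror paths
      # passed to pytest below point into tests/data).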
      - name: Setup test data
        run: |
          atomworks setup tests
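      # Pin BLAS/OpenMP to one thread each so the pytest-xdist workers (-n auto)
      # do not oversubscribe the runner's CPUs.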
      - name: Run pytest
        run: |
          export OPENBLAS_NUM_THREADS=1
          export OMP_NUM_THREADS=1
          # Get the number of CPUs available
          N_CPU=$(nproc)
          coverage erase
          echo "Running tests with max $N_CPU CPUs"
          # Run the test suites with coverage, appending into a single .coverage file
          PDB_MIRROR_PATH=tests/data/pdb CCD_MIRROR_PATH=tests/data/ccd pytest -n auto -m "not benchmark" tests/io --cov=atomworks --cov-append --cov-report=
          PDB_MIRROR_PATH=tests/data/pdb CCD_MIRROR_PATH=tests/data/ccd pytest -n auto -m "not benchmark and not slow" tests/ml --cov=atomworks --cov-append --cov-report= --no-cov-on-fail
          # Run the slow tests on a single worker to limit memory usage
          PDB_MIRROR_PATH=tests/data/pdb CCD_MIRROR_PATH=tests/data/ccd pytest -n 1 -m "not benchmark and slow" tests/ml --cov=atomworks --cov-append --cov-report=
          # The suites are run as separate invocations so the runner stays within its
          # memory limits; a single combined invocation would look like:
          # PDB_MIRROR_PATH=tests/data/pdb CCD_MIRROR_PATH=tests/data/ccd pytest -m "not benchmark" -n auto --maxprocesses=$N_CPU --dist=worksteal --cov=atomworks --cov-report=xml --cov-report=term --cov-report=term-missing tests/
          # Report the accumulated coverage and write the XML artifact
          coverage report -m
          coverage xml -o coverage.xml
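      # Upload the coverage XML even if the test step failed (if: always()).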
      - name: Upload coverage report
        uses: actions/upload-artifact@v4
        with:
          name: coverage-report-github
          path: coverage.xml
        if: always()