diff --git a/.github/scripts/inspect_installed_pyalp.py b/.github/scripts/inspect_installed_pyalp.py
new file mode 100644
index 000000000..da8eda299
--- /dev/null
+++ b/.github/scripts/inspect_installed_pyalp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python3
+"""Print basic information about the installed `pyalp` package.
+
+This script is intended to be invoked from CI after installing the package
+from TestPyPI. It prints the package file, available binary modules, and
+the runtime build metadata exposed by the package.
+"""
+import pkgutil
+import sys
+
+try:
+    import pyalp
+except Exception:
+    print('ERROR: failed to import pyalp', file=sys.stderr)
+    raise
+
+print('pyalp package:', getattr(pyalp, '__file__', None))
+print('available modules in package:', [m.name for m in pkgutil.iter_modules(pyalp.__path__)])
+try:
+    print('build metadata:', pyalp.get_build_metadata())
+except Exception as e:
+    print('metadata error:', e)
+print('listed backends via helper:', pyalp.list_backends())
diff --git a/.github/scripts/run_backend_smoke_installed.py b/.github/scripts/run_backend_smoke_installed.py
new file mode 100644
index 000000000..102b4737b
--- /dev/null
+++ b/.github/scripts/run_backend_smoke_installed.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python3
+"""Run the repo's backend smoke runner against an installed pyalp package.
+
+This script is intended to be invoked from CI after installing pyalp from
+TestPyPI. It accepts a single argument (backend name) and will skip if that
+backend is not present in the installed package.
+"""
+import sys
+import subprocess
+
+try:
+    import pyalp
+except Exception:
+    print('ERROR: failed to import pyalp', file=sys.stderr)
+    raise
+
+
+def main(argv):
+    if len(argv) < 2:
+        print('Usage: run_backend_smoke_installed.py <backend>', file=sys.stderr)
+        return 2
+    backend = argv[1]
+    backends = pyalp.list_backends()
+    print('discovered backends:', backends)
+    if backend not in backends:
+        print(f'backend {backend} not present in installed package, skipping')
+        return 0
+
+    rc = subprocess.call([sys.executable, 'tests/python/backend_smoke_runner.py', backend])
+    if rc != 0:
+        print(f'backend {backend} smoke runner failed with exit {rc}', file=sys.stderr)
+    return rc
+
+
+if __name__ == '__main__':
+    sys.exit(main(sys.argv))
diff --git a/.github/workflows/promote-to-pypi.yml b/.github/workflows/promote-to-pypi.yml
new file mode 100644
index 000000000..14b02c2e1
--- /dev/null
+++ b/.github/workflows/promote-to-pypi.yml
@@ -0,0 +1,73 @@
+name: Promote release to PyPI
+
+on:
+  workflow_dispatch:
+    inputs:
+      tag:
+        description: 'Git tag / release to promote (e.g. pyalp.v0.8.14)'
+        required: true
+  push:
+    tags:
+      - 'pyalp.v*'
+
+# Request OIDC id-token permissions at the workflow level so actions can use
+# the GitHub Actions OIDC provider. The pypa publish action requires this
+# for the trusted-publisher flow when using repository-provided credentials.
+permissions:
+  id-token: write
+  contents: read
+
+jobs:
+  promote:
+    runs-on: ubuntu-latest
+    # Require approval from the `production` environment before the job can
+    # access environment-scoped secrets (e.g. the PyPI API token). Create the
+    # environment in the repository settings and add the secret `PYPI_API_TOKEN`.
+    environment: production
+    # Also explicitly request id-token at the job level to be extra clear.
+ permissions: + id-token: write + contents: read + steps: + - name: Checkout (for local scripts) + uses: actions/checkout@v4 + + - name: Download release assets (via GitHub API) + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + REPO: ${{ github.repository }} + TAG: ${{ github.event.inputs.tag || github.ref_name }} + run: | + set -euo pipefail + echo "Downloading release assets for ${REPO} tag ${TAG}" + mkdir -p release_assets + # Fetch release metadata for the tag + release_json=$(curl -sSf -H "Authorization: Bearer ${GITHUB_TOKEN}" "https://api.github.com/repos/${REPO}/releases/tags/${TAG}") + if [ -z "${release_json}" ]; then + echo "No release metadata found for tag ${TAG}" >&2 + exit 1 + fi + + # Iterate assets and download each one using the assets API (requires Accept header) + echo "$release_json" | jq -r '.assets[] | [.id, .name] | @tsv' | while IFS=$'\t' read -r id name; do + echo "Downloading asset: ${name} (id ${id})" + curl -sSfL -H "Authorization: Bearer ${GITHUB_TOKEN}" -H "Accept: application/octet-stream" "https://api.github.com/repos/${REPO}/releases/assets/${id}" -o "release_assets/${name}" + done + echo "Downloaded files:" && ls -la release_assets || true + + - name: List downloaded assets + run: | + echo "Assets in release_assets:" + ls -la release_assets || true + + - name: Show package name and version (diagnostic) + run: | + python -c "import importlib,importlib.util,sys,pathlib; spec=importlib.util.find_spec('tomllib') or importlib.util.find_spec('tomli'); name=spec.name if spec else sys.exit(print('No TOML parser available (tomllib/tomli), skipping')); toml=importlib.import_module(name); p=pathlib.Path('pyalp/pyproject.toml'); (sys.exit(print('pyalp/pyproject.toml not found at', p)) if not p.exists() else None); data=toml.loads(p.read_text()); proj=data.get('project',{}); print('project.name =', proj.get('name')); print('project.version =', proj.get('version'))" + + - name: Publish to PyPI (alp-graphblas) + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: release_assets/ + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/publish-to-testpypi.yml b/.github/workflows/publish-to-testpypi.yml new file mode 100644 index 000000000..4f17061c0 --- /dev/null +++ b/.github/workflows/publish-to-testpypi.yml @@ -0,0 +1,387 @@ +name: alp-graphblas wheels (cibuildwheel) + +on: + push: + tags: [ 'pyalp.v*' ] + workflow_dispatch: {} + +jobs: + build-wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + steps: + - name: Checkout (with submodules) + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + + - name: Verify pinned pybind11 submodule commit + if: runner.os == 'Linux' || runner.os == 'macOS' + shell: bash + run: | + set -euo pipefail + if [ -f pyalp/PINNED_PYBIND11 ]; + then + PINNED_SHA=$(tr -d '\n' < pyalp/PINNED_PYBIND11) + elif [ -f pyalp/extern/pybind11/PINNED_COMMIT ]; + then + PINNED_SHA=$(tr -d '\n' < pyalp/extern/pybind11/PINNED_COMMIT) + else + echo "No pinned commit file found (pyalp/PINNED_PYBIND11 or pyalp/extern/pybind11/PINNED_COMMIT)" >&2 + exit 2 + fi + ACTUAL=$(git -C pyalp/extern/pybind11 rev-parse HEAD || true) + echo "Expected pybind11 commit: $PINNED_SHA" + echo "Found pybind11 commit: $ACTUAL" + test "$ACTUAL" = "$PINNED_SHA" + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install 
cibuildwheel + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==2.21.3 + + - name: Build wheels + env: + CIBW_BUILD: "cp39-* cp310-* cp311-* cp312-*" + CIBW_SKIP: "*-musllinux* pp*" + CIBW_ARCHS_LINUX: "x86_64" + CIBW_ARCHS_MACOS: "arm64" + CIBW_BUILD_VERBOSITY: "1" + # Ensure submodule headers are used by setup.py + CIBW_ENVIRONMENT: > + PYTHONUTF8=1 + CIBW_ENVIRONMENT_MACOS: > + PYTHONUTF8=1 + MACOSX_DEPLOYMENT_TARGET=15.0 + # Prebuild the CMake-based extension via top-level CMake so all variables/options are defined. + CIBW_BEFORE_BUILD: | + python -m pip install --upgrade pip + python -m pip install cmake ninja + echo "[cibw] Working directory and contents:"; pwd; ls -la + echo "[cibw] Checking for pyalp CMakeLists:"; ls -la pyalp || true; if [ -f pyalp/CMakeLists.txt ]; + then echo "found pyalp/CMakeLists.txt"; else echo "pyalp/CMakeLists.txt NOT found"; fi + # If the wrapper CMakeLists.txt wasn't copied (e.g., untracked file when cibuildwheel uses git ls-files), create a minimal shim + if [ ! -f pyalp/CMakeLists.txt ]; + then + echo "[cibw] Creating pyalp/CMakeLists.txt shim (add_subdirectory(src)) for wheel build" + printf '%s\n' 'add_subdirectory(src)' > pyalp/CMakeLists.txt + fi + # Ensure no stale extension from a previous ABI remains in the source tree + rm -f pyalp/src/pyalp/_pyalp*.so || true + # Overwrite root setup.py inside the container to delegate packaging to pyalp/setup.py (keep git root clean) + printf '%s\n' "import os, runpy; ROOT=os.path.dirname(os.path.abspath(__file__)); PKG=os.path.join(ROOT, 'pyalp'); os.chdir(PKG); runpy.run_path(os.path.join(PKG, 'setup.py'), run_name='__main__')" > setup.py + # Configure from repository root; enable pyalp and choose NUMA setting per-platform + PYEXEC=$(python -c 'import sys; print(sys.executable)') + # Gather Git metadata and package version to pass into CMake so the + # generated runtime metadata contains accurate values even in CI. + # Prefer environment-provided values when available (GITHUB_SHA/REF_NAME) + ALP_GIT_COMMIT="${GITHUB_SHA:-$(git rev-parse --short HEAD)}" + # GITHUB_REF_NAME is available in Actions; fallback to git branch + ALP_GIT_BRANCH="${GITHUB_REF_NAME:-$(git rev-parse --abbrev-ref HEAD)}" + # Try to pick a semantic/alp version from tags (prefer nearest tag) + ALP_VERSION=$(git describe --tags --match "v*" --abbrev=0 2>/dev/null || true) + if [ -z "${ALP_VERSION}" ]; then + # Fall back to a describe-style value + ALP_VERSION=$(git describe --tags --match "v*" --always 2>/dev/null || echo "unknown") + fi + # Read the pyalp package version from pyalp/pyproject.toml (simple grep) + PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^"]+)".*/\1/') + PYALP_VERSION=${PYALP_VERSION:-0.0.0} + echo "[cibw] Derived ALP_VERSION=${ALP_VERSION}, ALP_GIT_COMMIT=${ALP_GIT_COMMIT}, ALP_GIT_BRANCH=${ALP_GIT_BRANCH}, PYALP_VERSION=${PYALP_VERSION}" + # Use a per-ABI build directory to avoid cross-ABI contamination + ABI_TAG=$(python -c 'import sys; print(f"cp{sys.version_info[0]}{sys.version_info[1]}")') + BUILD_DIR="build/${ABI_TAG}" + # Export the per-ABI build dir so setup.py (inside the wheel build) can find + # the CMake-generated metadata file. cibuildwheel runs this before_build + # inside the container and environment variables exported here are visible + # to the subsequent packaging steps in that container. 
+ export CMAKE_BUILD_DIR="${BUILD_DIR}" + echo "[cibw] Exported CMAKE_BUILD_DIR=${CMAKE_BUILD_DIR}" + # Enable NUMA on Linux runners (for linux wheels), keep disabled elsewhere. + if [ "$(uname -s)" = "Linux" ]; + then + echo "[cibw] Linux build container detected — attempting to install NUMA dev libs" + # Try package managers commonly present in manylinux containers. Ignore failures + if command -v yum >/dev/null 2>&1; + then + yum -y install numactl-devel || true + elif command -v apt-get >/dev/null 2>&1; + then + apt-get update || true + apt-get install -y libnuma-dev || true + fi + NUMA_FLAG="-DWITH_NUMA=ON" + else + # On macOS install Homebrew libomp but do NOT export CPPFLAGS/LDFLAGS. + # Exporting CPPFLAGS was the cause of incorrect header ordering; instead + # pass a CMake prefix hint so FindOpenMP can locate libomp without + # prepending include paths to the global compiler invocation. + if command -v brew >/dev/null 2>&1; + then + echo "[cibw] Homebrew detected — ensuring libomp is available" + # Only install if not already present to avoid reinstall warnings + if ! brew list libomp >/dev/null 2>&1; then + brew install libomp + fi + + # Locate libomp installation + if [ -d "/opt/homebrew/opt/libomp" ]; then + HOMEBREW_LIBOMP_DIR="/opt/homebrew/opt/libomp" + elif [ -d "/usr/local/opt/libomp" ]; then + HOMEBREW_LIBOMP_DIR="/usr/local/opt/libomp" + else + HOMEBREW_LIBOMP_DIR="" + fi + + if [ -n "${HOMEBREW_LIBOMP_DIR}" ]; then + CMAKE_PREFIX_HINT="-DCMAKE_PREFIX_PATH=${HOMEBREW_LIBOMP_DIR}" + echo "[cibw] Using libomp from ${HOMEBREW_LIBOMP_DIR}" + else + CMAKE_PREFIX_HINT="" + fi + fi + NUMA_FLAG="-DWITH_NUMA=OFF" + # Set macOS deployment target for arm64 to match libomp requirement + export MACOSX_DEPLOYMENT_TARGET=15.0 + OSX_DEPLOY_FLAG="-DCMAKE_OSX_DEPLOYMENT_TARGET=${MACOSX_DEPLOYMENT_TARGET}" + fi + # Clean build directory to prevent CMake caching issues + rm -rf "${BUILD_DIR}" + # On macOS, add flag to downgrade template keyword warning from error to warning + if [ "$(uname -s)" = "Darwin" ]; + then + MACOS_FLAGS="-DCMAKE_CXX_FLAGS=-Wno-error=missing-template-arg-list-after-template-kw" + else + MACOS_FLAGS="" + fi + # For wheel builds, request portable flags (avoid -march=native) and disable + # interprocedural optimization (LTO) to improve portability of the produced wheels. + PORTABLE_FLAG="-DALP_PORTABLE_BUILD=ON" + LTO_FLAG="-DCMAKE_INTERPROCEDURAL_OPTIMIZATION=OFF" + # Only enable OMP and nonblocking backends on Linux runners where libomp + # and required build support are available. macOS wheels will build the + # stable reference backend only to avoid SDK/ABI compile issues. + if [ "$(uname -s)" = "Linux" ]; then + BACKEND_FLAGS="-DWITH_OMP_BACKEND=ON -DWITH_NONBLOCKING_BACKEND=ON" + BUILD_TARGETS="pyalp_ref pyalp_omp pyalp_nonblocking" + else + BACKEND_FLAGS="-DWITH_OMP_BACKEND=OFF -DWITH_NONBLOCKING_BACKEND=OFF" + BUILD_TARGETS="pyalp_ref" + fi + + cmake -S . 
-B "${BUILD_DIR}" -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DCMAKE_FIND_FRAMEWORK=NEVER ${MACOS_FLAGS} ${NUMA_FLAG} ${CMAKE_PREFIX_HINT:-} ${OSX_DEPLOY_FLAG:-} ${PORTABLE_FLAG} ${LTO_FLAG} ${BACKEND_FLAGS} -DPython3_EXECUTABLE="${PYEXEC}" -DALP_VERSION="${ALP_VERSION}" -DALP_GIT_COMMIT_SHA="${ALP_GIT_COMMIT}" -DALP_GIT_BRANCH="${ALP_GIT_BRANCH}" -Dpyalp_VERSION="${PYALP_VERSION}" + cmake --build "${BUILD_DIR}" --target ${BUILD_TARGETS} --parallel + # Debug: show the generated metadata file (if present) to the CI logs + echo "[cibw] Checking for generated metadata file: ${CMAKE_BUILD_DIR}/pyalp_metadata.py" + if [ -f "${CMAKE_BUILD_DIR}/pyalp_metadata.py" ]; then + echo "[cibw] Found metadata file:"; ls -l "${CMAKE_BUILD_DIR}/pyalp_metadata.py" + echo "[cibw] First 100 lines of metadata:"; sed -n '1,100p' "${CMAKE_BUILD_DIR}/pyalp_metadata.py" || true + else + echo "[cibw] Metadata file not found at ${CMAKE_BUILD_DIR}/pyalp_metadata.py" + fi + run: | + # Build from repository root so the full CMake project is available in the container + python -m cibuildwheel --output-dir wheelhouse . + + - name: Upload wheels + uses: actions/upload-artifact@v4 + with: + name: alp-graphblas-wheels-${{ matrix.os }} + path: wheelhouse/*.whl + + publish: + needs: build-wheels + runs-on: ubuntu-latest + environment: + name: testpypi + url: https://test.pypi.org/p/alp-graphblas + permissions: + id-token: write + contents: write + steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Download all wheels + uses: actions/download-artifact@v4 + with: + path: dist + pattern: alp-graphblas-wheels-* + merge-multiple: true + - name: Publish to TestPyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + repository-url: https://test.pypi.org/legacy/ + packages-dir: dist/ + verbose: true + + - name: Create GitHub Release and upload wheels + uses: softprops/action-gh-release@v1 + with: + tag_name: ${{ github.ref_name }} + name: ${{ github.ref_name }} + files: dist/*.whl + + - name: Skip in-publish verification + shell: bash + run: | + echo "Installation verification moved to 'verify-installed' job" + + verify-installed: + needs: publish + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + backend: [pyalp_ref, pyalp_omp, pyalp_nonblocking, _pyalp] + steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Verify installed backend + shell: bash + env: + BACKEND: ${{ matrix.backend }} + run: | + set -euo pipefail + # Determine package version from pyalp/pyproject.toml + PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^\"]+)".*/\1/') + echo "Testing alp-graphblas version: ${PYALP_VERSION}" + + PY=$(which python3 || which python) + echo "Using python: ${PY}" + VENV_DIR="./.venv_test" + rm -rf "${VENV_DIR}" + ${PY} -m venv "${VENV_DIR}" + source "${VENV_DIR}/bin/activate" + python -m pip install --upgrade pip setuptools wheel numpy + + # Short sleep to allow TestPyPI to propagate the newly uploaded files + # before attempting the install. Keep logic minimal to reduce workflow + # complexity (the previous retry loop was removed per request). + echo "Sleeping 60s to allow TestPyPI propagation before install..." 
+ sleep 60 + echo "Installing alp-graphblas==${PYALP_VERSION} from TestPyPI" + python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple alp-graphblas==${PYALP_VERSION} --no-deps -v + + # Inspect installed package using the script moved out of the workflow + echo "Inspecting installed package" + python .github/scripts/inspect_installed_pyalp.py + + # Run the smoke runner script for the backend for this matrix job + echo "Running backend smoke runner for ${BACKEND}" + python .github/scripts/run_backend_smoke_installed.py "${BACKEND}" + + publish-to-pypi: + # Disabled by default to avoid triggering PyPI uploads from this workflow. + # PyPI publisher was configured to accept uploads from `promote-to-pypi.yml`. + # Keep the job present for maintainers, but skip execution unless intentionally enabled. + if: false + needs: verify-installed + runs-on: ubuntu-latest + permissions: + id-token: write + contents: read + # This job publishes the already-built artifacts to the real PyPI index. + # It requires a PyPI API token stored in the repository secrets as PYPI_API_TOKEN. + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Download built wheels + uses: actions/download-artifact@v4 + with: + path: dist + pattern: alp-graphblas-wheels-* + merge-multiple: true + + - name: Publish to PyPI (alp-graphblas) + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: dist/ + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + + verify-installed-inprocess: + needs: publish + runs-on: ubuntu-latest + name: Verify installed wheel (in-process smoke) + steps: + - name: Checkout repository (for tests) + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Create venv and install prerequisites + shell: bash + run: | + set -euo pipefail + PY=$(which python3 || which python) + VENV_DIR="./.venv_test_inprocess" + rm -rf "${VENV_DIR}" + ${PY} -m venv "${VENV_DIR}" + source "${VENV_DIR}/bin/activate" + python -m pip install --upgrade pip setuptools wheel numpy + + # Retry pip install from TestPyPI with exponential backoff (bounded attempts) + PYALP_VERSION=$(grep -E '^version\s*=\s*"' pyalp/pyproject.toml | head -n1 | sed -E 's/^version\s*=\s*"([^"]+)".*/\1/') + echo "Installing alp-graphblas==${PYALP_VERSION} from TestPyPI (with retries)" + + MAX_ATTEMPTS=6 + SLEEP_BASE=10 + SUCCESS=0 + + for attempt in $(seq 1 ${MAX_ATTEMPTS}); do + echo "--- attempt ${attempt} of ${MAX_ATTEMPTS} ---" + # verbose pip output helps debugging in CI logs + python -m pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple alp-graphblas==${PYALP_VERSION} -v && SUCCESS=1 && break + echo "pip install failed on attempt ${attempt}" + if [ "${attempt}" -lt "${MAX_ATTEMPTS}" ]; then + SLEEP_SECONDS=$((SLEEP_BASE * attempt)) + echo "Sleeping ${SLEEP_SECONDS}s before retry..." 
+ sleep "${SLEEP_SECONDS}" + fi + done + + if [ "${SUCCESS}" -ne 1 ]; then + echo "ERROR: failed to install alp-graphblas from TestPyPI after ${MAX_ATTEMPTS} attempts" >&2 + exit 1 + fi + + # Print a compact JSON summary of installed backends for easy scanning in CI logs + python -c "import json,importlib,sys; print(json.dumps({'backends': importlib.import_module('pyalp').list_backends()}))" + + - name: Run in-process backend import smoke test + shell: bash + run: | + set -euo pipefail + source ./.venv_test_inprocess/bin/activate + echo "Running pyalp/tests/test_bckds_inprocess.py" + python pyalp/tests/test_bckds_inprocess.py diff --git a/.github/workflows/pyalp-ci.yml b/.github/workflows/pyalp-ci.yml new file mode 100644 index 000000000..829275cfb --- /dev/null +++ b/.github/workflows/pyalp-ci.yml @@ -0,0 +1,60 @@ +name: pyalp CI (local-build smoke test) + +on: + push: + tags: [ 'pyalp*' ] + workflow_dispatch: {} + +jobs: + build-and-test-local: + name: Build pyalp with LOCAL profile and run smoke tests + runs-on: ubuntu-latest + steps: + - name: Checkout (with submodules) + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install system build deps + run: | + sudo apt-get update + sudo apt-get install -y build-essential cmake ninja-build pkg-config python3-venv python3-dev python3-pip libnuma-dev + + - name: Configure top-level CMake with LOCAL profile + run: | + set -euo pipefail + # Configure from repository root using the LOCAL profile to enable native optimizations + cmake -S . -B build/ci_local -G Ninja \ + -DALP_BUILD_PROFILE=LOCAL \ + -DENABLE_PYALP=ON \ + -DCMAKE_POSITION_INDEPENDENT_CODE=ON \ + -DPython3_EXECUTABLE=$(which python3) + + - name: Build pyalp backends + run: | + set -euo pipefail + cmake --build build/ci_local --target pyalp_ref pyalp_omp pyalp_nonblocking --parallel + + - name: Package pyalp wheel from CMake build + run: | + set -euo pipefail + mkdir -p dist_wheel + export CMAKE_BUILD_DIR=$(pwd)/build/ci_local + ( cd pyalp && python -m pip wheel . -w ../dist_wheel ) + + - name: Smoke test wheel in venv + run: | + set -euo pipefail + python3 -V + python3 -m venv venv + . venv/bin/activate + pip install --upgrade pip wheel + pip install dist_wheel/*.whl + # run the smoke test script which should import pyalp and backends e.g. pyalp.pyalp_ref + python tools/smoke_test_pyalp.py diff --git a/.gitignore b/.gitignore index bbb0d673e..95a567357 100644 --- a/.gitignore +++ b/.gitignore @@ -8,4 +8,6 @@ paths.mk [Bb]uild*/ [Oo]bj*/ [Ii]nstall*/ -cmake-build-*/ \ No newline at end of file +cmake-build-*/ +.venv/ +pyalp/src/pyalp.egg-info/ diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..f0b84739c --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "pyalp/extern/pybind11"] + path = pyalp/extern/pybind11 + url = https://github.com/pybind/pybind11 diff --git a/CMakeLists.txt b/CMakeLists.txt index 02c49eb37..c388e8956 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -31,12 +31,43 @@ set( MINORVERSION 7 ) set( BUGVERSION 0 ) set( VERSION "${MAJORVERSION}.${MINORVERSION}.${BUGVERSION}" ) +# Export a canonical ALP version string for subprojects and packaging. This +# defaults to the VERSION defined above but can be overridden by -DALP_VERSION +# on the cmake command line (CI may pass this explicitly). 
+if(NOT DEFINED ALP_VERSION) + set(ALP_VERSION "${VERSION}" CACHE STRING "ALP project version (for packaging)") +else() + # Keep user-provided ALP_VERSION in cache so subprojects see it + set(ALP_VERSION "${ALP_VERSION}" CACHE STRING "ALP project version (for packaging)" FORCE) +endif() + # set the project name project( GraphBLAS VERSION ${VERSION} DESCRIPTION "The ultimate engine for sparse computation" LANGUAGES CXX C ) + +# Find Git and get repository information for metadata +find_package(Git QUIET) +if(GIT_FOUND) + execute_process( + COMMAND ${GIT_EXECUTABLE} rev-parse --short HEAD + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE ALP_GIT_COMMIT + OUTPUT_STRIP_TRAILING_WHITESPACE + ) + execute_process( + COMMAND ${GIT_EXECUTABLE} rev-parse --abbrev-ref HEAD + WORKING_DIRECTORY ${CMAKE_SOURCE_DIR} + OUTPUT_VARIABLE ALP_GIT_BRANCH + OUTPUT_STRIP_TRAILING_WHITESPACE + ) +else() + set(ALP_GIT_COMMIT "unknown") + set(ALP_GIT_BRANCH "unknown") +endif() + set( CMAKE_CXX_STANDARD 11 ) set( CMAKE_CXX_STANDARD_REQUIRED ON ) @@ -243,7 +274,13 @@ endif() # always look for math and rt libraries find_package( LibM REQUIRED ) -find_library( LIBRT rt REQUIRED ) +# librt exists on Linux but not on macOS; only require it on non-Apple UNIX +if(UNIX AND NOT APPLE) + find_library( LIBRT rt REQUIRED ) +else() + # On macOS librt is not provided/needed; define an empty variable for compatibility + set(LIBRT "" CACHE FILEPATH "rt library (not present on Apple platforms)") +endif() # pthreads is needed for hpparser find_package( Threads REQUIRED ) @@ -321,11 +358,13 @@ if( WITH_OMP_BACKEND OR WITH_HYBRID_BACKEND ) set( WITH_OMP_BACKEND_HEADERS ON ) endif() + add_subdirectory( include ) ### BACKEND IMPLEMENTATIONS add_subdirectory( src ) + ### TESTS and EXAMPLES # specify test categories and the directory where ALL tests are stored @@ -335,6 +374,101 @@ include( AddGRBTests ) add_subdirectory( tests ) +if( ENABLE_PYALP ) + # Only add the pyalp subdirectory if it contains a CMakeLists.txt in the source tree. + if(EXISTS "${PROJECT_SOURCE_DIR}/pyalp/CMakeLists.txt") + add_subdirectory(pyalp) + else() + message(STATUS "pyalp subdirectory not present in source tree; skipping add_subdirectory(pyalp)") + endif() +endif() + +# Provide a top-level convenience packaging target for pyalp so callers can run +# cmake --build --target pyalp --parallel +# even if the pyalp CMakeLists placed a packaging target in a subdirectory or +# the generator didn't expose that target at the top-level. This mirrors the +# packaging flow implemented under pyalp/src/CMakeLists.txt and is only added +# when pyalp is enabled and present in source. +if( ENABLE_PYALP AND EXISTS "${PROJECT_SOURCE_DIR}/pyalp/CMakeLists.txt" ) + # Attempt to find a Python interpreter (non-fatal if already found elsewhere) + find_package(PythonInterp QUIET) + + # Build the list of backend targets that should be packaged. Keep this in + # sync with pyalp/src/CMakeLists.txt. + set(pyalp_package_targets "") + if(WITH_REFERENCE_BACKEND) + list(APPEND pyalp_package_targets pyalp_ref) + endif() + if(WITH_OMP_BACKEND) + list(APPEND pyalp_package_targets pyalp_omp) + endif() + if(WITH_NONBLOCKING_BACKEND) + list(APPEND pyalp_package_targets pyalp_nonblocking) + endif() + string(JOIN " " pyalp_package_targets_str ${pyalp_package_targets}) + + # Only add the top-level pyalp target if one is not already defined. 
+  if(NOT TARGET pyalp)
+    add_custom_target(pyalp
+      COMMENT "Build enabled pyalp backends and package wheel(s) into ${CMAKE_BINARY_DIR}/dist"
+    )
+
+    # Build each backend target individually: `cmake --build --target` accepts
+    # one target at a time, so add one build command per enabled backend.
+    foreach(_pyalp_backend IN LISTS pyalp_package_targets)
+      add_custom_command(TARGET pyalp
+        COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target ${_pyalp_backend} --parallel
+        VERBATIM
+      )
+    endforeach()
+    add_custom_command(TARGET pyalp
+      COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_BINARY_DIR}/dist
+      COMMAND ${CMAKE_COMMAND} -E env CMAKE_BUILD_DIR=${CMAKE_BINARY_DIR} ${PYTHON_EXECUTABLE} -m pip wheel ${CMAKE_SOURCE_DIR}/pyalp -w ${CMAKE_BINARY_DIR}/dist
+      COMMAND ${CMAKE_COMMAND} -E echo ""
+      COMMAND ${CMAKE_COMMAND} -E echo "============================================================"
+      COMMAND ${CMAKE_COMMAND} -E echo "Packaged wheel(s) into: ${CMAKE_BINARY_DIR}/dist"
+      COMMAND ${CMAKE_COMMAND} -E echo "To install a single wheel:"
+      COMMAND ${CMAKE_COMMAND} -E echo "  python -m pip install ${CMAKE_BINARY_DIR}/dist/<wheel-file>.whl"
+      COMMAND ${CMAKE_COMMAND} -E echo "or install all wheels in dist:"
+      COMMAND ${CMAKE_COMMAND} -E echo "  python -m pip install ${CMAKE_BINARY_DIR}/dist/*.whl"
+      COMMAND ${CMAKE_COMMAND} -E echo "After installation, import the package in Python, e.g.:"
+      COMMAND ${CMAKE_COMMAND} -E echo "  python -c \"import alp_graphblas; print(alp_graphblas.__version__)\""
+      VERBATIM
+    )
+  endif()
+endif()
+## Also expose a clearly-named packaging target that avoids name collisions
+if( ENABLE_PYALP AND EXISTS "${PROJECT_SOURCE_DIR}/pyalp/CMakeLists.txt" )
+  if(NOT TARGET pyalp-package)
+    add_custom_target(pyalp-package
+      COMMENT "(convenience) Build enabled pyalp backends and package wheel(s) into ${CMAKE_BINARY_DIR}/dist"
+    )
+
+    foreach(_pyalp_backend IN LISTS pyalp_package_targets)
+      add_custom_command(TARGET pyalp-package
+        COMMAND ${CMAKE_COMMAND} --build ${CMAKE_BINARY_DIR} --target ${_pyalp_backend} --parallel
+        VERBATIM
+      )
+    endforeach()
+    add_custom_command(TARGET pyalp-package
+      COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_BINARY_DIR}/dist
+      COMMAND ${CMAKE_COMMAND} -E env CMAKE_BUILD_DIR=${CMAKE_BINARY_DIR} ${PYTHON_EXECUTABLE} -m pip wheel ${CMAKE_SOURCE_DIR}/pyalp -w ${CMAKE_BINARY_DIR}/dist
+      COMMAND ${CMAKE_COMMAND} -E echo ""
+      COMMAND ${CMAKE_COMMAND} -E echo "============================================================"
+      COMMAND ${CMAKE_COMMAND} -E echo "Packaged wheel(s) into: ${CMAKE_BINARY_DIR}/dist"
+      COMMAND ${CMAKE_COMMAND} -E echo "To install a single wheel:"
+      COMMAND ${CMAKE_COMMAND} -E echo "  python -m pip install ${CMAKE_BINARY_DIR}/dist/<wheel-file>.whl"
+      COMMAND ${CMAKE_COMMAND} -E echo "or install all wheels in dist:"
+      COMMAND ${CMAKE_COMMAND} -E echo "  python -m pip install ${CMAKE_BINARY_DIR}/dist/*.whl"
+      COMMAND ${CMAKE_COMMAND} -E echo "After installation, import the package in Python, e.g.:"
+      COMMAND ${CMAKE_COMMAND} -E echo "  python -c \"import alp_graphblas; print(alp_graphblas.__version__)\""
+      VERBATIM
+    )
+  endif()
+endif()
 
 add_subdirectory( examples )
@@ -377,3 +511,7 @@ add_dependencies( docs userdocs devdocs )
 
 message( "Tests enabled for backends: ${AVAILABLE_TEST_BACKENDS}" )
 message( "Enabled backend targets: ${AVAILABLE_BACKENDS}\n" )
+
+
+
+
diff --git a/cmake/CompileFlags.cmake b/cmake/CompileFlags.cmake
index 4c6c1d862..3eb34adc0 100644
--- 
a/cmake/CompileFlags.cmake +++ b/cmake/CompileFlags.cmake @@ -92,7 +92,44 @@ target_link_libraries( common_flags INTERFACE ## defaults performance options for all targets (backends and tests) set( COMMON_PERF_DEFS_Release "NDEBUG" ) -set( COMMON_PERF_OPTS_Release "-O3" "-march=native" "-mtune=native" "-funroll-loops" ) + +# Option to produce portable builds (for wheels): avoid per-host microarch +# flags like -march=native/-mtune=native and aggressive unrolling. When +# building wheels in CI set -DALP_PORTABLE_BUILD=ON to get portable artifacts. +option( ALP_PORTABLE_BUILD "Build portable binaries (disable host-specific optimizations)" OFF ) + +# Build profile: controls portability and default LTO/optimization choices. +# Use -DALP_BUILD_PROFILE=LOCAL for developer/local builds (enables native +# host optimizations, enables LTO by default). Use -DALP_BUILD_PROFILE=DEPLOYMENT +# for wheel/deployment builds (portable by default). +set(ALP_BUILD_PROFILE "DEPLOYMENT" CACHE STRING "Build profile: LOCAL or DEPLOYMENT. LOCAL enables native optimizations; DEPLOYMENT favors portability for wheels.") +string(TOUPPER "${ALP_BUILD_PROFILE}" ALP_BUILD_PROFILE_UP) + +if(ALP_BUILD_PROFILE_UP STREQUAL "LOCAL") + # Local builds should prefer host-specific optimizations + set(ALP_PORTABLE_BUILD OFF CACHE BOOL "Build portable binaries (disable host-specific optimizations)" FORCE) + # Enable LTO by default for local performance builds; user may override. + set(CMAKE_INTERPROCEDURAL_OPTIMIZATION ON CACHE BOOL "Enable LTO (interprocedural optimization)" FORCE) +else() + # Deployment builds default to portable flags for maximum wheel compatibility + set(ALP_PORTABLE_BUILD ON CACHE BOOL "Build portable binaries (disable host-specific optimizations)" FORCE) + # Disable LTO for portable deployment builds; user may override explicitly + set(CMAKE_INTERPROCEDURAL_OPTIMIZATION OFF CACHE BOOL "Enable LTO (interprocedural optimization)" FORCE) +endif() + +# Avoid GCC/GNU-specific microarchitecture flags on Apple/Clang toolchains +if(APPLE) + # On macOS with AppleClang, -march/-mtune and aggressive unrolling can + # cause header search/order issues and unsupported-flag errors. Keep -O3 only. + set( COMMON_PERF_OPTS_Release "-O3" ) +else() + if( ALP_PORTABLE_BUILD ) + # Portable: avoid host-specific tuning + set( COMMON_PERF_OPTS_Release "-O3" ) + else() + set( COMMON_PERF_OPTS_Release "-O3" "-march=native" "-mtune=native" "-funroll-loops" ) + endif() +endif() set( COMMON_PERF_DEFS_Debug "" ) set( COMMON_PERF_OPTS_Debug "-O0" ) set( COMMON_PERF_DEFS_Coverage "" ) diff --git a/pyalp/CMakeLists.txt b/pyalp/CMakeLists.txt new file mode 100644 index 000000000..febd4f0ab --- /dev/null +++ b/pyalp/CMakeLists.txt @@ -0,0 +1 @@ +add_subdirectory(src) diff --git a/pyalp/DEVELOPER_GUIDE.md b/pyalp/DEVELOPER_GUIDE.md new file mode 100644 index 000000000..ef0ba4019 --- /dev/null +++ b/pyalp/DEVELOPER_GUIDE.md @@ -0,0 +1,250 @@ +Developer guide — pyalp / alp-graphblas +===================================== + +Author: +Denis Jelovina + +Support: +For support or to report issues, please open an issue on the project's GitHub issue tracker. For direct contact, email denis.jelovina@gmail.com + +This document explains how the Python packaging for the pyalp bindings works, how CI builds wheels, and what to change when you add a new compiled backend (pybind11 module) or Python dependency. 
+
+C++ binding logic and Python usage (summary)
+-------------------------------------------
+The pyalp package exposes native C++ backends built with pybind11. Each backend is compiled as a separate Python extension module (shared object) with a canonical name like `pyalp_ref`, `pyalp_omp`, or `pyalp_nonblocking`. The packaging layout installs those compiled modules into the `pyalp` package so they are importable as `pyalp.pyalp_ref`, `pyalp.pyalp_omp`, etc.
+
+How Python code uses the compiled backends
+- Direct import: after installation you can import a backend module directly, for example:
+
+    import pyalp.pyalp_ref
+    M = pyalp.pyalp_ref.Matrix(10, 10)
+
+- Helper API: the package also provides helper APIs that discover and return backends at runtime, e.g. `pyalp.get_backend('pyalp_ref')`, which returns the compiled module object. This is useful for selecting backends dynamically.
+
+How the Python object maps to C++
+- Each compiled extension is a pybind11 module which registers C++ types (Matrix, Vector, operators) and functions. The pybind11 binding code (in the `pyalp` C++ sources) defines the Python-visible class names and methods, so `pyalp.pyalp_ref.Matrix` is a Python wrapper around the C++ Matrix implementation in the native backend.
+- At build time, CMake compiles the C++ sources into a platform-specific shared object; the packaging step copies that shared object into the `pyalp` package so the interpreter can import it as a normal module.
+
+Current functional limitations and caveats
+- Cross-backend imports: importing different backend modules in the same Python process can cause pybind11 type-registration collisions (duplicate registrations of the same C++ types across modules). The bindings now use `py::module_local()` for many wrapper types to reduce collisions, but issues can still occur. If you need repeatable cross-backend usage, either run backends in separate processes or design a shared-registration approach (a single module that dispatches to backends, or explicit shared-type registration across modules).
+- Cross-backend bindings: supporting full cross-backend interoperability requires either
+  - a single compiled extension exporting a stable API and selecting backends internally, or
+  - explicit cross-registration code that ensures each type is only registered once (or is registered with module-local variants plus safe conversion functions). Both approaches require C++ changes and careful testing.
+- Wheel portability and optimization trade-offs:
+  - Wheels are built per ABI and per OS (CI uses per-ABI build dirs). The project disables aggressive target-specific flags (no `-march=native`, LTO off) to improve portability, but wheels are still platform- and ABI-specific (glibc versus musl, macOS SDK versions). Expect different wheel filenames per ABI/OS and possible limitations on older OS versions.
+  - CI currently skips `*-musllinux*` and does not publish Windows wheels by default (see the CI matrix). If you need musl or Windows support, update the CI configuration and the before-build steps to provide appropriate toolchains and packaging options.
+- Size and dependency implications: bundling multiple backends increases wheel size.
+
+If you plan to change the bindings or support cross-backend imports, read the `pybind11` docs on module-local registrations and consider writing small integration tests that import multiple backends in isolated subprocesses.
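+
+For example, here is a minimal sketch of such an isolated check. It is a
+hypothetical helper (not shipped with the repository) and assumes only the
+`pyalp.<backend>` import layout described above:
+
+```python
+#!/usr/bin/env python3
+"""Import each pyalp backend in a fresh interpreter so pybind11 type
+registrations cannot collide across backends (hypothetical helper)."""
+import subprocess
+import sys
+
+# Code executed inside each child interpreter; argv[1] is the backend name.
+SNIPPET = (
+    "import importlib, sys; "
+    "importlib.import_module('pyalp.' + sys.argv[1]); "
+    "print(sys.argv[1], 'imported OK')"
+)
+
+
+def check_backend(name):
+    # A separate process per backend keeps pybind11 registrations isolated.
+    proc = subprocess.run([sys.executable, '-c', SNIPPET, name])
+    return proc.returncode == 0
+
+
+if __name__ == '__main__':
+    for backend in ('pyalp_ref', 'pyalp_omp', 'pyalp_nonblocking'):
+        print(backend, 'ok' if check_backend(backend) else 'FAILED')
+```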
+
+
+Local builds (tested with `pyalp-ci.yml`)
+-----------------------------------------
+If you prefer fast iteration or want to debug native build issues locally, build and test wheels on your machine. The repository provides `pyalp-ci.yml` to exercise the build steps in CI (useful to validate local changes on pull requests), but local builds let you iterate without pushing tags or waiting for remote runners.
+
+When to build locally
+- Fast iteration when changing bindings, packaging logic, or test code.
+- Debugging native-build problems where you need immediate access to compiler and linker output.
+- Packaging-only checks: point `pyalp/setup.py` at an existing `.so` (via `PREBUILT_PYALP_SO`) to validate wheel contents without rebuilding native code.
+
+How to build wheels locally (quick recipe)
+- For the quickest loop, configure with pyalp enabled, build a backend, and put the generated python directory on `PYTHONPATH` (example for the reference backend):
+
+```bash
+ cmake -DENABLE_PYALP=ON -DCMAKE_BUILD_TYPE=Release $ALP_REPO_PATH
+ make pyalp_ref
+ # append the generated python directory to PYTHONPATH, i.e.
+ export PYTHONPATH=$PYTHONPATH:$(pwd)/python
+```
+
+- To build an actual wheel, prepare a per-ABI build directory and point the `pyalp` packaging at it (via `CMAKE_BUILD_DIR`) so the generated metadata and prebuilt `.so` get picked up; see step 6 of "Adding a new compiled backend" below for the full recipe.
+
+Advantages of local builds
+- Performance: native optimisations for the build architecture.
+- Speed: no remote queue or tag/push cycle.
+- Control: change CMake flags and environment variables and rebuild immediately.
+- Debuggability: full compiler/linker logs and the ability to attach tools.
+
+
+Full publish pipeline (publish-to-testpypi.yml + promote-to-pypi.yml)
+-----------------------------------------------------------------
+The full repository publish flow is implemented in two primary workflows:
+
+- `publish-to-testpypi.yml` — builds wheels for multiple ABIs/OSes using `cibuildwheel`, publishes them to TestPyPI, uploads wheel artifacts to a GitHub Release, and runs verification steps that install the TestPyPI package into a clean virtualenv for smoke tests. This workflow is triggered by pushing a tag matching `pyalp.v*`.
+
+- `promote-to-pypi.yml` — a gated workflow that downloads wheel assets from a GitHub Release and uploads them to PyPI. This job requires the `production` environment and uses the `PYPI_API_TOKEN` secret; the environment gating ensures human approval before the token is available to the workflow.
+
+Key differences vs local builds
+- Scope: the publish pipelines run multiple ABIs and platforms, produce canonical release artifacts, and publish them to TestPyPI/PyPI.
+- Reproducibility: CI uses standard manylinux containers and controlled macOS runners to produce wheels intended for distribution; this reduces host-specific variation.
+- Approval and secrets: promote-to-pypi requires an environment approval to access the PyPI token, preventing accidental publishes.
+
+When to use the publish pipeline
+- After local validation and CI runs (e.g., `pyalp-ci.yml` for PRs), create an annotated tag `pyalp.vX.Y.Z` and push it to trigger `publish-to-testpypi.yml`.
+- Once TestPyPI artifacts are validated, run `promote-to-pypi.yml` (workflow dispatch) to publish to PyPI; this step requires environment approval and the presence of the `PYPI_API_TOKEN` secret.
+
+Operational note: TestPyPI propagation and verification
+- The verification step that installs wheels from TestPyPI can occasionally fail due to propagation delays between upload and index availability.
If the TestPyPI install step fails transiently, re-run the workflow or re-trigger the release; the promote job should only be run once test artifacts are available and verified. + + + + +High-level contract +- Inputs: CMake-based native backends built by the top-level CMake tree, a generated Python metadata file produced by CMake, and the Python package source in `pyalp/src`. +- Output: Platform-specific wheels that contain the compiled shared object(s) and a generated `_metadata.py` file. The published PyPI project name is `alp-graphblas`, but the import name inside Python remains `pyalp`. +- Success criteria: pip install alp-graphblas (from TestPyPI or PyPI) yields a package exposing `pyalp.get_build_metadata()` and one or more backend modules accessible via `pyalp.get_backend()`. + +Where things live (important files) +- `pyalp/pyproject.toml` — project metadata used by CI and for the package release (project name, version, runtime dependencies such as numpy). +- `pyalp/setup.py` — custom setuptools glue. It either copies prebuilt shared objects from the CMake build tree into the wheel (preferred for CI-built wheels) or builds from source with pybind11 when no prebuilt artifact is present. +- `pyalp/src/pyalp/_metadata.py.in` — CMake template used to generate `pyalp_metadata.py` (copied into wheels as `_metadata.py`). If you change the runtime metadata shape, update this file and the code that reads it. +- Top-level CMake files (`CMakeLists.txt` and `src/…`) — define native targets such as `pyalp_ref`, `pyalp_omp`, `pyalp_nonblocking`. CI runs a top-level CMake configure/build per-Python-ABI and produces the native `.so` files and the generated metadata file. +- `.github/workflows/publish-to-testpypi.yml` — builds wheels with cibuildwheel and publishes to TestPyPI (trigger: push tag `pyalp.v*`). This workflow also creates a GitHub Release with wheel assets. +- `.github/workflows/promote-to-pypi.yml` — promotes a GitHub Release's wheel assets to PyPI. The job requires `environment: production` (see repository settings) and uses the secret `PYPI_API_TOKEN`. +- `.github/scripts/` — helper scripts used by CI (e.g., verification and TestPyPI wait scripts). + +How the CI build produces a wheel (brief) +- cibuildwheel is used to produce wheels for multiple Python ABIs and OSes. +- Before building each wheel, CI runs a `CIBW_BEFORE_BUILD` script which: + - Installs CMake + Ninja inside the build container. + - Derives Git and package version metadata and sets environment variables. + - Configures a per-ABI CMake build directory (e.g. `build/cp311`) and runs CMake to produce the compiled backends and a generated `pyalp_metadata.py` file inside that build dir. + - Exports `CMAKE_BUILD_DIR` pointing to the per-ABI build directory so `pyalp/setup.py` can locate the generated outputs. +- The packaging step runs `pyalp/setup.py` (setup.py will copy discovered prebuilt `.so` files and the generated metadata file into the package build directory). The wheel built by cibuildwheel therefore contains the prebuilt, ABI-specific `.so` and `_metadata.py`. + +How `pyalp/setup.py` cooperates with CMake +- By default `setup.py` searches the repo `../build/**` tree for prebuilt shared objects named like the native targets (`pyalp_ref`, `pyalp_omp`, `pyalp_nonblocking`). If it finds them it adds Extension entries with empty sources and uses a custom `build_ext` to copy the prebuilt library into the wheel. 
+- `setup.py` looks for the generated metadata file in the directory pointed to by the `CMAKE_BUILD_DIR` environment variable (set by the CI before_build script). If present it copies `pyalp_metadata.py` -> `_metadata.py` next to the extension in the wheel.
+- If no prebuilt modules are detected and `pybind11` is available, `setup.py` will fall back to building from sources with pybind11.
+- Environment variables you can use locally:
+  - `CMAKE_BUILD_DIR` — path to the per-ABI CMake build dir that contains `pyalp_metadata.py` and the built `.so` files.
+  - `PREBUILT_PYALP_SO` or `PYALP_PREBUILT_SO` — point to a single prebuilt shared object to include in the wheel (helpful for local testing).
+
+Adding a new compiled backend (step-by-step)
+1) Add a CMake target
+   - Add a target to your CMake configuration (top-level CMake or the `pyalp` subdirectory). Name it with the prefix used by `setup.py` (for example `pyalp_mybackend` if you want the backend import name to be `pyalp_mybackend`).
+   - Ensure the target produces a shared library file named so that it will be discoverable by the existing glob in `pyalp/setup.py` (the packaging code looks for `build/**/*.(so|pyd)`).
+   - If the backend needs additional compile flags or third-party deps, add those to the CMake target and to the cibuildwheel before-build step where platform-specific dependencies are installed.
+
+2) Expose the pybind11 module name correctly
+   - The module name that Python imports must match the filename stem: for a target `pyalp_mybackend` the shared object should become something like `pyalp_mybackend.cpython-311-x86_64-linux-gnu.so`; it is installed into the `pyalp` package and is importable as `pyalp.pyalp_mybackend`, or accessible via the helper APIs.
+   - `setup.py` maps module names to the extension name `pyalp.<modname>`; if you introduce a module with a different naming scheme, update `pyalp/setup.py`'s discovery or add an explicit mapping.
+
+3) Update CI build targets
+   - The cibuildwheel `CIBW_BEFORE_BUILD` script exports a `BUILD_TARGETS` variable used by CMake to restrict which targets to build. Edit `.github/workflows/publish-to-testpypi.yml` under `CIBW_BEFORE_BUILD` to include your new target name in `BUILD_TARGETS`.
+   - If your backend requires platform-specific dependency installation (e.g., libnuma, libomp) ensure those package installs are available in the before-build block.
+
+4) Update packaging helpers if needed
+   - If your module uses a new stem that the setup script won't detect, add the module name to the `supported` list in `pyalp/setup.py` or rely on the glob search.
+   - If you want to bundle multiple backends under a different naming convention, update the `find_all_prebuilt()` discovery logic and the code that constructs `Extension(f"pyalp.{modname}")` entries.
+
+5) Add/adjust tests
+   - Add small smoke tests (ideally under `tests/python/` or `tests/smoke/`) that run the new backend. Prefer running each backend in its own process where feasible to avoid pybind11 registration collisions.
+
+6) Build and test locally (quick recipe)
+   - Ensure system deps are installed: cmake, ninja, a C++ toolchain and any library dependencies.
+   - Create a per-ABI build dir and configure CMake as CI does. Example (for Python 3.11):
+   ```bash
+   mkdir -p build/cp311
+   cmake -S . -B build/cp311 -G Ninja -DCMAKE_BUILD_TYPE=Release -DENABLE_PYALP=ON -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DPython3_EXECUTABLE=$(which python3)
+   cmake --build build/cp311 --target pyalp_ref pyalp_mybackend --parallel
+   ```
+
+   - Build a wheel locally from the `pyalp` package. From the repository root:
+   ```bash
+   export CMAKE_BUILD_DIR="$(pwd)/build/cp311"
+   cd pyalp
+   # Build a wheel using the package directory's setup.py
+   python -m pip wheel . --no-deps -w ../wheelhouse
+
+   # Install and test the wheel in a fresh venv
+   python -m venv /tmp/venv_test
+   source /tmp/venv_test/bin/activate
+   python -m pip install --upgrade pip
+   python -m pip install ../wheelhouse/alp-graphblas-*.whl
+   ```
+
+   - Note: `--no-deps` is optional when building locally; published wheels should contain runtime dependency metadata so that pip will pull `numpy` automatically.
+
+Releases and publishing (how CI is wired)
+- Creating a TestPyPI release (normal path):
+  1. Bump the version in `pyalp/pyproject.toml` (recommended) and commit.
+  2. Create a git tag of the form `pyalp.vX.Y.Z` and push the tag. The `publish-to-testpypi.yml` workflow is triggered on push tags matching `pyalp.v*`.
+  3. The workflow builds wheels (cibuildwheel), uploads wheel artifacts as GitHub workflow artifacts, publishes to TestPyPI, and creates a GitHub Release with the wheel assets.
+
+- Promoting to PyPI (two-step gated publish):
+  - The `publish-to-testpypi.yml` workflow automatically builds and deploys wheels to TestPyPI and then attempts to install and verify those wheels in a fresh virtual environment. Occasionally this verification can fail due to propagation delays between upload and availability; if that happens, re-run the workflow (or re-trigger the release) until the verification completes successfully.
+  - The `promote-to-pypi.yml` workflow is triggered manually (`workflow_dispatch`) or by pushing a tag; it is enabled only for `pyalp.v*` tags. It downloads the assets attached to the GitHub Release and uploads them to PyPI using the secret `PYPI_API_TOKEN`.
+  - The promote job is configured to use the repository `production` environment. Access to the `PYPI_API_TOKEN` secret in that environment requires an approval step by repository administrators (see Settings → Environments → production).
+
+Checklist before releasing
+- Bump the `pyalp/pyproject.toml` version.
+- Ensure `pyalp/pyproject.toml` includes runtime dependencies (e.g., `numpy>=1.22`) so pip installs them automatically.
+- Ensure `CIBW_BEFORE_BUILD` in `.github/workflows/publish-to-testpypi.yml` builds your new backend (`BUILD_TARGETS` updated).
+
+----------------------
+Local developer workflow (CMake-generated target)
+------------------------------------------------
+
+The project now exposes a CMake-generated `pyalp` target that builds all
+enabled pyalp backends and packages wheel(s) using the same packaging logic
+that CI uses. This is the recommended local path and replaces the previous
+helper script.
+
+Usage:
+
+```bash
+# Configure from repo root (LOCAL profile enables host optimizations)
+cmake -S . -B build/host -DALP_BUILD_PROFILE=LOCAL -DENABLE_PYALP=ON -G Ninja
+
+# Build and package via the CMake target (this will place wheels in build/host/dist)
+cmake --build build/host --target pyalp --parallel
+```
+
+After the target completes you will see a message pointing to the wheel(s).
+You can either add the generated python directory to `PYTHONPATH` for quick +iteration: + +```bash +export PYTHONPATH="$PYTHONPATH:$(pwd)/build/host/python" +``` + +Or install the wheel into a venv: + +```bash +python -m venv /tmp/pyalp-venv +source /tmp/pyalp-venv/bin/activate +pip install build/host/dist/*.whl +``` + +If you need to reproduce CI-style portable wheels, configure with the +`DEPLOYMENT` profile instead: + +```bash +cmake -S . -B build/cp311 -DALP_BUILD_PROFILE=DEPLOYMENT -DENABLE_PYALP=ON -G Ninja +cmake --build build/cp311 --target pyalp --parallel +``` + +Notes: +- Ensure system dependencies like `libnuma-dev` and `libomp` are installed when building backends that require them. +- The packaging step relies on `CMAKE_BUILD_DIR` to locate generated metadata and prebuilt `.so` files; the CMake target sets this environment appropriately when invoking `pip wheel`. + +Troubleshooting / common pitfalls +- Missing metadata in wheels: Make sure CMake writes the generated `pyalp_metadata.py` into the per-ABI build dir (CI sets `CMAKE_BUILD_DIR` and `setup.py` copies `pyalp_metadata.py` -> `_metadata.py`). If your metadata template changed, update `pyalp/src/pyalp/_metadata.py.in`. +- Prebuilt `.so` not found: `pyalp/setup.py` discovers prebuilt shared objects under `build/**`. Ensure you used the same target name and that the produced filename contains the Python ABI tag (or set `PREBUILT_PYALP_SO` to the path). +- ABI contamination across wheels: CI uses per-ABI build directories (e.g. `build/cp311`) to avoid cross-ABI contamination. When testing locally, clean build dirs between ABI runs. +- pybind11 registration collisions: If you see type-registration errors when importing multiple different backends in the same process, prefer running backends in separate processes or ensure pybind11 wrappers use `py::module_local()` for types that may be defined in multiple modules. + +Security notes +- The promotion workflow uses a `PYPI_API_TOKEN` stored as a secret (likely in the repository environment `production`). If you did not create this token yourself, check: + - Repository Settings → Secrets and variables → Actions + - Environments → production → Secrets + - Organization-level secrets (if applicable) +- Rotate/revoke tokens if you discover an unexpected token. + +Appendix — quick pointers to edit points +- Add CMake target: top-level CMake / `pyalp/src` CMakeLists. +- Ensure discovery in `pyalp/setup.py`: supported names in `find_all_prebuilt()` and the glob-based discovery. +- Include generated metadata: `pyalp/src/pyalp/_metadata.py.in` (CMake variables are substituted into this template). +- CI build targets: `.github/workflows/publish-to-testpypi.yml` (search for `BUILD_TARGETS` and `BACKEND_FLAGS` in `CIBW_BEFORE_BUILD`). +- Promote workflow: `.github/workflows/promote-to-pypi.yml` (uses `PYPI_API_TOKEN` and `environment: production`). + + diff --git a/pyalp/PINNED_PYBIND11 b/pyalp/PINNED_PYBIND11 new file mode 100644 index 000000000..1cb281060 --- /dev/null +++ b/pyalp/PINNED_PYBIND11 @@ -0,0 +1 @@ +8d503e30be400ad431d3d140707803e87e75fad7 diff --git a/pyalp/README.md b/pyalp/README.md new file mode 100644 index 000000000..aafcee8b2 --- /dev/null +++ b/pyalp/README.md @@ -0,0 +1,175 @@ + +# pyalp (packaged) + +This directory contains the Python package layout for the `pyalp` bindings +that expose parts of the ALP GraphBLAS project via pybind11. 
+ +Quick start +----------- + +Create and activate a virtual environment, then install the package (example +using PyPI (recommended): + +```bash +python -m venv venv +source venv/bin/activate +pip install alp-graphblas +``` + +If you want to try a pre-release from TestPyPI instead, use the TestPyPI +index but install the same package name `alp-graphblas` (pip will pull the +package and its dependencies from the given index): + +```bash +python -m venv venv +source venv/bin/activate +pip install --index-url https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple alp-graphblas +``` + +Basic usage +----------- +# pyalp (packaged) + +This directory contains the Python package layout for the `pyalp` bindings +that expose parts of the ALP GraphBLAS project via pybind11. + +Quick start +----------- + +Create and activate a virtual environment, then install the published package +`alp-graphblas` from PyPI or TestPyPI as shown above. Note: the import name +inside Python remains `pyalp` (the package provides the `pyalp` module), so +your code still does `import pyalp` after installation. + +Basic usage +----------- + +The package exposes a small set of helpers and one or more compiled backend +modules. Use these helpers to list and select available backends and to read +runtime build metadata: + +```python +import pyalp +print('pyalp build metadata:', pyalp.get_build_metadata()) +print('available backends:', pyalp.list_backends()) + +# Import a specific backend module (returns the compiled module) +backend = pyalp.get_backend('pyalp_ref') # or 'pyalp_omp', 'pyalp_nonblocking' +print('backend module:', backend) +``` + +Backends and import caveat +-------------------------- + +Wheels may include multiple compiled backend modules (for example +`pyalp_ref`, `pyalp_omp`, `pyalp_nonblocking`). Historically, importing +multiple different compiled backends in the same Python process could raise +pybind11 registration errors (types duplicated). The bindings now use +`py::module_local()` for core wrapper types, which reduces collisions, but if +you encounter issues importing more than one backend in-process, prefer +testing each backend in a separate process (the supplied test runner does +this). + +Runtime metadata +---------------- + +The package provides a metadata module generated at build time by CMake. Use +`pyalp.get_build_metadata()` to access keys such as: + +- `version` — pyalp package version +- `build_type` — CMake build type used (e.g., Release) +- `alp_version` — ALP repository version or tag used to build +- `alp_git_commit` / `alp_git_branch` — Git information captured by CI +- `license` — detected repository license (e.g. Apache-2.0) + +`pyalp.get_algorithm_metadata()` contains algorithm/backends info and a +`readme` key with packaged README contents. + +Minimal example — conjugate gradient (small test) +------------------------------------------------ + +Save the following as `test_cg.py` and run `python test_cg.py` after installing +`pyalp` and `numpy`. The example shows selecting a backend explicitly via +`pyalp.get_backend()` and then using the backend's `Matrix`, `Vector`, and +`conjugate_gradient` API. + +```python +#!/usr/bin/env python3 +""" +Test script for the pyalp backend (example uses the OpenMP backend name +`pyalp_omp`, but you can use `pyalp_ref` or another available backend). 
+ +Usage: + python test_cg.py + +Dependencies: + - numpy + - pyalp (installed and providing a backend such as pyalp_omp) +""" + +import numpy as np +import pyalp + +# Choose the backend module (change name if you want a different backend) +pyalp = pyalp.get_backend('pyalp_omp') # or 'pyalp_ref', 'pyalp_nonblocking' + +# Generate a small sparse linear system using numpy arrays +N, M = 5, 5 +idata = np.array([0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4], dtype=np.int32) +jdata = np.array([0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4], dtype=np.int32) +vdata = np.array([1, 1, 1, 1, 0.5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1], dtype=np.float64) +b = np.array([1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float64) +x = np.array([1.0, 1.0, 0.0, 0.3, -1.0], dtype=np.float64) +r = np.zeros(5, dtype=np.float64) +u = np.zeros(5, dtype=np.float64) +tmp = np.zeros(5, dtype=np.float64) + +# Create the pyalp Matrix and Vector objects +alpmatrixA = pyalp.Matrix(5, 5, idata, jdata, vdata) +alpvectorx = pyalp.Vector(5, x) +alpvectorb = pyalp.Vector(5, b) +alpvectorr = pyalp.Vector(5, r) +alpvectoru = pyalp.Vector(5, u) +alpvectortmp = pyalp.Vector(5, tmp) + +maxiterations = 2000 +verbose = 1 + +# Solve the linear system using the conjugate gradient method in the backend +iterations, residual = pyalp.conjugate_gradient( + alpmatrixA, + alpvectorx, + alpvectorb, + alpvectorr, + alpvectoru, + alpvectortmp, + maxiterations, + verbose, +) +print('iterations =', iterations) +print('residual =', residual) + +# Convert the result vector to a numpy array and print it +x_result = alpvectorx.to_numpy() +print('x_result =', x_result) + +# Check if the result is close to the expected solution +assert np.allclose(x_result, np.array([1.0, 1.0, 0.0, 0.13598679, -0.88396565])), 'solution mismatch' +``` + +Packaging notes (for maintainers) +-------------------------------- + +- The CI uses a top-level CMake configure/build to produce the native shared + object and a CMake-configured `_metadata.py`. The packaging `setup.py` then + copies the built `.so` and `_metadata.py` into the wheel. +- The CI passes Git/version information into CMake so the generated metadata + is populated even in detached/CI environments. + +If you modify the metadata template, update `pyalp/src/pyalp/_metadata.py.in`. + +License +------- + +See the repository `LICENSE` at the project root; the packaging pipeline +attempts to detect and embed the license string in runtime metadata. 
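+
+Moving matrices between backends
+--------------------------------
+
+Because wrapper types are registered module-locally, a `Matrix` created by one
+backend cannot be passed directly to another backend's functions. Each backend
+therefore exposes a `matrix_to_coo()` helper that serializes a matrix to plain
+numpy arrays, which any backend can consume. A minimal sketch, assuming a
+wheel that ships both `pyalp_ref` and `pyalp_omp` and that importing both
+backends in one process works in your environment (see the caveat above):
+
+```python
+import numpy as np
+import pyalp
+
+ref = pyalp.get_backend('pyalp_ref')
+omp = pyalp.get_backend('pyalp_omp')
+
+i = np.array([0, 1], dtype=np.int64)
+j = np.array([0, 1], dtype=np.int64)
+v = np.array([1.0, 2.0], dtype=np.float64)
+A_ref = ref.Matrix(2, 2, i, j, v)
+
+# Serialize with one backend, rebuild with the other; only plain numpy
+# arrays cross the module boundary, so no cross-module type sharing is needed.
+i2, j2, v2, nrows, ncols = ref.matrix_to_coo(A_ref)
+A_omp = omp.Matrix(nrows, ncols, i2, j2, v2)
+```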
diff --git a/pyalp/extern/pybind11 b/pyalp/extern/pybind11 new file mode 160000 index 000000000..8d503e30b --- /dev/null +++ b/pyalp/extern/pybind11 @@ -0,0 +1 @@ +Subproject commit 8d503e30be400ad431d3d140707803e87e75fad7 diff --git a/pyalp/pyproject.toml b/pyalp/pyproject.toml new file mode 100644 index 000000000..8b71b194c --- /dev/null +++ b/pyalp/pyproject.toml @@ -0,0 +1,34 @@ +[build-system] +requires = ["setuptools>=61.0", "wheel", "pybind11>=2.6"] +build-backend = "setuptools.build_meta" + +[project] +name = "alp-graphblas" +version = "0.8.41" +description = "Python bindings for ALP GraphBLAS (minimal package layout)" +authors = [ { name = "ALP" } ] +readme = "README.md" +license = { text = "Apache-2.0" } +requires-python = ">=3.8" + +dependencies = [ + "numpy>=1.22", +] + +[tool.setuptools.packages.find] +where = ["src"] + +[tool.cibuildwheel] +# Build CPython 3.9–3.12 wheels; skip PyPy and musllinux for now +build = "cp39-* cp310-* cp311-* cp312-*" +skip = "pp* *-musllinux* *-manylinux_i686 *-win32" +build-verbosity = 1 + +[tool.cibuildwheel.linux] +archs = ["x86_64"] + +[tool.cibuildwheel.macos] +archs = ["x86_64", "arm64"] + +[tool.cibuildwheel.windows] +archs = ["AMD64"] diff --git a/pyalp/setup.py b/pyalp/setup.py new file mode 100644 index 000000000..4ff84196d --- /dev/null +++ b/pyalp/setup.py @@ -0,0 +1,277 @@ +from setuptools import setup, Extension +from setuptools import find_packages +from setuptools.command.build_ext import build_ext as _build_ext +import sys +import os +import glob +import shutil +import sysconfig +import pathlib +bdist_wheel_cmd = None +try: + # Used to mark wheel as non-pure when bundling a prebuilt .so + from wheel.bdist_wheel import bdist_wheel as _bdist_wheel + + class bdist_wheel(_bdist_wheel): + def finalize_options(self): + super().finalize_options() + # wheel contains a native shared object; mark as platform-specific + self.root_is_pure = False + + bdist_wheel_cmd = bdist_wheel +except Exception: + bdist_wheel_cmd = None +_have_pybind11 = False +try: + # import lazily — only needed when we build from sources + from pybind11.setup_helpers import Pybind11Extension, build_ext + _have_pybind11 = True +except Exception: + Pybind11Extension = None + build_ext = None + +here = os.path.abspath(os.path.dirname(__file__)) + +prebuilt_env = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") + +# Discover prebuilt backend shared objects in the CMake build tree. +def find_all_prebuilt(): + """Discover prebuilt shared objects only inside the directory explicitly + provided by the caller via the CMAKE_BUILD_DIR (or PYALP_BUILD_DIR) + environment variable. + + Per the packaging policy, this function will not probe arbitrary + sibling directories or search the source tree; callers must provide a + well-defined build directory. If no build directory is set, an empty + mapping is returned (so callers can fall back to building from + sources when pybind11 is available). + """ + supported = ["pyalp_ref", "pyalp_omp", "pyalp_nonblocking", "_pyalp"] + py_tag = f"cp{sys.version_info[0]}{sys.version_info[1]}" + mapping = {} + + cmake_build_dir = os.environ.get("CMAKE_BUILD_DIR") or os.environ.get("PYALP_BUILD_DIR") + # If no explicit build dir is provided, fall back to the conventional + # out-of-source `../build` directory. This keeps discovery inside a + # single well-defined location and preserves prior CI behavior. 
+ if not cmake_build_dir: + cmake_build_dir = os.path.abspath(os.path.join(here, '..', 'build')) + else: + cmake_build_dir = os.path.abspath(cmake_build_dir) + + for mod in supported: + found = [] + patterns = [ + os.path.join(cmake_build_dir, '**', f'{mod}*.so'), + os.path.join(cmake_build_dir, '**', f'{mod}*.pyd'), + ] + for pat in patterns: + try: + found.extend(glob.glob(pat, recursive=True)) + except Exception: + pass + if not found: + continue + # Prefer candidate matching current ABI tag in filename or parent dir + matching = [c for c in found if py_tag in os.path.basename(c) or py_tag in os.path.basename(os.path.dirname(c))] + chosen = (matching or found)[0] + mapping[mod] = os.path.abspath(chosen) + + return mapping + +# Determine prebuilt modules mapping. If user specified a single PREBUILT env var, +# map it to its basename (module name) where possible; otherwise search the build tree. +prebuilt_modules = {} +if prebuilt_env: + # map provided path to module name by deriving filename stem + bn = os.path.basename(prebuilt_env) + modname = bn.split('.', 1)[0] + prebuilt_modules[modname] = os.path.abspath(prebuilt_env) +else: + prebuilt_modules = find_all_prebuilt() + +package_data = {} +ext_modules = [] + +class build_ext_copy_prebuilt(_build_ext): + """Custom build_ext that copies a prebuilt shared object into the build dir. + + This ensures the extension is installed into platlib and the wheel is valid + for auditwheel repair. + """ + + def build_extension(self, ext): + # Determine target path for the extension + target_path = self.get_ext_fullpath(ext.name) + os.makedirs(os.path.dirname(target_path), exist_ok=True) + # Choose the source prebuilt file corresponding to this extension + # ext.name is like 'pyalp.' + mod_fullname = ext.name + modname = mod_fullname.split('.', 1)[1] if '.' in mod_fullname else mod_fullname + + # Priority: explicit env var -> mapping discovered earlier -> glob search + src = os.environ.get("PREBUILT_PYALP_SO") or os.environ.get("PYALP_PREBUILT_SO") + if not src: + src = prebuilt_modules.get(modname) + if not src: + # No explicit PREBUILT path or discovered prebuilt module in the + # provided build directory. Do not search arbitrary locations. + src = None + + if not src or not os.path.exists(src): + raise RuntimeError(f"Prebuilt pyalp shared object not found for module '{modname}' during build_ext") + shutil.copyfile(src, target_path) + + # The _metadata.py file is generated by CMake in the build directory. + # We need to find it and copy it to the same directory as the extension. + ext_build_dir = os.path.dirname(target_path) + # Only copy generated metadata when an explicit build directory is + # provided via CMAKE_BUILD_DIR or PYALP_BUILD_DIR. We do not search the + # source tree or other locations for generated metadata. + cmake_build_dir = os.environ.get("CMAKE_BUILD_DIR") or os.environ.get("PYALP_BUILD_DIR") + if cmake_build_dir: + metadata_src_path = os.path.join(os.path.abspath(cmake_build_dir), "pyalp_metadata.py") + metadata_dest_path = os.path.join(ext_build_dir, "_metadata.py") + if os.path.exists(metadata_src_path): + print(f"Copying generated metadata from {metadata_src_path} to {metadata_dest_path}") + shutil.copyfile(metadata_src_path, metadata_dest_path) + else: + print(f"Warning: Generated metadata file not found at {metadata_src_path}. 
Skipping copy.") + else: + print("CMAKE_BUILD_DIR / PYALP_BUILD_DIR not set; skipping metadata file copy.") + +if prebuilt_modules: + # Create an Extension for each discovered prebuilt module so setuptools will + # place the shared object into the package (platlib). + for modname in prebuilt_modules.keys(): + ext_modules.append(Extension(f"pyalp.{modname}", sources=[])) +else: + if not _have_pybind11: + raise RuntimeError("pybind11 is required to build the extension from sources. Install pybind11 or provide PREBUILT_PYALP_SO to bundle a prebuilt .so.") + assert Pybind11Extension is not None + ext_modules = [ + Pybind11Extension( + "pyalp._pyalp", + ["src/pyalp/module_entry.cpp"], + include_dirs=[ + os.path.join(here, "src"), + os.path.join(here, "src", "pyalp"), + os.path.join(here, "extern", "pybind11", "include"), + os.path.normpath(os.path.join(here, "..", "include")), + ], + define_macros=[("PYALP_MODULE_NAME", "_pyalp"), ("PYALP_MODULE_LOCAL", "1")], + cxx_std=14, + ) + ] + +# Read metadata from pyproject.toml when available to avoid mismatched values +def _read_pyproject_toml(path): + if not os.path.exists(path): + return {} + # Prefer the stdlib tomllib on Python 3.11+, otherwise fall back to + # the third-party `toml` package if available. Avoid importing + # tomllib at module import time on older Pythons to prevent SyntaxError + # when cibuildwheel invokes builds using older interpreters. + try: + if sys.version_info >= (3, 11): + import tomllib + with open(path, "rb") as f: + return tomllib.load(f) or {} + except Exception: + pass + try: + import toml + with open(path, "r", encoding="utf-8") as f: + return toml.load(f) or {} + except Exception: + return {} + +pyproject_path = os.path.abspath(os.path.join(here, "..", "pyproject.toml")) +_pyproject = _read_pyproject_toml(pyproject_path) + +_name = None +_version = None +_description = None + +# PEP 621 [project] table +if isinstance(_pyproject, dict) and "project" in _pyproject: + proj = _pyproject.get("project", {}) + _name = proj.get("name") or _name + _version = proj.get("version") or _version + _description = proj.get("description") or _description +# poetry configuration [tool.poetry] +elif isinstance(_pyproject, dict) and _pyproject.get("tool", {}).get("poetry"): + poetry = _pyproject["tool"]["poetry"] + _name = poetry.get("name") or _name + _version = poetry.get("version") or _version + _description = poetry.get("description") or _description + +setup_kwargs = { + "name": _name or "pyalp", + "version": _version or "0.8.1", + "description": _description or "pyalp package (C++ bindings)", + "packages": find_packages(where="src"), + "package_dir": {"": "src"}, + # Ensure generated metadata is included in the wheel. The build process + # will copy the generated file to the package build dir as `_metadata.py`. + "package_data": {"pyalp": ["_metadata.py"]}, + "ext_modules": ext_modules, + "include_package_data": True, +} + +# Prefer generating egg-info in the out-of-source build directory so the +# source tree is not polluted during wheel builds. If the CI or caller set +# CMAKE_BUILD_DIR we use that; otherwise default to ../build relative to the +# package directory. +egg_base = os.environ.get("CMAKE_BUILD_DIR") +if not egg_base: + # Try to auto-detect an out-of-source CMake build directory that is a + # sibling of the repository root. We consider a directory to be a CMake + # build if it contains a CMakeCache.txt file. 
This supports build trees + # named arbitrarily (for example `test_build_dirname`) instead of assuming + # a literal `build` directory. + repo_parent = os.path.abspath(os.path.join(here, '..')) + candidates = [] + try: + for entry in os.listdir(repo_parent): + p = os.path.join(repo_parent, entry) + if os.path.isdir(p) and os.path.exists(os.path.join(p, 'CMakeCache.txt')): + candidates.append(p) + except Exception: + candidates = [] + if candidates: + # Prefer a directory literally named 'build' if present, else pick the + # first candidate found. + build_dir = None + for c in candidates: + if os.path.basename(c) == 'build': + build_dir = c + break + if not build_dir: + build_dir = candidates[0] + egg_base = os.path.abspath(build_dir) + else: + egg_base = os.path.abspath(os.path.join(here, '..', 'build')) + +# Supply setuptools options to place egg-info under the build directory +# Only set egg_info when an explicit build directory environment variable is +# provided. Do not attempt to auto-detect or write egg-info into the source +# tree when no build dir is specified. +if egg_base: + setup_kwargs.setdefault("options", {}) + setup_kwargs["options"]["egg_info"] = {"egg_base": egg_base} + +# Supply cmdclass entries for build_ext (copy-prebuilt or pybind11) and bdist_wheel +cmdclass = {} +# If we detected prebuilt modules, use the copy-prebuilt build_ext which copies +# each discovered shared object into the package build directory. +if prebuilt_modules: + cmdclass["build_ext"] = build_ext_copy_prebuilt +elif build_ext is not None: + cmdclass["build_ext"] = build_ext +if bdist_wheel_cmd is not None: + cmdclass["bdist_wheel"] = bdist_wheel_cmd +if cmdclass: + setup_kwargs["cmdclass"] = cmdclass + +setup(**setup_kwargs) diff --git a/pyalp/src/CMakeLists.txt b/pyalp/src/CMakeLists.txt new file mode 100644 index 000000000..56759a637 --- /dev/null +++ b/pyalp/src/CMakeLists.txt @@ -0,0 +1,293 @@ +cmake_minimum_required(VERSION 3.14) +project(python_module LANGUAGES CXX) +# Include pybind11 relative to this CMakeLists.txt directory +get_filename_component(PYBIND11_SRC_DIR "${CMAKE_CURRENT_LIST_DIR}/../extern/pybind11" ABSOLUTE) +add_subdirectory(${PYBIND11_SRC_DIR} ${CMAKE_BINARY_DIR}/pyalp/extern/pybind11) + +# Try to find OpenMP - on macOS this may require Homebrew libomp to be installed. +# We do this quietly here and link the imported target to the module targets that +# require OpenMP so CMake's FindOpenMP is used consistently instead of relying on +# environment CPPFLAGS/LDFLAGS. +find_package(OpenMP QUIET) + +# When configuring from the pyalp package directory (e.g. in cibuildwheel +# containers) the top-level CMake that usually defines these options is not +# executed. Provide sensible cached defaults so this CMakeLists can be used +# standalone. The top-level configuration will override these cached values +# when present. +if(NOT DEFINED WITH_REFERENCE_BACKEND) + set(WITH_REFERENCE_BACKEND ON CACHE BOOL "Build Reference backend (default for pyalp package)") +endif() +if(NOT DEFINED WITH_OMP_BACKEND) + set(WITH_OMP_BACKEND ON CACHE BOOL "Build OMP backend (default for pyalp package)") +endif() + +# Allow callers to choose whether backend modules use py::module_local() +# for type registration. Enabling module-local registrations avoids +# duplicate-type registration errors when importing multiple backend +# extension modules into the same interpreter. 
However, module-local +# registrations isolate types per-module and will prevent passing +# pybind11-wrapped C++ objects between modules unless extra conversion +# glue is implemented. Default: ON (safe for multi-backend imports). +if(NOT DEFINED PYALP_MODULE_LOCAL_DEFAULT) + option(PYALP_MODULE_LOCAL_DEFAULT "Use py::module_local for backend modules" ON) +endif() + +if(PYALP_MODULE_LOCAL_DEFAULT) + set(PYALP_MODULE_LOCAL_VAL 1) +else() + set(PYALP_MODULE_LOCAL_VAL 0) +endif() + +assert_defined_variables( WITH_REFERENCE_BACKEND WITH_OMP_BACKEND ) + +# target listing all examples, to build them at once with 'make examples' + + +if( WITH_REFERENCE_BACKEND ) + set(PYALP_MODULE_NAME pyalp_ref) + + pybind11_add_module( ${PYALP_MODULE_NAME} pyalp/module_entry.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp ) + target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME}) + # By default enable module-local pybind11 registrations so multiple backend + # extension modules can be imported into the same interpreter without + # colliding over identical C++ type registrations. Set to 0 only when + # explicit cross-module type sharing is required. + target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_LOCAL=${PYALP_MODULE_LOCAL_VAL}) + # Ensure compiler can find headers placed under pyalp/src and pyalp/src/pyalp + target_include_directories(${PYALP_MODULE_NAME} PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/pyalp) + # Link your required libraries + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference common_flags) + # If OpenMP support is available, ensure the module links the OpenMP imported target + if(OpenMP_CXX_FOUND) + target_link_libraries(${PYALP_MODULE_NAME} PRIVATE OpenMP::OpenMP_CXX) + endif() + # (Optional) If you need extra compile flags or C++ standard: + set_target_properties(${PYALP_MODULE_NAME} PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES) + # target_compile_options(conjugate_gradient_python PRIVATE -Wall -Wextra) + + # On Apple platforms with newer Clang, downgrade the template keyword warning from error to warning + if(APPLE) + target_compile_options(${PYALP_MODULE_NAME} PRIVATE -Wno-error=missing-template-arg-list-after-template-kw) + endif() + + add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD + COMMAND ${CMAKE_COMMAND} -E echo "" + COMMAND ${CMAKE_COMMAND} -E echo "============================================================" + COMMAND ${CMAKE_COMMAND} -E echo "Build complete!" 
+    # Print the exact built module path and a PYTHONPATH suggestion pointing
+    # to the directory where the extension module is placed inside the
+    # top-level build tree for pyalp: ${CMAKE_BINARY_DIR}/pyalp/src
+    COMMAND ${CMAKE_COMMAND} -E echo "Built module: $<TARGET_FILE:${PYALP_MODULE_NAME}>"
+    COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/pyalp/src to your PYTHONPATH:"
+    COMMAND ${CMAKE_COMMAND} -E echo "Or import the compiled module by name: ${PYALP_MODULE_NAME}"
+    COMMAND ${CMAKE_COMMAND} -E echo "============================================================"
+  )
+
+endif()
+
+
+if( WITH_OMP_BACKEND )
+  set(PYALP_MODULE_NAME pyalp_omp)
+
+  pybind11_add_module( ${PYALP_MODULE_NAME} pyalp/module_entry.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp )
+  target_compile_definitions( ${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME})
+  # Enable module-local registration for this backend (see note above)
+  target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_LOCAL=${PYALP_MODULE_LOCAL_VAL})
+  target_include_directories(${PYALP_MODULE_NAME} PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/pyalp)
+  # Link your required libraries
+  target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference_omp common_flags)
+  # If OpenMP support is available, ensure the module links the OpenMP imported target
+  if(OpenMP_CXX_FOUND)
+    target_link_libraries(${PYALP_MODULE_NAME} PRIVATE OpenMP::OpenMP_CXX)
+  endif()
+  # (Optional) If you need extra compile flags or C++ standard:
+  set_target_properties(${PYALP_MODULE_NAME} PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES)
+  # target_compile_options(conjugate_gradient_python PRIVATE -Wall -Wextra)
+
+  add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD
+    COMMAND ${CMAKE_COMMAND} -E echo ""
+    COMMAND ${CMAKE_COMMAND} -E echo "============================================================"
+    COMMAND ${CMAKE_COMMAND} -E echo "Build complete!"
+    COMMAND ${CMAKE_COMMAND} -E echo "Built module: $<TARGET_FILE:${PYALP_MODULE_NAME}>"
+    COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/pyalp/src to your PYTHONPATH:"
+    COMMAND ${CMAKE_COMMAND} -E echo "Or import the compiled module by name: ${PYALP_MODULE_NAME}"
+    COMMAND ${CMAKE_COMMAND} -E echo "============================================================"
+  )
+
+endif()
+
+
+if( WITH_NONBLOCKING_BACKEND )
+  set(PYALP_MODULE_NAME pyalp_nonblocking)
+
+  pybind11_add_module( ${PYALP_MODULE_NAME} pyalp/module_entry.cpp conjugate_gradient.hpp matrix_wrappers.hpp utils.hpp vector_wrappers.hpp )
+  target_compile_definitions( ${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_NAME=${PYALP_MODULE_NAME})
+  # Enable module-local registration for this backend (see note above)
+  target_compile_definitions(${PYALP_MODULE_NAME} PRIVATE PYALP_MODULE_LOCAL=${PYALP_MODULE_LOCAL_VAL})
+  target_include_directories(${PYALP_MODULE_NAME} PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_SOURCE_DIR}/pyalp)
+  # Link your required libraries
+  target_link_libraries(${PYALP_MODULE_NAME} PRIVATE backend_shmem_shared backend_reference_omp common_flags)
+  # If OpenMP support is available, ensure the module links the OpenMP imported target
+  if(OpenMP_CXX_FOUND)
+    target_link_libraries(${PYALP_MODULE_NAME} PRIVATE OpenMP::OpenMP_CXX)
+  endif()
+  # (Optional) If you need extra compile flags or C++ standard:
+  set_target_properties(${PYALP_MODULE_NAME} PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES)
+  # target_compile_options(conjugate_gradient_python PRIVATE -Wall -Wextra)
+
+  add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD
+    COMMAND ${CMAKE_COMMAND} -E echo ""
+    COMMAND ${CMAKE_COMMAND} -E echo "============================================================"
+    COMMAND ${CMAKE_COMMAND} -E echo "Build complete!"
+    COMMAND ${CMAKE_COMMAND} -E echo "Built module: $<TARGET_FILE:${PYALP_MODULE_NAME}>"
+    COMMAND ${CMAKE_COMMAND} -E echo "To use the Python bindings, append ${CMAKE_BINARY_DIR}/pyalp/src to your PYTHONPATH:"
+    COMMAND ${CMAKE_COMMAND} -E echo "Or import the compiled module by name: ${PYALP_MODULE_NAME}"
+    COMMAND ${CMAKE_COMMAND} -E echo "============================================================"
+  )
+
+endif()
+
+# --- Metadata generation ---
+# These variables are expected to be set by the top-level CMakeLists.txt,
+# but we provide defaults for standalone builds.
+if(NOT DEFINED pyalp_VERSION)
+  set(pyalp_VERSION "0.0.0")
+endif()
+if(NOT DEFINED ALP_VERSION)
+  set(ALP_VERSION "unknown")
+  # If the top-level project provided ALP_VERSION (cached) this will be used.
+  # Otherwise keep 'unknown'. When building from the top-level CMakeLists
+  # ALP_VERSION should already be set via cache.
+endif()
+if(NOT DEFINED ALP_BUILD_TYPE)
+  # Use CMAKE_BUILD_TYPE as the ALP build type unless explicitly supplied
+  if(DEFINED CMAKE_BUILD_TYPE)
+    set(ALP_BUILD_TYPE "${CMAKE_BUILD_TYPE}")
+  else()
+    set(ALP_BUILD_TYPE "unknown")
+  endif()
+endif()
+if(NOT DEFINED ALP_GIT_COMMIT_SHA)
+  set(ALP_GIT_COMMIT_SHA "unknown")
+endif()
+if(NOT DEFINED ALP_GIT_BRANCH)
+  set(ALP_GIT_BRANCH "unknown")
+endif()
+
+# Determine repository license (simple heuristic from LICENSE file) if not provided
+if(NOT DEFINED ALP_LICENSE)
+  if(EXISTS "${CMAKE_SOURCE_DIR}/LICENSE")
+    file(READ "${CMAKE_SOURCE_DIR}/LICENSE" ALP_LICENSE_RAW)
+    string(FIND "${ALP_LICENSE_RAW}" "Apache" _has_apache)
+    if(_has_apache GREATER -1)
+      set(ALP_LICENSE "Apache-2.0")
+    else()
+      string(FIND "${ALP_LICENSE_RAW}" "BSD" _has_bsd)
+      if(_has_bsd GREATER -1)
+        set(ALP_LICENSE "BSD-3-Clause")
+      else()
+        set(ALP_LICENSE "unknown")
+      endif()
+    endif()
+  else()
+    set(ALP_LICENSE "unknown")
+  endif()
+endif()
+
+# Read the packaged README (prefer pyalp/README.md, fall back to the
+# top-level README.md) and make it available to the metadata template. We
+# escape triple quotes so the Python triple-quoted string in the template
+# remains valid.
+if(EXISTS "${CMAKE_SOURCE_DIR}/pyalp/README.md")
+  file(READ "${CMAKE_SOURCE_DIR}/pyalp/README.md" PYALP_README_RAW)
+elseif(EXISTS "${CMAKE_SOURCE_DIR}/README.md")
+  file(READ "${CMAKE_SOURCE_DIR}/README.md" PYALP_README_RAW)
+endif()
+if(DEFINED PYALP_README_RAW)
+  # Escape triple-quotes to avoid breaking the Python triple-quoted string
+  string(REPLACE "\"\"\"" "\\\"\\\"\\\"" PYALP_README_ESCAPED "${PYALP_README_RAW}")
+else()
+  set(PYALP_README_ESCAPED "")
+endif()
+
+# Get Python and pybind11 versions
+find_package(PythonInterp REQUIRED)
+set(PYTHON_VERSION ${PYTHON_VERSION_STRING})
+set(pybind11_VERSION ${pybind11_VERSION})
+
+# This is a simplified list. A more robust solution would inspect the build targets.
+set(pyalp_ALGORITHMS "conjugate_gradient")
+set(pyalp_BACKENDS "reference, reference_omp")
+
+# Configure the metadata file from the template
+set(METADATA_TEMPLATE "${CMAKE_CURRENT_SOURCE_DIR}/pyalp/_metadata.py.in")
+set(METADATA_OUTPUT "${CMAKE_BINARY_DIR}/pyalp_metadata.py")
+# Make the README content available as @PYALP_README_ESCAPED@ to the
+# template (CMake configure_file will perform the substitution).
+configure_file(${METADATA_TEMPLATE} ${METADATA_OUTPUT} @ONLY)
+
+# This command is useful for debugging inside a cibuildwheel container
+# to verify that the file is being generated correctly.
+# add_custom_command(TARGET ${PYALP_MODULE_NAME} POST_BUILD
+#   COMMAND ${CMAKE_COMMAND} -E echo "Generated metadata file content:"
+#   COMMAND ${CMAKE_COMMAND} -E cat ${METADATA_OUTPUT}
+# )
+
+
+# Add a convenience CMake target to build all enabled pyalp backends and
+# package them into a wheel using the standard Python packaging path.
+# This creates a "pyalp" top-level build target you can invoke via +# cmake --build --target pyalp --parallel +# The target will build the enabled backend extension targets and then +# run `python -m pip wheel pyalp` with CMAKE_BUILD_DIR set so +# `pyalp/setup.py` can discover the prebuilt shared objects and metadata. + +set(pyalp_package_targets "") +if(WITH_REFERENCE_BACKEND) + list(APPEND pyalp_package_targets pyalp_ref) +endif() +if(WITH_OMP_BACKEND) + list(APPEND pyalp_package_targets pyalp_omp) +endif() +if(WITH_NONBLOCKING_BACKEND) + list(APPEND pyalp_package_targets pyalp_nonblocking) +endif() + +if(NOT pyalp_package_targets) + # No backends enabled: provide a dummy target that still attempts packaging + list(APPEND pyalp_package_targets "") +endif() + +string(JOIN " " pyalp_package_targets_str ${pyalp_package_targets}) + +# Create a simple, top-level alias target `pyalp` that just builds the +# enabled backend extension targets. This avoids packing logic in the +# subdirectory and makes `make pyalp` (or `cmake --build --target pyalp`) a +# straightforward way to build the Python extension modules. +if(NOT pyalp_package_targets) + # No enabled backends: provide an empty phony target. + add_custom_target(pyalp + COMMENT "pyalp: no backends enabled" + ) +else() + add_custom_target(pyalp + DEPENDS ${pyalp_package_targets} + COMMENT "Build enabled pyalp backend extension modules" + ) +endif() + diff --git a/pyalp/src/conjugate_gradient.hpp b/pyalp/src/conjugate_gradient.hpp new file mode 100644 index 000000000..e663f6256 --- /dev/null +++ b/pyalp/src/conjugate_gradient.hpp @@ -0,0 +1,105 @@ +#include +#include +#include +#include +#include +#include + +#ifdef _CG_COMPLEX + #include +#endif + +#include + +#include + +#include + +#include + +#include +#include +#include + +#include + +using BaseScalarType = double; +#ifdef _CG_COMPLEX + using ScalarType = std::complex< BaseScalarType >; +#else + using ScalarType = BaseScalarType; +#endif + + +constexpr const BaseScalarType tol = 0.000001; + +/** The default number of maximum iterations. */ +constexpr const size_t max_iters = 10000; + +constexpr const double c1 = 0.0001; +constexpr const double c2 = 0.0001; + +std::tuple +conjugate_gradient( + grb::Matrix< ScalarType > & L, + grb::Vector< ScalarType > & x, + grb::Vector< ScalarType > & b, + grb::Vector< ScalarType > & r, + grb::Vector< ScalarType > & u, + grb::Vector< ScalarType > & temp, + size_t solver_iterations = 1000, + size_t verbose = 0 + //const struct input &data_in, struct output &out + ) { + size_t iterations = 0; + BaseScalarType residual; + + if( !verbose ) + std::cout << "conjugate_gradient: start \n"; + // get user process ID + const size_t s = grb::spmd<>::pid(); + (void)s; + assert( s < grb::spmd<>::nprocs() ); + + // get input n + grb::utils::Timer timer; + timer.reset(); + + grb::RC rc = grb::SUCCESS; + rc = grb::algorithms::conjugate_gradient( + x, L, b, + solver_iterations, tol, + iterations, residual, + r, u, temp + ); + double single_time = timer.time(); + if( !(rc == grb::SUCCESS || rc == grb::FAILED) ) { + std::cerr << "Failure: call to conjugate_gradient did not succeed (" + << grb::toString( rc ) << ")." 
<< std::endl; + } + if( rc == grb::FAILED ) { + if( !verbose ) { + std::cout << "Warning: call to conjugate_gradient did not converge\n"; + } + } + if( rc == grb::SUCCESS ) { + rc = grb::collectives<>::reduce( single_time, 0, grb::operators::max< double >() ); + } + + if( !verbose ) { + // output + std::cout << " solver_iterations = " << solver_iterations << "\n"; + std::cout << " tol = " << tol << "\n"; + std::cout << " iterations = " << iterations << "\n"; + std::cout << " residual = " << residual << "\n"; + } + + if( !verbose ) { + std::cout << "conjugate_gradient: end \n"; + } + + // Return as a tuple: (int, float) + return std::make_tuple(iterations, residual); +} + + diff --git a/pyalp/src/matrix_wrappers.hpp b/pyalp/src/matrix_wrappers.hpp new file mode 100644 index 000000000..b7173d005 --- /dev/null +++ b/pyalp/src/matrix_wrappers.hpp @@ -0,0 +1,126 @@ +#include +#include +#include + +#include + +#include +#include +#include +#include + +namespace py = pybind11; + +template< + typename IntType + , typename ScalarType + > +void buildMatrix( + grb::Matrix< ScalarType >& M, + py::array_t arri, + py::array_t arrj, + py::array_t arrv + ) { + // Check array is 1D + py::buffer_info info_i = arri.request(); + if (info_i.ndim != 1) throw std::runtime_error("Array must be 1D"); + IntType* data_ptr_i = static_cast(info_i.ptr); + auto nnz = info_i.size; + + // Check array is 1D + py::buffer_info info_j = arrj.request(); + if (info_j.ndim != 1) throw std::runtime_error("Array must be 1D"); + IntType* data_ptr_j = static_cast(info_j.ptr); + assert( nnz == info_j.size ); + + // Check array is 1D + py::buffer_info info_v = arrv.request(); + if (info_v.ndim != 1) throw std::runtime_error("Array must be 1D"); + ScalarType* data_ptr_v = static_cast(info_v.ptr); + assert( nnz == info_v.size ); + + grb::RC io_rc; + (void)io_rc; + io_rc = grb::buildMatrixUnique( M, data_ptr_i, data_ptr_j , data_ptr_v, nnz, grb::SEQUENTIAL ); + assert( io_rc == grb::SUCCESS ); +} + +// helper for template specialisation +template +grb::Matrix matrix_factory( + size_t m, size_t n, + py::array data1, + py::array data2, + py::array_t data3) +{ + grb::Matrix mat(m, n); + + // Helper for dispatch + bool handled = false; + auto try_type = [&](auto dummy) { + using IntType = decltype(dummy); + if (py::dtype::of().is(data1.dtype()) && py::dtype::of().is(data2.dtype())) { + buildMatrix( + mat, + data1.cast>(), + data2.cast>(), + data3 + ); + handled = true; + } + }; + + // List of supported integer types + (try_type(int8_t{}), try_type(int16_t{}), try_type(int32_t{}), try_type(int64_t{}), + try_type(uint8_t{}), try_type(uint16_t{}), try_type(uint32_t{}), try_type(uint64_t{})); + + if (!handled) + throw std::runtime_error("Unsupported integer dtype for data1/data2 or nonmatching types of data1 and data2 "); + + return mat; +} + +// Convert a GraphBLAS matrix to COO (i, j, values) numpy arrays and return +// a tuple: (i_array, j_array, values_array, nrows, ncols) +template +py::tuple matrix_to_coo(grb::Matrix &M) { + // Iterate using the matrix const iterators directly. Using the + // nonzeroIterator adapter here triggered instantiation issues due to + // incomplete iterator types in some compilation units. Iterating via the + // matrix's own const_iterator works across backends and avoids the + // incomplete-type problem. 
+ std::vector rows; + std::vector cols; + std::vector vals; + + for (auto it = M.cbegin(); it != M.cend(); ++it) { + // Dereferenced iterator is expected to be a pair where the first + // element contains a pair (i,j) and the second element is the value. + // This matches the ALP/GraphBLAS iterator contract used by backends. + auto entry = *it; + rows.push_back( static_cast( entry.first.first ) ); + cols.push_back( static_cast( entry.first.second ) ); + vals.push_back( static_cast( entry.second ) ); + } + + // Create numpy arrays (copies are fine for interoperability) + py::array_t i_arr(rows.size()); + py::buffer_info i_info = i_arr.request(); + size_t *i_ptr = static_cast(i_info.ptr); + for (size_t k = 0; k < rows.size(); ++k) i_ptr[k] = rows[k]; + + py::array_t j_arr(cols.size()); + py::buffer_info j_info = j_arr.request(); + size_t *j_ptr = static_cast(j_info.ptr); + for (size_t k = 0; k < cols.size(); ++k) j_ptr[k] = cols[k]; + + py::array_t v_arr(vals.size()); + py::buffer_info v_info = v_arr.request(); + ScalarType *v_ptr = static_cast(v_info.ptr); + for (size_t k = 0; k < vals.size(); ++k) v_ptr[k] = vals[k]; + + size_t nrows = grb::nrows(M); + size_t ncols = grb::ncols(M); + + return py::make_tuple(i_arr, j_arr, v_arr, nrows, ncols); +} diff --git a/pyalp/src/pyalp/CMakeLists.txt b/pyalp/src/pyalp/CMakeLists.txt new file mode 100644 index 000000000..ddd4524ca --- /dev/null +++ b/pyalp/src/pyalp/CMakeLists.txt @@ -0,0 +1,17 @@ +cmake_minimum_required(VERSION 3.14) +project(pyalp_bindings LANGUAGES CXX) + +pybind11_add_module(_pyalp module_entry.cpp ../matrix_wrappers.hpp ../utils.hpp ../vector_wrappers.hpp ../conjugate_gradient.hpp) +target_compile_definitions(_pyalp PRIVATE PYALP_MODULE_NAME=_pyalp) +target_compile_definitions(_pyalp PRIVATE PYALP_MODULE_LOCAL=1) +set_target_properties(_pyalp PROPERTIES CXX_STANDARD 14 CXX_STANDARD_REQUIRED YES) +target_include_directories(_pyalp PRIVATE ${CMAKE_SOURCE_DIR}/include ${CMAKE_CURRENT_SOURCE_DIR}/..) + +# Honor top-level NUMA switch: add NO_NUMA_DEF when WITH_NUMA=OFF +if(DEFINED WITH_NUMA AND NOT WITH_NUMA) + if(DEFINED NO_NUMA_DEF) + target_compile_definitions(_pyalp PRIVATE ${NO_NUMA_DEF}) + else() + target_compile_definitions(_pyalp PRIVATE _GRB_NO_LIBNUMA) + endif() +endif() diff --git a/pyalp/src/pyalp/__init__.py b/pyalp/src/pyalp/__init__.py new file mode 100644 index 000000000..bca735792 --- /dev/null +++ b/pyalp/src/pyalp/__init__.py @@ -0,0 +1,107 @@ +"""pyalp Python package init. + +Expose a small Python surface and import compiled extension if available. +""" +from importlib import metadata +import importlib +import pathlib +import sys +import os + +# Do NOT auto-import any compiled backend at package import time. +# Importing compiled extension modules here could cause pybind11 type +# registration conflicts if multiple backends are present. Users should +# explicitly select a backend via `get_backend()` or import a specific +# submodule (e.g. `import pyalp.pyalp_ref`). 
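+#
+# Example (hypothetical interactive session):
+#   import pyalp
+#   backend = pyalp.get_backend('pyalp_ref')   # explicit backend choice
+#   backend = pyalp.get_backend()              # honors PYALP_BACKEND, then
+#                                              # falls back to preferred order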
+_pyalp = None
+
+# compiled metadata will be available after installation or build
+try:
+    from ._metadata import get_build_metadata, get_algorithm_metadata
+except ImportError:  # pragma: no cover - fallback for source tree
+
+    def get_build_metadata():
+        """Return an empty dictionary if metadata is not available."""
+        return {}
+
+    def get_algorithm_metadata():
+        """Return an empty dictionary if metadata is not available."""
+        return {}
+
+
+__all__ = ["version", "get_build_metadata", "get_algorithm_metadata", "get_backend", "list_backends"]
+
+
+def version():
+    try:
+        return metadata.version("pyalp")
+    except Exception:
+        return "0.0.0"
+
+
+# Backend discovery and selection helpers.
+import pkgutil
+
+
+def list_backends():
+    """Return a sorted list of backend module names available in the package.
+
+    This inspects the package directory for compiled extension modules with
+    expected names (e.g. pyalp_ref, pyalp_omp, pyalp_nonblocking, _pyalp).
+    """
+    pkgdir = pathlib.Path(__file__).parent
+    found = set()
+    # Use pkgutil.iter_modules on the package path to discover installed modules
+    try:
+        for mod in pkgutil.iter_modules([str(pkgdir)]):
+            name = mod.name
+            if name in ("_pyalp",) or name.startswith("pyalp_"):
+                found.add(name)
+    except Exception:
+        # fallback: scan filenames
+        for p in pkgdir.iterdir():
+            if p.is_file() and p.suffix in (".so", ".pyd"):
+                stem = p.name.split(".", 1)[0]
+                if stem == "_pyalp" or stem.startswith("pyalp_"):
+                    found.add(stem)
+    return sorted(found)
+
+
+def import_backend(name: str):
+    """Import and return the backend module ``pyalp.<name>``.
+
+    Raises ImportError with a helpful message if the backend is not present.
+    """
+    try:
+        return importlib.import_module(f"{__package__}.{name}")
+    except Exception as e:
+        raise ImportError(f"Backend module '{name}' is not available: {e}") from e
+
+
+# NOTE: `name` takes an optional string. A PEP 604 `str | None` annotation is
+# avoided on purpose: evaluating it at import time raises TypeError on the
+# Python 3.8/3.9 interpreters this package still supports.
+def get_backend(name=None, preferred=("pyalp_omp", "pyalp_nonblocking", "pyalp_ref", "_pyalp")):
+    """Return an imported backend module.
+
+    Selection order:
+    - If ``name`` is provided, import that backend or raise ImportError.
+    - If environment variable PYALP_BACKEND is set, try to import that.
+    - Otherwise iterate over ``preferred`` and return the first available.
+
+    Raises ImportError if no backend is available.
+    """
+    # explicit name wins
+    if name:
+        return import_backend(name)
+
+    # environment override
+    env = os.environ.get("PYALP_BACKEND")
+    if env:
+        return import_backend(env)
+
+    # try preferred list
+    available = set(list_backends())
+    for pref in preferred:
+        if pref in available:
+            return import_backend(pref)
+
+    raise ImportError(f"No pyalp backend available. Found: {sorted(available)}")
diff --git a/pyalp/src/pyalp/_metadata.py.in b/pyalp/src/pyalp/_metadata.py.in
new file mode 100644
index 000000000..d40c3aeda
--- /dev/null
+++ b/pyalp/src/pyalp/_metadata.py.in
@@ -0,0 +1,39 @@
+# pyalp/_metadata.py.in
+"""
+Runtime metadata for the pyalp package.
+
+This file is generated by CMake from _metadata.py.in.
+""" + +__all__ = ["get_build_metadata", "get_algorithm_metadata"] + +_build_metadata = { + "version": "@pyalp_VERSION@", + "build_type": "@CMAKE_BUILD_TYPE@", + "alp_version": "@ALP_VERSION@", + "alp_build_type": "@ALP_BUILD_TYPE@", + "alp_git_commit": "@ALP_GIT_COMMIT_SHA@", + "alp_git_branch": "@ALP_GIT_BRANCH@", + "python_version": "@PYTHON_VERSION@", + "pybind11_version": "@pybind11_VERSION@", + "license": "@ALP_LICENSE@", + "homepage": "https://github.com/Algebraic-Programming/graphblas", +} + +_algorithm_metadata = { + "algorithms": "@pyalp_ALGORITHMS@", + "backends": "@pyalp_BACKENDS@", +} + +# README content (may be large); inserted as a Python triple-quoted string. +_algorithm_metadata["readme"] = """@PYALP_README_ESCAPED@""" + + +def get_build_metadata(): + """Return a dictionary of build-time metadata.""" + return _build_metadata + + +def get_algorithm_metadata(): + """Return a dictionary of available algorithms and backends.""" + return _algorithm_metadata diff --git a/pyalp/src/pyalp/common_bindings.hpp b/pyalp/src/pyalp/common_bindings.hpp new file mode 100644 index 000000000..cc14b19aa --- /dev/null +++ b/pyalp/src/pyalp/common_bindings.hpp @@ -0,0 +1,94 @@ +// Common pybind11 bindings shared by CMake targets and setuptools builds. +#pragma once + +#include +#include + +#include + +#include "utils.hpp" +#include "matrix_wrappers.hpp" +#include "vector_wrappers.hpp" +#include "conjugate_gradient.hpp" + +namespace py = pybind11; + +// Register all pyalp bindings. Module-local registration can be enabled by +// instantiating with ModuleLocal = true. When ModuleLocal==true the +// py::module_local() policy is applied to class bindings to avoid symbol +// collisions when multiple compiled variants are imported in the same +// interpreter. +template +void register_pyalp(py::module_ &m) { + // Common bindings for all backends + m.def("backend_name", [](){ return "backend"; }); + + if constexpr (ModuleLocal) { + py::class_>(m, "Matrix", py::module_local()) + .def(py::init([](size_t m_, size_t n_, + py::array data1, + py::array data2, + py::array_t data3) { + return matrix_factory(m_, n_, data1, data2, data3); + }), + py::arg("m"), py::arg("n"), + py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); + + // Expose a COO serializer so Matrix instances can be moved between + // modules/processes without depending on pybind11 cross-module + // type registration. Returns (i_array, j_array, values_array, nrows, ncols). + m.def("matrix_to_coo", &matrix_to_coo, "Serialize Matrix to COO arrays"); + + py::class_>(m, "Vector", py::module_local()) + .def(py::init()) + .def(py::init([](size_t m, + py::array_t data3) { + grb::Vector< ScalarType > vec(m); // call the basic constructor + buildVector(vec, data3); // initialize with data + return vec; + }), + py::arg("m"), + py::arg("k_array") + ) + .def("to_numpy", &to_numpy, "Convert to numpy array"); + } else { + py::class_>(m, "Matrix") + .def(py::init([](size_t m_, size_t n_, + py::array data1, + py::array data2, + py::array_t data3) { + return matrix_factory(m_, n_, data1, data2, data3); + }), + py::arg("m"), py::arg("n"), + py::arg("i_array"), py::arg("j_array"), py::arg("k_array")); + + // Expose the matrix_to_coo helper in the non-module_local case as well. 
+ m.def("matrix_to_coo", &matrix_to_coo, "Serialize Matrix to COO arrays"); + + py::class_>(m, "Vector") + .def(py::init()) + .def(py::init([](size_t m, + py::array_t data3) { + grb::Vector< ScalarType > vec(m); // call the basic constructor + buildVector(vec, data3); // initialize with data + return vec; + }), + py::arg("m"), + py::arg("k_array") + ) + .def("to_numpy", &to_numpy, "Convert to numpy array"); + } + + m.def("buildVector", &buildVector, "Fill Vector from 1 NumPy array"); + m.def("print_my_numpy_array", &print_my_numpy_array, "Print a numpy array as a flattened std::vector"); + m.def("conjugate_gradient", &conjugate_gradient, "Pass alp data to alp CG solver", + py::arg("L"), + py::arg("x"), + py::arg("b"), + py::arg("r"), + py::arg("u"), + py::arg("temp"), + py::arg("solver_iterations") = 1000, + py::arg("verbose") = 0 + ); +} diff --git a/pyalp/src/pyalp/module_entry.cpp b/pyalp/src/pyalp/module_entry.cpp new file mode 100644 index 000000000..f9bf49e06 --- /dev/null +++ b/pyalp/src/pyalp/module_entry.cpp @@ -0,0 +1,14 @@ +#include +#include "common_bindings.hpp" + +#ifndef PYALP_MODULE_LOCAL +#define PYALP_MODULE_LOCAL 1 +#endif + +PYBIND11_MODULE(PYALP_MODULE_NAME, m) { +#if PYALP_MODULE_LOCAL + register_pyalp(m); +#else + register_pyalp(m); +#endif +} diff --git a/pyalp/src/utils.hpp b/pyalp/src/utils.hpp new file mode 100644 index 000000000..037eda952 --- /dev/null +++ b/pyalp/src/utils.hpp @@ -0,0 +1,20 @@ +#include +#include +#include +#include +#include + +namespace py = pybind11; + +// Print a NumPy array as a std::vector (flattened) +void print_my_numpy_array(py::array_t input) { + py::buffer_info buf = input.request(); + double* ptr = static_cast(buf.ptr); + std::vector vec(ptr, ptr + buf.size); + + std::cout << "Vector contents (flattened): "; + for (double v : vec) { + std::cout << v << " "; + } + std::cout << std::endl; +} diff --git a/pyalp/src/vector_wrappers.hpp b/pyalp/src/vector_wrappers.hpp new file mode 100644 index 000000000..13e2de05f --- /dev/null +++ b/pyalp/src/vector_wrappers.hpp @@ -0,0 +1,65 @@ +#include +#include +#include +#include +#include +#include + +#include + +#include + +#include +#include +#include +#include + +#include + + +namespace py = pybind11; + + +using BaseScalarType = double; +#ifdef _CG_COMPLEX + using ScalarType = std::complex< BaseScalarType >; +#else + using ScalarType = BaseScalarType; +#endif + +void buildVector(grb::Vector< ScalarType >& V, py::array_t arrv) { + + // Check array is 1D + py::buffer_info info_v = arrv.request(); + if (info_v.ndim != 1) throw std::runtime_error("Array must be 1D"); + ScalarType* data_ptr_v = static_cast(info_v.ptr); + + grb::RC io_rc; + (void)io_rc; + io_rc = grb::buildVector( V, data_ptr_v, data_ptr_v + info_v.size, grb::SEQUENTIAL ); + assert( io_rc == grb::SUCCESS ); +} + +py::array_t +to_numpy(grb::Vector< ScalarType >& x) { + grb::PinnedVector< ScalarType > pinnedVector; + pinnedVector = grb::PinnedVector< ScalarType >( x, grb::SEQUENTIAL ); + + std::cout << "create numpy array from grb::vector\n"; + + ScalarType* data = new ScalarType[grb::size(x)]; + for( size_t k = 0; k < grb::size(x); ++k ) { + const auto &value = pinnedVector.getNonzeroValue( k ); + data[k]=value; + } + + // Capsule to manage memory (will delete[] when array is destroyed in Python) + py::capsule free_when_done(data, [](void *f) { + delete[] reinterpret_cast(f); + }); + + // Create NumPy array that shares memory with C++ + py::array_t arr({grb::size(x)}, {sizeof(ScalarType)}, data, free_when_done); + return arr; + +} 
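+
+// Note on ownership: to_numpy() copies the vector contents into a fresh heap
+// buffer and hands ownership to NumPy via the capsule above (delete[] runs
+// when the Python array is garbage collected), so the returned array remains
+// valid even after the source grb::Vector is destroyed.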
diff --git a/pyalp/tests/test_bckds_inprocess.py b/pyalp/tests/test_bckds_inprocess.py new file mode 100644 index 000000000..b8af3f074 --- /dev/null +++ b/pyalp/tests/test_bckds_inprocess.py @@ -0,0 +1,105 @@ +""" +Simple in-process test to verify multiple pyalp backend extension modules +can be imported and used in the same Python interpreter without pybind11 +duplicate-type registration collisions. + +Usage (when building locally): + + # configure & build top-level project with pyalp enabled + cmake -S . -B build -DENABLE_PYALP=ON + cmake --build build --target pyalp + + # run the test pointing PYTHONPATH to the build output + PYTHONPATH=build/pyalp/src python3 pyalp/tests/test_bckds_inprocess.py + +If the build places extensions elsewhere, adjust PYTHONPATH to include that +directory. +""" + +import sys +import importlib +import numpy as np + +# Optionally prepend a build directory. If you're running inside the repo and +# built into ../build, uncomment and adjust the path below. +# sys.path.insert(0, '/path/to/your/build/pyalp/src') + +BACKENDS = ['pyalp_ref', 'pyalp_omp', 'pyalp_nonblocking'] + +def make_simple_matrix(): + # Create arrays for a single non-zero entry at (0,0) with value 1.0 + i = np.array([0], dtype=np.int64) + j = np.array([0], dtype=np.int64) + v = np.array([1.0], dtype=np.float64) + return 1, 1, i, j, v + + +def main(): + m,n,i,j,v = make_simple_matrix() + exercised = 0 + + # If the installed package exposes a `pyalp` package, prefer to query + # it for the list of available backends and skip any that aren't present + # (useful for platform-specific wheels that omit some backends). + installed_backends = None + try: + pkg = importlib.import_module('pyalp') + try: + installed_backends = set(pkg.list_backends()) + except Exception: + installed_backends = None + except ModuleNotFoundError: + installed_backends = None + + for backend in BACKENDS: + # If we detected an installed pyalp package and it doesn't list this + # backend, skip it rather than failing the whole test. + if installed_backends is not None and backend not in installed_backends: + print(f"Backend {backend} not present in installed package, skipping") + continue + + # Try importing the module as a top-level module first (old-style), + # then as a submodule of the installed `pyalp` package. This mirrors + # how the wheel packages the compiled extensions under the `pyalp` + # package (pyalp.pyalp_ref, etc.). We also attach the imported + # submodule to the `pyalp` package object for convenience. 
+ mod = None + try: + mod = importlib.import_module(backend) + except ModuleNotFoundError: + try: + fq = f"pyalp.{backend}" + mod = importlib.import_module(fq) + # Attach to pyalp package so attribute access works + try: + pkg = importlib.import_module('pyalp') + setattr(pkg, backend, mod) + except Exception: + pass + except Exception as e: + print(f"FAILED IMPORT {backend}: {e}") + raise + print(f"Imported {backend}: {mod}") + try: + Matrix = getattr(mod, 'Matrix') + except AttributeError: + print(f"{backend} does not expose Matrix") + raise + # Construct an instance + try: + mat = Matrix(m, n, i, j, v) + exercised += 1 + print(f"Constructed Matrix from {backend}:", type(mat)) + except Exception as e: + print(f"FAILED TO CONSTRUCT Matrix from {backend}: {e}") + raise + + print('\nALL BACKENDS IMPORTED AND INSTANCES CREATED SUCCESSFULLY') + if exercised == 0: + print('ERROR: no backends were exercised (none installed).', file=sys.stderr) + raise SystemExit(2) + else: + print(f'SUCCESS: exercised {exercised} backend(s).') + +if __name__ == '__main__': + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..db8750212 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,4 @@ +[build-system] +requires = ["setuptools>=61.0", "wheel"] +# Use the legacy backend so setup.py is executed, which delegates to pyalp/setup.py. +build-backend = "setuptools.build_meta:__legacy__" diff --git a/setup.py b/setup.py new file mode 100644 index 000000000..0f07871b4 --- /dev/null +++ b/setup.py @@ -0,0 +1,10 @@ +"""This repository is not meant to be built as a Python package at the root. + +Please build wheels from the 'pyalp' subdirectory. +""" + +from setuptools import setup + +raise SystemExit( + "Use 'pip wheel ./pyalp' (cibuildwheel points to the 'pyalp' subdirectory)." +) diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index d90ca5cdb..d12a0fb6d 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -163,6 +163,10 @@ if( GNN_DATASET_PATH ) ) endif() +if( ENABLE_PYALP ) + add_subdirectory( python ) +endif() + add_subdirectory( unit ) add_subdirectory( smoke ) diff --git a/tests/python/CMakeLists.txt b/tests/python/CMakeLists.txt new file mode 100644 index 000000000..0bf2ac485 --- /dev/null +++ b/tests/python/CMakeLists.txt @@ -0,0 +1,23 @@ +# +# Copyright 2021 Huawei Technologies Co., Ltd. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +find_package(Python3 COMPONENTS Interpreter REQUIRED) + +set( TEST_CATEGORY "python" ) + +enable_testing() + + diff --git a/tests/python/backend_smoke_runner.py b/tests/python/backend_smoke_runner.py new file mode 100644 index 000000000..697aaf94f --- /dev/null +++ b/tests/python/backend_smoke_runner.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python3 +""" +Run a small conjugate-gradient smoke test for a single pyalp backend. + +This script is intended to be invoked as a subprocess by tests so each backend +is exercised in a fresh interpreter (avoiding pybind11 registration conflicts). 
+ +Usage: + python backend_smoke_runner.py pyalp_ref + +It prints the iterations, residual, and resulting solution vector to stdout. +""" +import sys +import importlib +import argparse +import numpy as np + + +def run_smoke(backend_name: str) -> int: + # Import backend module as pyalp., fallback to top-level name + try: + m = importlib.import_module(f"pyalp.{backend_name}") + except Exception as e: + print(f"Failed to import backend 'pyalp.{backend_name}': {e}", file=sys.stderr) + return 2 + + idata = np.array([0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4], dtype=np.int32) + jdata = np.array([0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4], dtype=np.int32) + vdata = np.array([1, 1, 1, 1, 0.5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1], dtype=np.float64) + b = np.array([1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float64) + x = np.array([1.0, 1.0, 0.0, 0.3, -1.0], dtype=np.float64) + r = np.zeros(5, dtype=np.float64) + u = np.zeros(5, dtype=np.float64) + tmp = np.zeros(5, dtype=np.float64) + + try: + A = m.Matrix(5, 5, idata, jdata, vdata) + xv = m.Vector(5, x) + bv = m.Vector(5, b) + rv = m.Vector(5, r) + uv = m.Vector(5, u) + tv = m.Vector(5, tmp) + + iterations, residual = m.conjugate_gradient(A, xv, bv, rv, uv, tv, 2000, 0) + print("iterations=", iterations, "residual=", residual) + print("x_result=", xv.to_numpy()) + except Exception as e: + print("Backend test failed:", e, file=sys.stderr) + return 3 + return 0 + + +def main(argv=None): + parser = argparse.ArgumentParser(description="Run pyalp backend smoke test") + parser.add_argument("backend", help="backend module name (e.g. pyalp_ref)") + args = parser.parse_args(argv) + return run_smoke(args.backend) + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/tests/python/numpy_array_print.py b/tests/python/numpy_array_print.py new file mode 100644 index 000000000..283449ced --- /dev/null +++ b/tests/python/numpy_array_print.py @@ -0,0 +1,5 @@ +import pyalp_ref as pyalp +import numpy as np + +arr = np.array([1.1, 2.2, 3.3]) +pyalp.print_my_numpy_array(arr) diff --git a/tests/python/test.py b/tests/python/test.py new file mode 100644 index 000000000..b6628612f --- /dev/null +++ b/tests/python/test.py @@ -0,0 +1,65 @@ +""" +Test script for the pyalp_ref (GraphBLAS-like) Python module. + +This script sets up a small sparse linear system and solves it using the +conjugate gradient method implemented in pyalp_ref. It verifies the solution +against an expected result using numpy's allclose. + +Steps performed: +- Defines a 5x5 sparse matrix in coordinate (COO) format. +- Initializes vectors for the right-hand side (b), initial guess (x), and workspace. +- Constructs pyalp_ref Matrix and Vector objects. +- Runs the conjugate gradient solver. +- Prints the number of iterations, residual, and resulting solution vector. +- Asserts that the computed solution is close to the expected values. + +Usage: + python test.py + +Dependencies: + - numpy + - pyalp_ref (should be available in the Python path) +""" + +import pyalp.pyalp_ref as pyalp +import numpy as np + +# Gnerate a small sparse linear system using numpy arrays +N, M = 5 , 5 +idata = np.array([ 0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4 ],dtype=np.int32) +jdata = np.array([ 0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4 ],dtype=np.int32) +vdata = np.array([ 1, 1, 1, 1, .5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1 ], dtype=np.float64) +b = np.array([ 1., 1., 1., 1., 1. ], dtype=np.float64) +x = np.array([ 1, 1., 0., 0.3, -1. 
], dtype=np.float64) +r = np.zeros(5) +u = np.zeros(5) +tmp = np.zeros(5) + +A=np.zeros((M,N)) +for i,j,v in zip(idata,jdata,vdata): + A[i,j]=v + +pyalp.print_my_numpy_array(b) + +maxiterations = 2000 +verbose = 1 + +######################### +# Create the pyalp_ref Matrix and Vector objects +alpmatrixA = pyalp.Matrix(5,5,idata,jdata,vdata) +alpvectorx = pyalp.Vector(5,x) +alpvectorb = pyalp.Vector(5,b) +alpvectorr = pyalp.Vector(5,r) +alpvectoru = pyalp.Vector(5,u) +alpvectortmp = pyalp.Vector(5,tmp) + +#solve the linear system using conjugate gradient method in pyalp_ref +iterations,residual = pyalp.conjugate_gradient( alpmatrixA, alpvectorx, alpvectorb, alpvectorr, alpvectoru, alpvectortmp, maxiterations, verbose ) +print(" iterations = ", iterations ) +print(" residual = ", residual ) + +# Convert the result vector to a numpy array and print it +x_result=alpvectorx.to_numpy() +print(x_result) +# Check if the result is close to the expected solution +assert(np.allclose(x_result,np.array([ 1., 1., 0., 0.13598679, -0.88396565]))) diff --git a/tests/python/test_backends.py b/tests/python/test_backends.py new file mode 100644 index 000000000..bb0ba2658 --- /dev/null +++ b/tests/python/test_backends.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python3 +""" +Test script for the pyalp backend (example uses the OpenMP backend name +`pyalp_omp`, but you can use `pyalp_ref` or another available backend). + +Usage: + python test_cg.py + +Dependencies: + - numpy + - pyalp (installed and providing a backend such as pyalp_omp) +""" + +import numpy as np + + + +for backendname in ['pyalp_ref','pyalp_omp','pyalp_nonblocking']: + + import pyalp + # Choose the backend module (change name if you want a different backend) + pyalp = pyalp.get_backend(backendname) # or 'pyalp_ref', 'pyalp_nonblocking' + + # Generate a small sparse linear system using numpy arrays + N, M = 5, 5 + idata = np.array([0, 1, 2, 3, 3, 4, 2, 3, 3, 4, 1, 4, 1, 4, 4], dtype=np.int32) + jdata = np.array([0, 1, 2, 3, 2, 2, 1, 4, 1, 1, 0, 3, 0, 3, 4], dtype=np.int32) + vdata = np.array([1, 1, 1, 1, 0.5, 2, 1, 4, 4.4, 1, 0, 3.5, 0, 3, 1], dtype=np.float64) + b = np.array([1.0, 1.0, 1.0, 1.0, 1.0], dtype=np.float64) + x = np.array([1.0, 1.0, 0.0, 0.3, -1.0], dtype=np.float64) + r = np.zeros(5, dtype=np.float64) + u = np.zeros(5, dtype=np.float64) + tmp = np.zeros(5, dtype=np.float64) + + # Create the pyalp Matrix and Vector objects + alpmatrixA = pyalp.Matrix(5, 5, idata, jdata, vdata) + alpvectorx = pyalp.Vector(5, x) + alpvectorb = pyalp.Vector(5, b) + alpvectorr = pyalp.Vector(5, r) + alpvectoru = pyalp.Vector(5, u) + alpvectortmp = pyalp.Vector(5, tmp) + + maxiterations = 2000 + verbose = 1 + + # Solve the linear system using the conjugate gradient method in the backend + iterations, residual = pyalp.conjugate_gradient( + alpmatrixA, + alpvectorx, + alpvectorb, + alpvectorr, + alpvectoru, + alpvectortmp, + maxiterations, + verbose, + ) + print('iterations =', iterations) + print('residual =', residual) + + # Convert the result vector to a numpy array and print it + x_result = alpvectorx.to_numpy() + print('x_result =', x_result) + + # Check if the result is close to the expected solution + assert np.allclose(x_result, np.array([1.0, 1.0, 0.0, 0.13598679, -0.88396565])), 'solution mismatch' + + print("backend ", backendname, " OK") diff --git a/tools/make_wheel_from_so.py b/tools/make_wheel_from_so.py new file mode 100755 index 000000000..f3a901198 --- /dev/null +++ b/tools/make_wheel_from_so.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python3 
diff --git a/tools/make_wheel_from_so.py b/tools/make_wheel_from_so.py new file mode 100755 index 000000000..f3a901198 --- /dev/null +++ b/tools/make_wheel_from_so.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python3
"""Simple helper: build a wheel that packages a prebuilt .so into the pyalp package.

Usage: make_wheel_from_so.py <path-to-so> --out-dir <dir>

The script will create <dir>/pyalp-<version>-<py_tag>-<abi_tag>-<platform>.whl containing:
  - pyalp/__init__.py (minimal stub)
  - pyalp/_pyalp.so (the prebuilt extension module, renamed)
  - pyalp-<version>.dist-info/{METADATA,WHEEL,RECORD}

This is intentionally minimal and meant for CI snapshots where the compiled .so
is produced by your CMake job and we only need to bundle it into a wheel.
"""

import argparse
import re
import sys
import zipfile
import sysconfig
from pathlib import Path

NAME = "pyalp"
VERSION = "0.0.0"


def infer_cp_tag_from_filename(name: str) -> str | None:
    # Try to find a cp311/cp312-style tag
    m = re.search(r"cp(\d{2,3})", name)
    if m:
        return f"cp{m.group(1)}"
    # Try to find a cpython-311-style tag
    m = re.search(r"cpython-(\d{3})", name)
    if m:
        return f"cp{m.group(1)}"
    return None


def make_wheel(so_path: Path, out_dir: Path) -> Path:
    if not so_path.exists():
        raise FileNotFoundError(f".so not found: {so_path}")
    so_name = so_path.name
    cp_tag = infer_cp_tag_from_filename(so_name)
    if not cp_tag:
        # Fall back to the current interpreter if the tag cannot be inferred
        cp_tag = f"cp{sys.version_info.major}{sys.version_info.minor}"
    py_tag = cp_tag
    abi_tag = cp_tag
    plat = sysconfig.get_platform().replace("-", "_").replace(".", "_")
    wheel_fname = f"{NAME}-{VERSION}-{py_tag}-{abi_tag}-{plat}.whl"
    out_dir.mkdir(parents=True, exist_ok=True)
    wheel_path = out_dir / wheel_fname

    init_py = (
        "try:\n"
        "    from . import _pyalp\n"
        "except Exception:\n"
        "    _pyalp = None\n"
        "__all__ = [\"_pyalp\"]\n"
    )

    dist_info = f"{NAME}-{VERSION}.dist-info"
    metadata = (
        "Metadata-Version: 2.1\n"
        f"Name: {NAME}\n"
        f"Version: {VERSION}\n"
        "Summary: pyalp packaged wheel (prebuilt .so)\n"
    )
    wheel_meta = (
        "Wheel-Version: 1.0\n"
        "Generator: make_wheel_from_so.py\n"
        "Root-Is-Purelib: false\n"
        f"Tag: {py_tag}-{abi_tag}-{plat}\n"
    )

    with zipfile.ZipFile(wheel_path, "w", compression=zipfile.ZIP_DEFLATED) as z:
        z.writestr(f"{NAME}/__init__.py", init_py)
        # Normalize the extension module name to _pyalp.so so the package can import it as pyalp._pyalp
        so_target_name = "_pyalp.so"
        z.write(so_path, f"{NAME}/{so_target_name}")
        z.writestr(f"{dist_info}/METADATA", metadata)
        z.writestr(f"{dist_info}/WHEEL", wheel_meta)
        # RECORD should list every file with its hash and size; this minimal CI
        # helper leaves it empty (strict tools may complain, but pip accepts it)
        z.writestr(f"{dist_info}/RECORD", "")

    return wheel_path


def parse_args(argv):
    p = argparse.ArgumentParser(description="Make simple wheel from prebuilt .so")
    p.add_argument("so", help="Path to prebuilt .so file")
    p.add_argument("--out-dir", default="dist_wheel", help="Output directory")
    return p.parse_args(argv)


def main(argv):
    args = parse_args(argv)
    so_path = Path(args.so)
    out_dir = Path(args.out_dir)
    try:
        wheel = make_wheel(so_path, out_dir)
        print("Wheel written to", wheel)
    except Exception as e:
        print("ERROR:", e, file=sys.stderr)
        return 2
    return 0


if __name__ == "__main__":
    raise SystemExit(main(sys.argv[1:]))
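# Example invocation (illustrative; the .so path below is hypothetical):
#
#   python tools/make_wheel_from_so.py \
#       build/pyalp/_pyalp.cpython-311-x86_64-linux-gnu.so --out-dir dist_wheel
#
# infer_cp_tag_from_filename() picks cp311 from the "cpython-311" component, so
# on a Linux x86-64 host this would yield roughly
# dist_wheel/pyalp-0.0.0-cp311-cp311-linux_x86_64.whl.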
diff --git a/tools/smoke_test_pyalp.py b/tools/smoke_test_pyalp.py new file mode 100755 index 000000000..7cc6bf65e --- /dev/null +++ b/tools/smoke_test_pyalp.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python3
"""Smoke test for an installed pyalp wheel.

Usage: python tools/smoke_test_pyalp.py

This script imports `pyalp`, checks for the presence of the `_pyalp` extension,
and tries to call `backend_name()` if present. It exits non-zero on failure and
prints helpful tracebacks.
"""
import sys
import traceback


def main():
    try:
        import pyalp
    except Exception as e:
        print("ERROR: importing pyalp failed:", e, file=sys.stderr)
        traceback.print_exc()
        return 2
    ext = getattr(pyalp, "_pyalp", None)
    ok_ext = ext is not None
    print("pyalp import OK, compiled ext loaded:", ok_ext)
    if not ok_ext:
        # Try to import the extension directly and print diagnostics
        try:
            import importlib
            ext = importlib.import_module("pyalp._pyalp")
            print("Direct import succeeded after fallback.")
            ok_ext = True
        except Exception as e:
            print("Extension import failed:", e, file=sys.stderr)
            traceback.print_exc()
            try:
                import importlib.util
                import pathlib
                spec = importlib.util.find_spec("pyalp")
                pkgdir = None
                if spec and spec.submodule_search_locations:
                    pkgdir = pathlib.Path(list(spec.submodule_search_locations)[0])
                else:
                    pkgdir = pathlib.Path(__import__("pyalp").__file__).parent  # type: ignore[attr-defined]
                print("pyalp dir:", pkgdir)
                print(".so files:")
                for p in pkgdir.iterdir():
                    if p.suffix == ".so":
                        print(" -", p)
            except Exception:
                pass
            # The compiled extension is required for this smoke test; fail
            # instead of falling through and reporting success.
            return 2
    if ok_ext:
        try:
            if ext is not None and hasattr(ext, "backend_name"):
                name = ext.backend_name()
                print("backend_name:", name)
            else:
                print("Extension module loaded but missing backend_name()", file=sys.stderr)
                return 3
        except Exception as e:
            print("calling backend failed:", e, file=sys.stderr)
            traceback.print_exc()
            return 3
    return 0


if __name__ == "__main__":
    sys.exit(main())
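# Typical CI usage (illustrative; the wheel filename is hypothetical):
#
#   pip install dist_wheel/pyalp-0.0.0-cp311-cp311-linux_x86_64.whl
#   python tools/smoke_test_pyalp.py
#
# Exit codes: 0 = OK, 2 = pyalp or its _pyalp extension failed to import,
# 3 = backend_name() missing or raised.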