diff --git a/.github/workflows/build-test-and-sonar.yml b/.github/workflows/build-test-release.yml similarity index 65% rename from .github/workflows/build-test-and-sonar.yml rename to .github/workflows/build-test-release.yml index aa06862..30e4258 100644 --- a/.github/workflows/build-test-and-sonar.yml +++ b/.github/workflows/build-test-release.yml @@ -3,16 +3,9 @@ # SPDX-License-Identifier: MPL-2.0 -name: Build, Test, Sonar and Publish +name: Build, Test and Release on: - push: - branches: - - main - # run pipeline on pull request - pull_request: - # run pipeline on merge queue - merge_group: # run pipeline from another workflow workflow_call: inputs: @@ -45,15 +38,20 @@ jobs: - name: Checkout source code uses: actions/checkout@v4 - - name: Setup Python 3.11 + - name: Setup Python 3.13 uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: "3.13" + + - name: Set PyPI version + uses: PowerGridModel/pgm-version-bump@main + with: + token: ${{ secrets.GITHUB_TOKEN }} - name: Build run: | - pip install requests build - python set_pypi_version.py + cat PYPI_VERSION + pip install build python -m build --outdir wheelhouse . - name: Save version @@ -66,39 +64,6 @@ jobs: name: power-grid-model-ds path: wheelhouse/ - sonar-cloud: - permissions: - contents: write - runs-on: ubuntu-latest - steps: - - - name: Checkout source code - uses: actions/checkout@v4 - with: - fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis - - - name: Setup Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install in develop mode - run: | - pip install -e .[dev] - - - name: Test and Coverage - run: | - coverage run -m pytest - coverage xml - coverage report --fail-under=80 - - - name: SonarCloud Scan - if: ${{ (github.event_name == 'push') || (github.event.pull_request.head.repo.owner.login == 'PowerGridModel') }} - uses: SonarSource/sonarqube-scan-action@v5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} - tests: needs: build-python strategy: @@ -129,27 +94,22 @@ jobs: - name: Unit test and coverage run: pytest --verbose - publish: + github-release: needs: - build-python - tests - - sonar-cloud permissions: contents: write - env: - TWINE_USERNAME: ${{ secrets.PYPI_USER }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASS }} runs-on: ubuntu-latest steps: + - name: Setup Python 3.13 + uses: actions/setup-python@v5 + with: + python-version: "3.13" - name: Checkout source code uses: actions/checkout@v4 # needed by 'Prevent automatic major/minor release' - - name: Setup Python 3.11 - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - name: Load built wheel file uses: actions/download-artifact@v4 with: @@ -174,20 +134,20 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Upload wheels - if: (github.event_name == 'push') || ((github.event_name == 'workflow_dispatch') && (github.event.inputs.create_release == 'true')) - run: | - pip install twine - echo "Publish to PyPI..." 
-          twine upload --verbose wheelhouse/*
+      - name: Get tag
+        id: tag
+        run: echo "tag=v${{ needs.build-python.outputs.version }}" >> $GITHUB_OUTPUT
+
+      - name: Display tag
+        run: echo "${{ steps.tag.outputs.tag }}"
 
       - name: Release
-        if: (github.event_name == 'push') || ((github.event_name == 'workflow_dispatch') && (github.event.inputs.create_release == 'true'))
+        if: (inputs.create_release)
         uses: softprops/action-gh-release@v2
         with:
           files: |
             ./wheelhouse/*
-          tag_name: v${{ needs.build-python.outputs.version }}
+          tag_name: ${{ steps.tag.outputs.tag }}
           prerelease: ${{github.ref != 'refs/heads/main'}}
           generate_release_notes: true
           target_commitish: ${{ github.sha }}
diff --git a/.github/workflows/check-code-quality.yml b/.github/workflows/check-code-quality.yml
index 38c8f20..8ae23f4 100644
--- a/.github/workflows/check-code-quality.yml
+++ b/.github/workflows/check-code-quality.yml
@@ -6,13 +6,6 @@
 name: Check Code Quality
 
 on:
-  push:
-    branches:
-      - main
-  # run pipeline on pull request
-  pull_request:
-  # run pipeline on merge queue
-  merge_group:
   # run pipeline from another workflow
   workflow_call:
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..4f46c26
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,100 @@
+# SPDX-FileCopyrightText: Contributors to the Power Grid Model project
+#
+# SPDX-License-Identifier: MPL-2.0
+
+
+name: CI Build
+
+on:
+  push:
+    branches:
+      - main # run pipeline on push to the main branch
+  pull_request: # run pipeline on pull request
+  merge_group: # run pipeline on merge queue
+  workflow_dispatch: # run this workflow manually from the Actions tab
+    inputs:
+      create_release:
+        type: boolean
+        description: Create a (pre-)release when CI passes
+        default: false
+        required: true
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}-ci-build
+  cancel-in-progress: true
+
+jobs:
+  ci-started:
+    runs-on: ubuntu-latest
+    steps:
+      - run: echo "ci started"
+
+  build-test-release:
+    name: build-test-release
+    uses: "./.github/workflows/build-test-release.yml"
+    permissions:
+      contents: write
+    with:
+      create_release: ${{ (github.event_name == 'workflow_dispatch' && inputs.create_release) || (github.event_name == 'push') }}
+
+  check-code-quality:
+    uses: "./.github/workflows/check-code-quality.yml"
+
+  reuse-compliance:
+    uses: "./.github/workflows/reuse-compliance.yml"
+
+  ci-passed:
+    runs-on: ubuntu-latest
+    needs: [ci-started, build-test-release, check-code-quality, reuse-compliance]
+    if: always()
+
+    steps:
+      # this explicit check is needed, cf. https://github.com/orgs/community/discussions/75568
+      - name: "Check whether all jobs passed"
+        run: echo '${{ toJSON(needs) }}' | jq -e 'to_entries | all(.value.result == "success")'
+      - run: echo "ci passed"
+
+  publish:
+    name: Publish to PyPI
+    runs-on: ubuntu-latest
+    permissions:
+      contents: write
+      id-token: write # Required for Trusted Publishing
+    needs: build-test-release
+    if: (github.event_name == 'workflow_dispatch' && inputs.create_release) || github.event_name == 'push'
+
+    steps:
+      - name: Download assets from GitHub release
+        uses: robinraju/release-downloader@v1
+        with:
+          repository: ${{ github.repository }}
+          # download the latest release
+          latest: true
+          # don't download pre-releases
+          preRelease: false
+          fileName: "*"
+          # don't download GitHub-generated source tar and zip files
+          tarBall: false
+          zipBall: false
+          # create a directory to store the downloaded assets
+          out-file-path: assets-to-publish
+          # don't extract downloaded files
+          extract: false
+
+      - name: List downloaded assets
+        run: ls -la assets-to-publish
+
+      - name: Upload assets to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          # To test, use TestPyPI:
+          repository-url: https://test.pypi.org/legacy/
+          # You must also create an account and project on TestPyPI,
+          # as well as set the trusted publisher in the project settings:
+          # https://docs.pypi.org/trusted-publishers/adding-a-publisher/
+          # To publish to the official PyPI repository instead,
+          # comment out (or remove) the repository-url line above.
+          packages-dir: assets-to-publish
+          skip-existing: true
+          print-hash: true
+          verbose: true
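A note on the `ci-passed` gate above: the job runs with `if: always()`, so it executes even when an upstream job fails, and the `jq -e` filter turns the aggregated `needs` context into the step's exit code. A minimal Python sketch of the same predicate, where `needs_json` is a hypothetical stand-in for the `${{ toJSON(needs) }}` payload GitHub injects:

```python
import json

# Stand-in for `${{ toJSON(needs) }}`; the real payload has one entry per needed job.
needs_json = '{"build-test-release": {"result": "success"}, "reuse-compliance": {"result": "failure"}}'

needs = json.loads(needs_json)
# jq: to_entries | all(.value.result == "success")
all_passed = all(job["result"] == "success" for job in needs.values())
raise SystemExit(0 if all_passed else 1)  # like `jq -e`, non-zero when the check fails
```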
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 8f2bfcf..955162a 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -15,8 +15,8 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
-  main:
-    uses: "./.github/workflows/build-test-and-sonar.yml"
+  build-test-release:
+    uses: "./.github/workflows/build-test-release.yml"
     permissions:
       contents: write
     with:
diff --git a/.github/workflows/reuse-compliance.yml b/.github/workflows/reuse-compliance.yml
index dbed588..5c342b7 100644
--- a/.github/workflows/reuse-compliance.yml
+++ b/.github/workflows/reuse-compliance.yml
@@ -6,13 +6,6 @@
 name: REUSE Compliance Check
 
 on:
-  push:
-    branches:
-      - main
-  # run pipeline on pull request
-  pull_request:
-  # run pipeline on merge queue
-  merge_group:
   # run pipeline from another workflow
   workflow_call:
   # run this workflow manually from the Actions tab
diff --git a/.github/workflows/sonar.yml b/.github/workflows/sonar.yml
new file mode 100644
index 0000000..4113997
--- /dev/null
+++ b/.github/workflows/sonar.yml
@@ -0,0 +1,53 @@
+# SPDX-FileCopyrightText: Contributors to the Power Grid Model project
+#
+# SPDX-License-Identifier: MPL-2.0
+
+name: Sonar Cloud
+
+on:
+  # run pipeline on push event of main branch
+  push:
+    branches:
+      - main
+  # run pipeline on pull request
+  pull_request:
+  # run pipeline on merge queue
+  merge_group:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}-sonar
+  cancel-in-progress: true
+
+jobs:
+  sonar-cloud:
+    permissions:
+      contents: write
+    runs-on: ubuntu-latest
+    steps:
+
+      - name: Checkout source code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
+
+      - name: Setup Python 3.11
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Install in develop mode
+        run: |
+          pip install -e .[dev]
+
+      - name: Test and Coverage
+        run: |
+          coverage run -m pytest
+          coverage xml
+          coverage report --fail-under=80
+
+      - name: SonarCloud Scan
+        if: ${{ (github.event_name == 'push') || (github.event.pull_request.head.repo.owner.login == 'PowerGridModel') }}
+        uses: SonarSource/sonarqube-scan-action@v5
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
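The `Test and Coverage` step in sonar.yml chains three `coverage` CLI commands. A rough Python-API equivalent of that gate is sketched below, illustrative only (the workflow uses the CLI, and in-process measurement can differ slightly from `coverage run`):

```python
import coverage
import pytest

cov = coverage.Coverage()
cov.start()
exit_code = pytest.main([])  # `coverage run -m pytest`
cov.stop()
cov.save()
cov.xml_report()             # `coverage xml`, writes coverage.xml for the Sonar scanner
total = cov.report()         # `coverage report`, returns the total percentage
if exit_code != 0 or total < 80:  # `coverage report --fail-under=80`
    raise SystemExit(1)
```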
diff --git a/README.md b/README.md
index a415ba9..6c43d37 100644
--- a/README.md
+++ b/README.md
@@ -9,9 +9,7 @@ SPDX-License-Identifier: MPL-2.0
 [![Downloads](https://static.pepy.tech/badge/power-grid-model-ds)](https://pepy.tech/project/power-grid-model-ds)
 [![Downloads](https://static.pepy.tech/badge/power-grid-model-ds/month)](https://pepy.tech/project/power-grid-model-ds)
 
-[![Build and Test Python](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/build-test-and-sonar.yml/badge.svg)](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/build-test-and-sonar.yml)
-[![Check Code Quality](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/check-code-quality.yml/badge.svg)](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/check-code-quality.yml)
-[![REUSE Compliance Check](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/reuse-compliance.yml/badge.svg)](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/reuse-compliance.yml)
+[![CI Build](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/ci.yml/badge.svg)](https://github.com/PowerGridModel/power-grid-model-ds/actions/workflows/ci.yml)
 [![docs](https://readthedocs.org/projects/power-grid-model-ds/badge/)](https://power-grid-model-ds.readthedocs.io/en/stable/)
 [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=PowerGridModel_power-grid-model-ds&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=PowerGridModel_power-grid-model-ds)
 
@@ -25,7 +23,7 @@ SPDX-License-Identifier: MPL-2.0
 
 [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.14825565.svg)](https://zenodo.org/record/14825565)
 
-[![](https://github.com/PowerGridModel/.github/blob/main/artwork/svg/color.svg)](#)
+[![Power Grid Model logo](https://github.com/PowerGridModel/.github/blob/main/artwork/svg/color.svg)](#)
 
 # Power Grid Model Data Science (DS)
diff --git a/set_pypi_version.py b/set_pypi_version.py
deleted file mode 100644
index 234fda7..0000000
--- a/set_pypi_version.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# SPDX-FileCopyrightText: Contributors to the Power Grid Model project
-#
-# SPDX-License-Identifier: MPL-2.0
-
-# script to set version dynamically
-# read VERSION and PyPI, set PYPI_VERSION
-
-
-import os
-import re
-from pathlib import Path
-
-import requests
-
-
-def set_version(pkg_dir: Path):
-    with open(pkg_dir / "VERSION") as f:
-        version = f.read().strip().strip("\n")
-    major, minor = (int(x) for x in version.split("."))
-    latest_major, latest_minor, latest_patch = get_pypi_latest()
-    # get version
-    version = get_new_version(major, minor, latest_major, latest_minor, latest_patch)
-    # mutate version in GitHub Actions
-    if ("GITHUB_SHA" in os.environ) and ("GITHUB_REF" in os.environ) and ("GITHUB_RUN_NUMBER" in os.environ):
-        sha = os.environ["GITHUB_SHA"]
-        ref = os.environ["GITHUB_REF"]
-        build_number = os.environ["GITHUB_RUN_NUMBER"]
-        # short hash number in numeric
-        short_hash = f"{int(f'0x{sha[0:6]}', base=16):08}"
-
-        if "main" in ref:
-            # main branch
-            # major.minor.patch
-            # do nothing
-            pass
-        else:
-            # feature branch
-            # major.minor.patch a1 build_number short_hash
-            version += f"a1{build_number}{short_hash}"
-    with open(pkg_dir / "PYPI_VERSION", "w") as f:
-        f.write(version)
-
-
-def get_pypi_latest():
-    response = requests.get("https://pypi.org/pypi/power-grid-model-ds/json")
-    if response.status_code == 404:
-        return 0, 0, 0
-    version = str(response.json()["info"]["version"])
-
-    version_pattern = re.compile(r"^\d+\.\d+\.\d+")
-    match = version_pattern.match(version)
-    if not match:
-        raise ValueError(f"Invalid version format: {version}")
-    return (int(x) for x in match.group(0).split("."))
-
-
-def get_new_version(major, minor, latest_major, latest_minor, latest_patch):
-    if (major > latest_major) or ((major == latest_major) and minor > latest_minor):
-        # brand-new version with patch zero
-        return f"{major}.{minor}.0"
-
-    if major == latest_major and minor == latest_minor:
-        # current version, increment patch
-        return f"{major}.{minor}.{latest_patch + 1}"
-
-    # does not allow building older version
-    raise ValueError(
-        "Invalid version number!\n"
-        f"latest version: {latest_major}.{latest_minor}.{latest_patch}\n"
-        f"to be built version: {major}.{minor}\n"
-    )
-
-
-if __name__ == "__main__":
-    set_version(Path(__file__).parent)
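For reference, the version arithmetic removed here (the `Build` job earlier in this diff now appears to delegate versioning to the `PowerGridModel/pgm-version-bump` action, which writes `PYPI_VERSION`) behaved as follows; the argument values are illustrative:

```python
# get_new_version(major, minor, latest_major, latest_minor, latest_patch)
get_new_version(1, 3, 1, 2, 7)  # -> "1.3.0": new minor version, patch restarts at zero
get_new_version(1, 2, 1, 2, 7)  # -> "1.2.8": same major.minor, patch increments
get_new_version(1, 1, 1, 2, 7)  # raises ValueError: building an older version is not allowed

# On a feature branch, set_version() then appended an alpha suffix, e.g. for
# GITHUB_RUN_NUMBER=42 and short hash 01975462: "1.2.8" -> "1.2.8a14201975462"
```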
diff --git a/src/power_grid_model_ds/_core/model/arrays/base/_build.py b/src/power_grid_model_ds/_core/model/arrays/base/_build.py
index 6b0d757..7752d21 100644
--- a/src/power_grid_model_ds/_core/model/arrays/base/_build.py
+++ b/src/power_grid_model_ds/_core/model/arrays/base/_build.py
@@ -25,13 +25,13 @@ def build_array(*args: tuple[Any], dtype: np.dtype, defaults: dict[str, np.gener
         return array
 
     if isinstance(parsed_input, np.ndarray) and parsed_input.dtype.names:
-        _check_missing_columns(array.dtype.names, defaults, set(parsed_input.dtype.names))
+        _check_missing_columns(array.dtype.names or (), defaults, set(parsed_input.dtype.names))
         return _parse_structured_array(parsed_input, array)
     if isinstance(parsed_input, np.ndarray):
         # Note: defaults are not supported when working with unstructured arrays
         return _parse_array(parsed_input, array.dtype)
 
-    _check_missing_columns(array.dtype.names, defaults, set(parsed_input.keys()))
+    _check_missing_columns(array.dtype.names or (), defaults, set(parsed_input.keys()))
     _fill_with_kwargs(array, parsed_input)
     return array
 
@@ -54,7 +54,7 @@ def _parse_input(*args: Any, dtype: np.dtype, **kwargs):
     return {}, 0
 
 
-def _check_missing_columns(array_columns: tuple, defaults: dict[str, np.generic], provided_columns: set[str]):
+def _check_missing_columns(array_columns: tuple[str, ...], defaults: dict[str, np.generic], provided_columns: set[str]):
     required_columns = set(array_columns) - set(defaults.keys())
     if missing_columns := required_columns - provided_columns:
         raise ValueError(f"Missing required columns: {missing_columns}")
 
@@ -64,7 +64,8 @@ def _fill_defaults(array: np.ndarray, defaults: dict[str, np.generic]):
     """Fills the defaults into the array."""
     for column, default in defaults.items():
         if default is empty:
-            array[column] = empty(array.dtype[column])  # type: ignore[call-overload]
+            column_type: type = array.dtype[column]
+            array[column] = empty(column_type)  # type: ignore[call-overload]
         else:
             array[column] = default  # type: ignore[call-overload]
 
@@ -87,8 +88,8 @@ def _parse_structured_array(from_array: np.ndarray, to_array: np.ndarray) -> np.
def _determine_column_overlap(from_array: np.ndarray, to_array: np.ndarray) -> tuple[list[str], list[str]]: """Returns two lists: columns present in both arrays and the columns that are only present in from_array""" - from_columns = set(from_array.dtype.names) - to_columns = set(to_array.dtype.names) + from_columns = set(from_array.dtype.names or ()) + to_columns = set(to_array.dtype.names or ()) return list(from_columns & to_columns), list(from_columns - to_columns) diff --git a/src/power_grid_model_ds/_core/model/arrays/base/_filters.py b/src/power_grid_model_ds/_core/model/arrays/base/_filters.py index c21ab35..5d1e719 100644 --- a/src/power_grid_model_ds/_core/model/arrays/base/_filters.py +++ b/src/power_grid_model_ds/_core/model/arrays/base/_filters.py @@ -20,7 +20,7 @@ def get_filter_mask( """Returns a mask that matches the input parameters.""" parsed_kwargs = _parse(args, kwargs) - if invalid_kwargs := set(parsed_kwargs.keys()) - set(array.dtype.names): + if invalid_kwargs := set(parsed_kwargs.keys()) - set(array.dtype.names or ()): raise ValueError(f"Invalid kwargs: {invalid_kwargs}") filter_mask = _initialize_filter_mask(mode_, array.size) diff --git a/src/power_grid_model_ds/_core/model/arrays/base/_modify.py b/src/power_grid_model_ds/_core/model/arrays/base/_modify.py index 43918fc..8568986 100644 --- a/src/power_grid_model_ds/_core/model/arrays/base/_modify.py +++ b/src/power_grid_model_ds/_core/model/arrays/base/_modify.py @@ -12,7 +12,7 @@ def re_order(array: np.ndarray, new_order: ArrayLike, column: str = "id") -> np. """Re-order an id-array by the id column so that it follows a new_order. Expects the new_order input to contain the same values as self.id """ - if column not in array.dtype.names: + if column not in (array.dtype.names or ()): raise ValueError(f"Cannot re-order array: column {column} does not exist.") if not np.array_equal(np.sort(array[column]), np.sort(new_order)): raise ValueError(f"Cannot re-order array: mismatch between new_order and values in '{column}'-column.") @@ -50,7 +50,7 @@ def update_by_id(array: np.ndarray, ids: ArrayLike, allow_missing: bool, **kwarg def check_ids(array: np.ndarray, return_duplicates: bool = False) -> NDArray | None: """Check for duplicate ids within the array""" - if "id" not in array.dtype.names: + if "id" not in (array.dtype.names or ()): raise AttributeError("Array has no 'id' column.") unique, counts = np.unique(array["id"], return_counts=True)
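The common thread in the `_build.py`, `_filters.py`, and `_modify.py` changes above: `np.dtype.names` is `None` for unstructured dtypes (and typed as `tuple[str, ...] | None`), so passing it straight to `set()` or the `in` operator fails at runtime and under static type checking; the `or ()` fallback keeps those code paths well-defined. A small illustration:

```python
import numpy as np

structured = np.zeros(3, dtype=[("id", np.int64), ("u_rated", np.float64)])
unstructured = np.zeros(3, dtype=np.float64)

print(structured.dtype.names)    # ('id', 'u_rated')
print(unstructured.dtype.names)  # None

# set(None) and `"id" in None` raise TypeError; the `or ()` fallback avoids that:
print(set(unstructured.dtype.names or ()))       # set()
print("id" in (unstructured.dtype.names or ()))  # False
```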