diff --git a/.ci/build_wheel.py b/.ci/build_wheel.py
new file mode 100644
index 00000000000..79801ad4f47
--- /dev/null
+++ b/.ci/build_wheel.py
@@ -0,0 +1,94 @@
+# This script generates the different versions of the ansys-dpf-core wheels based on a given input.
+# Input can be one of ["any", "win", "manylinux1", "manylinux_2_17"]
+
+import argparse
+import pathlib
+import subprocess
+import os
+import sys
+import shutil
+import tempfile
+
+
+supported_platforms = {
+    "any": "any",
+    "win": "win_amd64",
+    "manylinux1": "manylinux1_x86_64",
+    "manylinux_2_17": "manylinux_2_17_x86_64"
+}
+
+argParser = argparse.ArgumentParser()
+argParser.add_argument("-p", "--platform", help="platform")
+argParser.add_argument("-w", "--wheelhouse", help="build a wheelhouse (wheel plus dependencies) instead of a single wheel", action='store_true')
+
+args = argParser.parse_args()
+
+if args.platform not in supported_platforms:
+    raise ValueError(f"Platform {args.platform} is not supported. "
+                     f"Supported platforms are: {list(supported_platforms.keys())}")
+else:
+    requested_platform = supported_platforms[args.platform]
+print(requested_platform)
+
+# Move binaries out of the source depending on the platform requested
+# any: move all binaries out before building
+# win: move .so binaries out before building
+# manylinux: move .dll binaries out before building
+with tempfile.TemporaryDirectory() as tmpdirname:
+    print('Created temporary directory: ', tmpdirname)
+
+    # Create the temporary build-opts.cfg
+    build_opts_path = os.path.join(tmpdirname, "build-opts.cfg")
+    with open(build_opts_path, "w") as build_opts_file:
+        build_opts_file.write(f"[bdist_wheel]\nplat-name={requested_platform}")
+    os.environ["DIST_EXTRA_CONFIG"] = build_opts_path
+
+    # Move the binaries
+    gatebin_folder_path = os.path.join(
+        os.path.curdir,
+        os.path.join("src", "ansys", "dpf", "gatebin")
+    )
+    binaries_to_move = []
+    moved = []
+    if "win" in requested_platform or "any" == requested_platform:
+        # Move linux binaries
+        binaries_to_move.extend(["libAns.Dpf.GrpcClient.so", "libDPFClientAPI.so"])
+    if "linux" in requested_platform or "any" == requested_platform:
+        # Move windows binaries
+        binaries_to_move.extend(["Ans.Dpf.GrpcClient.dll", "DPFClientAPI.dll"])
+    if "any" == requested_platform:
+        binaries_to_move.extend(["_version.py"])
+
+    for binary_name in binaries_to_move:
+        src = os.path.join(gatebin_folder_path, binary_name)
+        dst = os.path.join(tmpdirname, binary_name)
+        print(f"Moving {src} to {dst}")
+        shutil.move(src=src, dst=dst)
+        moved.append([dst, src])
+
+    if "any" == requested_platform:
+        # Also remove the gatebin folder
+        os.rmdir(gatebin_folder_path)
+
+    # Call the build
+    if not args.wheelhouse:
+        cmd = [sys.executable, "-m", "build", "--wheel"]
+    else:
+        cmd = [sys.executable, "-m", "pip", "wheel", "-w", "dist", "."]
+    try:
+        subprocess.run(cmd, check=True)
+        print("Done building the wheel.")
+    except Exception as e:
+        print(f"Build failed with error: {e}")
+
+    if "any" == requested_platform:
+        # Recreate the gatebin folder
+        os.mkdir(gatebin_folder_path)
+
+    # Move binaries back
+    for move_back in moved:
+        print(f"Moving back {move_back[0]} to {move_back[1]}")
+        shutil.move(src=move_back[0], dst=move_back[1])
+    print("Binaries moved back.")
+
+    print(f"Done building {requested_platform} wheel for ansys-dpf-core!")
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 575111be6f0..b46e7f34226
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -50,6 +50,32 @@ jobs:
       - name: "Run pre-commit"
        run: pre-commit run --all-files
--show-diff-on-failure + build_linux1: + name: "Build linux1 wheel" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: "Install requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build the manylinux1 wheel" + shell: bash + id: wheel + run: | + python .ci/build_wheel.py -p manylinux1 + cd dist + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + cd .. + + - name: "Upload wheel any as artifact" + uses: actions/upload-artifact@v3 + with: + name: ${{ steps.wheel.outputs.wheel_name }} + path: dist/${{ steps.wheel.outputs.wheel_name }} + tests: uses: ./.github/workflows/tests.yml with: @@ -60,6 +86,17 @@ jobs: standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '' }} secrets: inherit + tests_any: + uses: ./.github/workflows/tests.yml + with: + ANSYS_VERSION: "241" + python_versions: '["3.8"]' + wheel: true + wheelhouse: false + standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '' }} + test_any: true + secrets: inherit + docker_tests: name: "Build and Test on Docker" uses: ./.github/workflows/test_docker.yml diff --git a/.github/workflows/ci_release.yml b/.github/workflows/ci_release.yml index 18ce9a69ea1..5f67710831a 100644 --- a/.github/workflows/ci_release.yml +++ b/.github/workflows/ci_release.yml @@ -54,6 +54,32 @@ jobs: - name: "Run pre-commit" run: pre-commit run --all-files --show-diff-on-failure + build_linux1: + name: "Build linux1 wheel" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: "Install requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build the manylinux1 wheel" + shell: bash + id: wheel + run: | + python .ci/build_wheel.py -p manylinux1 + cd dist + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + cd .. 
+ + - name: "Upload wheel any as artifact" + uses: actions/upload-artifact@v3 + with: + name: ${{ steps.wheel.outputs.wheel_name }} + path: dist/${{ steps.wheel.outputs.wheel_name }} + tests: uses: ./.github/workflows/tests.yml with: @@ -64,6 +90,17 @@ jobs: standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.pre0' }} secrets: inherit + tests_any: + uses: ./.github/workflows/tests.yml + with: + ANSYS_VERSION: "241" + python_versions: '["3.8", "3.9", "3.10"]' + wheel: true + wheelhouse: false + standalone_suffix: ${{ github.event.inputs.standalone_branch_suffix || '.pre0' }} + test_any: true + secrets: inherit + docs: uses: ./.github/workflows/docs.yml with: diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 81a5e2da0ea..7af112e4144 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -66,24 +66,53 @@ jobs: echo "ANSYS_DPF_ACCEPT_LA=Y" >> $GITHUB_ENV echo "ANSYSLMD_LICENSE_FILE=1055@${{ secrets.LICENSE_SERVER }}" >> $GITHUB_ENV - - name: Setup Python - uses: actions/setup-python@v4.6.0 + - name: "Setup Python" + uses: actions/setup-python@v4.6.1 with: python-version: ${{ inputs.python_version }} - - name: "Build Package" - id: build-package - uses: ansys/pydpf-actions/build_package@v2.3 + - name: "Install requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build the wheel" + shell: bash + run: | + if [ ${{ matrix.os }} == "ubuntu-latest" ]; then + export platform="manylinux_2_17" + else + export platform="win" + fi + python .ci/build_wheel.py -p $platform -w + + - name: "Expose the wheel" + shell: bash + id: wheel + working-directory: dist + run: | + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + + - name: "Install package wheel" + shell: bash + run: | + pip install dist/${{ steps.wheel.outputs.wheel_name }}[plotting] + + - name: "Install DPF" + id: set-server-path + uses: ansys/pydpf-actions/install-dpf-server@v2.3 with: - python-version: ${{ inputs.python_version }} - ANSYS_VERSION: ${{inputs.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheel: false - wheelhouse: false standalone_suffix: ${{ inputs.standalone_suffix }} + ANSYS_VERSION : ${{inputs.ANSYS_VERSION}} + + - name: "Check licences of packages" + uses: ansys/pydpf-actions/check-licenses@v2.3 + + - name: "Test import" + shell: bash + working-directory: tests + run: python -c "from ansys.dpf import core" - name: "Setup headless display" uses: pyvista/setup-headless-display-action@v1 diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml index 09ef1825a0f..a0dc1df79ed 100644 --- a/.github/workflows/examples.yml +++ b/.github/workflows/examples.yml @@ -71,23 +71,53 @@ jobs: echo "ANSYS_DPF_ACCEPT_LA=Y" >> $GITHUB_ENV echo "ANSYSLMD_LICENSE_FILE=1055@${{ secrets.LICENSE_SERVER }}" >> $GITHUB_ENV - - name: Setup Python + - name: "Setup Python" uses: actions/setup-python@v4.6.0 with: python-version: ${{ matrix.python-version }} - - name: "Build Package" - uses: ansys/pydpf-actions/build_package@v2.3 + - name: "Install requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build the wheel" + shell: bash + run: | + if [ ${{ matrix.os }} == "ubuntu-latest" ]; then + export platform="manylinux_2_17" + else + export platform="win" + fi + python .ci/build_wheel.py -p $platform -w + + - name: "Expose the wheel" + shell: bash + 
id: wheel + working-directory: dist + run: | + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + + - name: "Install package wheel" + shell: bash + run: | + pip install dist/${{ steps.wheel.outputs.wheel_name }}[plotting] + + - name: "Install DPF" + id: set-server-path + uses: ansys/pydpf-actions/install-dpf-server@v2.3 with: - python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{inputs.ANSYS_VERSION}} - PACKAGE_NAME: ${{ env.PACKAGE_NAME }} - MODULE: ${{ env.MODULE }} dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheelhouse: false - wheel: false standalone_suffix: ${{ inputs.standalone_suffix }} + ANSYS_VERSION : ${{inputs.ANSYS_VERSION}} + + - name: "Check licences of packages" + uses: ansys/pydpf-actions/check-licenses@v2.3 + + - name: "Test import" + shell: bash + working-directory: tests + run: python -c "from ansys.dpf import core" - name: "Prepare Testing Environment" uses: ansys/pydpf-actions/prepare_tests@v2.3 diff --git a/.github/workflows/examples_docker.yml b/.github/workflows/examples_docker.yml index ee409e11def..b20ac52b057 100644 --- a/.github/workflows/examples_docker.yml +++ b/.github/workflows/examples_docker.yml @@ -67,23 +67,53 @@ jobs: steps: - uses: actions/checkout@v3 - - name: Setup Python - uses: actions/setup-python@v4.6.0 + - name: "Setup Python" + uses: actions/setup-python@v4.6.1 with: python-version: ${{ matrix.python-version }} - - name: "Build Docker Package" - uses: ansys/pydpf-actions/install-docker-server-and-python-packages@v2.3 + - name: "Install requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build the wheel" + shell: bash + run: | + if [ ${{ matrix.os }} == "ubuntu-latest" ]; then + export platform="manylinux_2_17" + else + export platform="win" + fi + python .ci/build_wheel.py -p $platform -w + + - name: "Expose the wheel" + shell: bash + id: wheel + working-directory: dist + run: | + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + + - name: "Install package wheel" + shell: bash + run: | + pip install dist/${{ steps.wheel.outputs.wheel_name }}[plotting] + + - name: "Install DPF" + id: set-server-path + uses: ansys/pydpf-actions/install-dpf-docker@v2.3 with: - python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{inputs.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheel: false - wheelhouse: false standalone_suffix: ${{ inputs.standalone_suffix }} + ANSYS_VERSION : ${{inputs.ANSYS_VERSION}} + + - name: "Check licences of packages" + uses: ansys/pydpf-actions/check-licenses@v2.3 + + - name: "Test import" + shell: bash + working-directory: tests + run: python -c "from ansys.dpf import core" - name: "Prepare Testing Environment" uses: ansys/pydpf-actions/prepare_tests@v2.3 diff --git a/.github/workflows/gate.yml b/.github/workflows/gate.yml deleted file mode 100644 index 809a406dc37..00000000000 --- a/.github/workflows/gate.yml +++ /dev/null @@ -1,155 +0,0 @@ -name: gate - -on: -# Can be called by the CI - workflow_call: - inputs: - python_versions: - required: false - type: string - default: '["3.8"]' - ANSYS_VERSION: - required: false - type: string - default: "241" - standalone_suffix: - description: "Suffix of the branch on standalone" - required: false - type: string - default: '' -# Can be called manually - workflow_dispatch: - inputs: - 
python_versions: - description: "Python interpreters to test." - required: true - type: string - default: '["3.8"]' - ANSYS_VERSION: - description: "ANSYS version" - required: true - type: string - default: "241" - standalone_suffix: - description: "Suffix of the branch on standalone" - required: false - type: string - default: '' - -env: - PACKAGE_NAME: ansys-dpf-core - MODULE: core - -jobs: - setup: - name: "Setup" - runs-on: ubuntu-latest - outputs: - python_versions: ${{ steps.set_array.outputs.python_versions }} - steps: - - id: set_array - run: | - echo "python_versions=${{ toJSON(inputs.python_versions) }}" >> $GITHUB_OUTPUT - - Gate: - name: "Gate" - needs: setup - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - python-version: ${{ fromJSON(needs.setup.outputs.python_versions) }} - os: ["windows-latest", "ubuntu-latest"] - - steps: - - uses: actions/checkout@v3 - - - name: "Set licensing if necessary" - if: inputs.ANSYS_VERSION > 231 - shell: bash - run: | - echo "ANSYS_DPF_ACCEPT_LA=Y" >> $GITHUB_ENV - echo "ANSYSLMD_LICENSE_FILE=1055@${{ secrets.LICENSE_SERVER }}" >> $GITHUB_ENV - - - name: Setup Python - uses: actions/setup-python@v4.6.0 - with: - python-version: ${{ matrix.python-version }} - - - name: "Build Package" - uses: ansys/pydpf-actions/build_package@v2.3 - with: - python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{inputs.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} - dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheelhouse: false - wheel: false - standalone_suffix: ${{ inputs.standalone_suffix }} - - - name: "Prepare Testing Environment" - uses: ansys/pydpf-actions/prepare_tests@v2.3 - with: - DEBUG: true - - - name: "List installed packages" - shell: bash - run: pip list - - - name: "Check examples with gatebin" - shell: bash - working-directory: .ci - run: | - echo on - python run_examples.py - - - name: "Kill all servers" - uses: ansys/pydpf-actions/kill-dpf-servers@v2.3 - if: always() - - - name: "Uninstall gatebin" - shell: bash - run: | - pip uninstall -y ansys-dpf-gatebin - if: always() - - - name: "Check sanity without gatebin INPROCESS" - shell: bash - working-directory: .ci - run: | - python run_non_regression_examples.py - env: - DPF_SERVER_TYPE: INPROCESS - if: always() - - - name: "Kill all servers" - uses: ansys/pydpf-actions/kill-dpf-servers@v2.3 - if: always() - - - name: "Check sanity without gatebin GRPC" - shell: bash - working-directory: .ci - run: | - python run_non_regression_examples.py - env: - DPF_SERVER_TYPE: GRPC - if: always() - - - name: "Kill all servers" - uses: ansys/pydpf-actions/kill-dpf-servers@v2.3 - if: always() - - - name: "Check sanity without gatebin LEGACYGRPC" - shell: bash - working-directory: .ci - run: | - python run_non_regression_examples.py - env: - DPF_SERVER_TYPE: LEGACYGRPC - if: always() - - - name: "Kill all servers" - uses: ansys/pydpf-actions/kill-dpf-servers@v2.3 - if: always() diff --git a/.github/workflows/pydpf-post.yml b/.github/workflows/pydpf-post.yml index 0c714857236..b9005e12904 100644 --- a/.github/workflows/pydpf-post.yml +++ b/.github/workflows/pydpf-post.yml @@ -73,18 +73,48 @@ jobs: with: python-version: "3.8" - - name: "Build Core Package" - uses: ansys/pydpf-actions/build_package@v2.3 + - name: "Install ansys-dpf-core build requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build ansys-dpf-core wheel" + shell: bash + run: | + if [ ${{ matrix.os }} == 
"ubuntu-latest" ]; then + export platform="manylinux_2_17" + else + export platform="win" + fi + python .ci/build_wheel.py -p $platform -w + + - name: "Expose ansys-dpf-core wheel" + shell: bash + id: wheel + working-directory: dist + run: | + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + + - name: "Install ansys-dpf-core wheel" + shell: bash + run: | + pip install dist/${{ steps.wheel.outputs.wheel_name }}[plotting] + + - name: "Install DPF" + id: set-server-path + uses: ansys/pydpf-actions/install-dpf-server@v2.3 with: - python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{inputs.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheel: false - wheelhouse: false standalone_suffix: ${{ inputs.standalone_suffix }} + ANSYS_VERSION : ${{inputs.ANSYS_VERSION}} + + - name: "Check licences of packages" + uses: ansys/pydpf-actions/check-licenses@v2.3 + + - name: "Test import" + shell: bash + working-directory: tests + run: python -c "from ansys.dpf import core" - name: "Clone PyDPF-Post" shell: bash diff --git a/.github/workflows/releaser.yml b/.github/workflows/releaser.yml index 7c648a9c6b3..a8780ed5acc 100644 --- a/.github/workflows/releaser.yml +++ b/.github/workflows/releaser.yml @@ -11,18 +11,24 @@ env: jobs: Publish_to_PyPI: - name: Publish to PyPI + name: "Publish Release to PyPI" runs-on: ubuntu-latest if: startsWith(github.ref, 'refs/tags/v') steps: - - uses: actions/checkout@v3 + - name: "Download Release Assets" + uses: robinraju/release-downloader@v1.8 + with: + fileName: "*.whl" + tarBall: true + zipBall: true + out-file-path: "assets" + extract: false + token: ${{ secrets.GITHUB_TOKEN }} - name: "Upload to Public PyPI" run: | pip install twine - pip install build - python -m build - twine upload --skip-existing dist/* + twine upload --skip-existing assets/* env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/test_docker.yml b/.github/workflows/test_docker.yml index 09249744c06..18f1aa138c2 100644 --- a/.github/workflows/test_docker.yml +++ b/.github/workflows/test_docker.yml @@ -45,18 +45,54 @@ jobs: steps: - uses: actions/checkout@v3 - - name: "Build Docker Package" - uses: ansys/pydpf-actions/install-docker-server-and-python-packages@v2.3 + + - name: "Setup Python" + uses: actions/setup-python@v4.6.1 with: python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{inputs.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} + + - name: "Install requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build the wheel" + shell: bash + run: | + if [ ${{ matrix.os }} == "ubuntu-latest" ]; then + export platform="manylinux_2_17" + else + export platform="win" + fi + python .ci/build_wheel.py -p $platform -w + + - name: "Expose the wheel" + shell: bash + id: wheel + working-directory: dist + run: | + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + + - name: "Install package wheel" + shell: bash + run: | + pip install dist/${{ steps.wheel.outputs.wheel_name }}[plotting] + + - name: "Install DPF" + id: set-server-path + uses: ansys/pydpf-actions/install-dpf-docker@v2.3 + with: dpf-standalone-TOKEN: ${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheel: false - wheelhouse: false standalone_suffix: ${{ inputs.standalone_suffix }} + ANSYS_VERSION 
: ${{inputs.ANSYS_VERSION}} + + - name: "Check licences of packages" + uses: ansys/pydpf-actions/check-licenses@v2.3 + + - name: "Test import" + shell: bash + working-directory: tests + run: python -c "from ansys.dpf import core" - name: "Prepare Testing Environment" uses: ansys/pydpf-actions/prepare_tests@v2.3 diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 57e9f056c05..7b0d49e6362 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -7,7 +7,7 @@ on: python_versions: required: false type: string - default: '["3.7"]' + default: '["3.8"]' ANSYS_VERSION: required: false type: string @@ -29,6 +29,11 @@ on: required: false type: string default: '' + test_any: + description: "Test the any version of the wheel" + required: false + type: string + default: false # Can be called manually workflow_dispatch: inputs: @@ -62,6 +67,11 @@ on: required: false type: string default: '' + test_any: + description: "Test the any version of the wheel" + required: false + type: string + default: 'false' env: PACKAGE_NAME: ansys-dpf-core @@ -81,6 +91,7 @@ jobs: tests: name: "Tests" needs: setup + timeout-minutes: 30 runs-on: ${{ matrix.os }} strategy: fail-fast: false @@ -96,20 +107,89 @@ jobs: shell: bash run: | echo "ANSYS_DPF_ACCEPT_LA=Y" >> $GITHUB_ENV - echo "ANSYSLMD_LICENSE_FILE=1055@${{ secrets.LICENSE_SERVER }}" >> $GITHUB_ENV + echo "ANSYSLMD_LICENSE_FILE=1055@${{ secrets.LICENSE_SERVER }}" >> $GITHUB_ENV - - name: "Build Package" - uses: ansys/pydpf-actions/build_package@v2.3 + - name: "Setup Python" + uses: actions/setup-python@v4.6.1 with: python-version: ${{ matrix.python-version }} - ANSYS_VERSION: ${{inputs.ANSYS_VERSION}} - PACKAGE_NAME: ${{env.PACKAGE_NAME}} - MODULE: ${{env.MODULE}} + + - name: "Install requirements" + run: pip install -r requirements/requirements_build.txt + + - name: "Build the wheel" + shell: bash + run: | + if [ ${{ inputs.test_any }} == 'true' ]; then + export platform="any" + elif [ ${{ matrix.os }} == "ubuntu-latest" ]; then + export platform="manylinux_2_17" + else + export platform="win" + fi + python .ci/build_wheel.py -p $platform -w + + - name: "Expose the wheel" + shell: bash + id: wheel + working-directory: dist + run: | + export name=`ls ansys_dpf_core*.whl` + echo ${name} + echo "wheel_name=${name[0]}" >> $GITHUB_OUTPUT + + - name: "Upload wheel to artifacts" + if: inputs.wheel == 'true' + uses: actions/upload-artifact@v3 + with: + name: ${{ steps.wheel.outputs.wheel_name }} + path: dist/${{ steps.wheel.outputs.wheel_name }} + + - name: "Define wheelhouse name" + if: inputs.wheelhouse == 'true' + shell: bash + id: wheelhouse + run: | + export wheel_name=${{ steps.wheel.outputs.wheel_name }} + export version=${wheel_name:15:5} + export name=ansys-dpf-core-v$version-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip + echo "name=${name}" >> $GITHUB_OUTPUT + + - name: "Zip wheelhouse" + if: inputs.wheelhouse == 'true' + uses: vimtor/action-zip@v1.1 + with: + files: dist + dest: ${{ steps.wheelhouse.outputs.name }} + + - name: "Upload wheelhouse to artifacts" + if: inputs.wheelhouse == 'true' + uses: actions/upload-artifact@v3.1.2 + with: + name: ${{ steps.wheelhouse.outputs.name }} + path: ${{ steps.wheelhouse.outputs.name }} + retention-days: 7 + + - name: "Install package wheel" + shell: bash + run: | + pip install dist/${{ steps.wheel.outputs.wheel_name }}[plotting] + + - name: "Install DPF" + id: set-server-path + uses: ansys/pydpf-actions/install-dpf-server@v2.3 + with: dpf-standalone-TOKEN: 
${{secrets.DPF_PIPELINE}} - install_extras: plotting - wheel: ${{ inputs.wheel }} - wheelhouse: ${{ inputs.wheelhouse }} standalone_suffix: ${{ inputs.standalone_suffix }} + ANSYS_VERSION : ${{inputs.ANSYS_VERSION}} + + - name: "Check licences of packages" + uses: ansys/pydpf-actions/check-licenses@v2.3 + + - name: "Test import" + shell: bash + working-directory: tests + run: python -c "from ansys.dpf import core" - name: "Prepare Testing Environment" uses: ansys/pydpf-actions/prepare_tests@v2.3 diff --git a/pyproject.toml b/pyproject.toml index af3e4463ea0..422ac4907a7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] build-backend = "setuptools.build_meta" -requires = ["setuptools>=61.0.0"] +requires = ["setuptools>=61.0.0", "wheel"] [project] # Check https://setuptools.pypa.io/en/stable/userguide/quickstart.html for all available sections @@ -63,3 +63,11 @@ source = ["ansys.dpf.core"] [tool.coverage.report] show_missing = true + +[tool.setuptools.packages.find] +namespaces = true +where = ["src"] + +[tool.setuptools.package-data] +"ansys.dpf.gatebin" = ["*.so", "*.dll"] + diff --git a/requirements/requirements_build.txt b/requirements/requirements_build.txt index d0e320c85ea..21e56d2ef85 100644 --- a/requirements/requirements_build.txt +++ b/requirements/requirements_build.txt @@ -1,3 +1,4 @@ black==23.9.1 chevron==0.14.0 wheel==0.41.0 +build==1.0.3 diff --git a/setup.py b/setup.py index a500a23671a..7c7246c339c 100644 --- a/setup.py +++ b/setup.py @@ -2,38 +2,7 @@ """ # To keep according to https://setuptools.pypa.io/en/stable/userguide/pyproject_config.html # to allow -e with pip<21.1 -from setuptools import setup, find_namespace_packages +from setuptools import setup -setup( - package_dir={"": "src"}, - include_package_data=True, - packages=find_namespace_packages(where="src"), - package_data={ - "ansys.dpf.gatebin": ["*.so", "*.dll"], - "ansys.dpf.core.examples": ["**/*"], - }, -) -# "ansys.dpf.core.examples" = [ -# "ASimpleBar.rst", -# "static.rst", -# "complex.rst", -# "model_with_ns.rst", -# "file_cyclic.rst", -# "msup_transient_plate1.rst", -# "rth/rth_electric.rth", -# "rth/rth_steady.rth", -# "rth/rth_transient.rth", -# "sub/cp56.sub", -# "msup/file.mode", -# "msup/file.rst", -# "msup/file.rfrq", -# "distributed/file0.rst", -# "distributed/file1.rst", -# "msup_distributed/file0.rst", -# "msup_distributed/file1.rst", -# "msup_distributed/file0.mode", -# "msup_distributed/file1.mode", -# "msup_distributed/file_load_1.rfrq", -# "msup_distributed/file_load_2.rfrq", -# ] +setup() diff --git a/src/ansys/dpf/core/server.py b/src/ansys/dpf/core/server.py index 39cedcd61c9..a935b2c79cd 100644 --- a/src/ansys/dpf/core/server.py +++ b/src/ansys/dpf/core/server.py @@ -359,13 +359,13 @@ def connect(): try: return connect() except ModuleNotFoundError as e: - if "gatebin" in e.msg: + if "use a LegacyGrpcServer" in e.msg: server_type = ServerFactory.get_remote_server_type_from_config( ServerConfig(protocol=CommunicationProtocols.gRPC, legacy=True) ) warnings.warn( UserWarning( - "Could not connect to remote server as ansys-dpf-gatebin " + "Could not connect to remote server as ansys.dpf.gatebin " "is missing. 
Trying again using LegacyGrpcServer.\n"
                        f"The error stated:\n{e.msg}"
                    )
diff --git a/src/ansys/dpf/gate/load_api.py b/src/ansys/dpf/gate/load_api.py
index 36785face89..5d981fb1fdf 100644
--- a/src/ansys/dpf/gate/load_api.py
+++ b/src/ansys/dpf/gate/load_api.py
@@ -107,9 +107,9 @@ def _get_api_path_from_installer_or_package(ansys_path: str, is_posix: bool):
             dpf_client_found = True
     if not dpf_client_found and not is_ansys_version_old:
         raise ModuleNotFoundError(
-            "To use ansys-dpf-gate as a client API "
-            "install ansys-dpf-gatebin "
-            "with :\n pip install ansys-dpf-gatebin."
+            "To use ansys.dpf.gate as a client API, install the OS-specific "
+            "wheel of ansys-dpf-core instead of the platform-independent "
+            "('any') wheel, or use a LegacyGrpcServer."
         )
     return path
diff --git a/tests/test_service.py b/tests/test_service.py
index d39aac0219a..8dba2ee0756 100644
--- a/tests/test_service.py
+++ b/tests/test_service.py
@@ -366,17 +366,21 @@ def test_load_api_without_awp_root_no_gatebin(restore_awp_root):
     # start CServer
     conf = ServerConfig(protocol=CommunicationProtocols.gRPC, legacy=False)

-    with pytest.warns(
-        UserWarning,
-        match="Could not connect to remote server as ansys-dpf--gatebin "
-        "is missing. Trying again using LegacyGrpcServer.\n",
-    ):
-        serv = dpf.core.connect_to_server(
-            config=conf,
-            as_global=False,
-            ip=loc_serv.external_ip,
-            port=loc_serv.external_port,
-        )
+    # Cannot test this warning as it only happens if the server is remote,
+    # which we cannot test in the CI.
+    # with pytest.warns(
+    #     UserWarning,
+    #     match="Could not connect to remote server as ansys.dpf.gatebin "
+    #     "is missing. Trying again using LegacyGrpcServer.\n",
+    # ):
+    # Here it works because ansys.dpf.gate.load_api._get_api_path_from_installer_or_package
+    # finds DPFClientAPI in the local server installation.
+    _ = dpf.core.connect_to_server(
+        config=conf,
+        as_global=False,
+        ip=loc_serv.external_ip,
+        port=loc_serv.external_port,
+    )


 @pytest.mark.skipif(conftest.IS_USING_GATEBIN, reason="This test must no have gatebin installed")
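
Note: for reference, a minimal local sketch of the wheel-building flow that the workflows above automate. It assumes you run it from the repository root with Python 3.8+; the -p/-w flags, the platform keys, and the dist output directory are taken from .ci/build_wheel.py in this diff, and the exact commands shown in the comments are what the script dispatches to.

    pip install -r requirements/requirements_build.txt
    # Build one platform-specific wheel (script calls "python -m build --wheel")
    python .ci/build_wheel.py -p manylinux_2_17
    # Build a wheelhouse, i.e. the wheel plus its dependencies (script calls "pip wheel -w dist .")
    python .ci/build_wheel.py -p win -w
    ls dist/ansys_dpf_core*.whl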