diff --git a/.github/actionlint.yml b/.github/actionlint.yml index cb4b420b4e..afa1dc01d1 100644 --- a/.github/actionlint.yml +++ b/.github/actionlint.yml @@ -17,3 +17,4 @@ paths: ignore: # This runner exists, but is in beta and not known to actionlint. - 'label "windows-11-arm" is unknown\. .+' + - 'property "check_run_id" is not defined.+' diff --git a/.github/scripts/get-envs.py b/.github/scripts/get-envs.py index d772130bfc..dc8a28b6af 100755 --- a/.github/scripts/get-envs.py +++ b/.github/scripts/get-envs.py @@ -15,23 +15,22 @@ import fileinput import os +from pathlib import Path from textwrap import dedent GROUP_NUMBER = int(os.environ["GROUP_NUMBER"]) - 1 TOTAL_GROUPS = int(os.environ["TOTAL_GROUPS"]) GITHUB_JOB = os.environ["GITHUB_JOB"] +GITHUB_OUTPUT = os.environ.get("GITHUB_OUTPUT", None) def main(stdin): environments = [env.rstrip() for env in stdin] filtered_envs = [env for env in environments if env.startswith(GITHUB_JOB + "-")] grouped_envs = filtered_envs[GROUP_NUMBER::TOTAL_GROUPS] - joined_envs = ",".join(grouped_envs) # If no environments are found, raise an error with helpful information. - if joined_envs: - print(joined_envs) - else: + if not grouped_envs: error_msg = dedent(f""" No matching environments found. GITHUB_JOB = {GITHUB_JOB} @@ -41,10 +40,17 @@ def main(stdin): environments = {environments} filtered_envs = {filtered_envs} grouped_envs = {grouped_envs} - joined_envs = {joined_envs} """) raise RuntimeError(error_msg) + # Output results to GITHUB_OUTPUT for use in later steps. + if GITHUB_OUTPUT: + with Path(GITHUB_OUTPUT).open("a") as output_fh: + print(f"envs={','.join(grouped_envs)}", file=output_fh) + + # Output human readable results to stdout for visibility in logs. + print("\n".join(grouped_envs)) + if __name__ == "__main__": with fileinput.input() as stdin: diff --git a/.github/scripts/tox-summary.py b/.github/scripts/tox-summary.py new file mode 100755 index 0000000000..618e168dbc --- /dev/null +++ b/.github/scripts/tox-summary.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
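# --- Illustrative sketch (editor's aside, not part of the upstream patch) ---
# The get-envs.py change above keeps the round-robin grouping
# (filtered_envs[GROUP_NUMBER::TOTAL_GROUPS]) but now writes the joined list to
# GITHUB_OUTPUT and prints one environment per line for the job log. A minimal
# sketch of that slicing with TOTAL_GROUPS=2, using env names taken from the
# tox.ini envlist later in this patch purely as sample data:
filtered_envs = [
    "postgres16-datastore_asyncpg-py313",
    "postgres16-datastore_psycopg-py313-psycopglatest",
    "postgres16-datastore_psycopg2-py312-psycopg2latest",
    "postgres16-datastore_pyodbc-py313-pyodbclatest",
]
groups = [filtered_envs[group_number::2] for group_number in range(2)]
assert groups[0] == [filtered_envs[0], filtered_envs[2]]  # GROUP_NUMBER=1 in the matrix (0 after the -1 offset)
assert groups[1] == [filtered_envs[1], filtered_envs[3]]  # GROUP_NUMBER=2 in the matrix
# Every environment lands in exactly one group, so parallel matrix jobs never overlap.
# -----------------------------------------------------------------------------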
+ +import json import os import re from pathlib import Path from textwrap import dedent + +REPO_DIR = Path(__file__).parent.parent.parent +TOX_DIR = REPO_DIR / ".tox" +GITHUB_SUMMARY = Path(os.environ.get("GITHUB_STEP_SUMMARY", TOX_DIR / "summary.md")) +RESULTS_FILE_RE = re.compile( + r"(?P<job_name>[a-zA-Z0-9_-]+)-(?P<job_num>\d+)-(?P<run_id>[a-zA-Z0-9]+)-(?P<job_id>[a-zA-Z0-9_-]+)-results.json" +) + +GITHUB_SERVER_URL = os.environ.get("GITHUB_SERVER_URL", "https://github.com") +GITHUB_REPOSITORY = os.environ.get("GITHUB_REPOSITORY", "newrelic/newrelic-python-agent") + +TABLE_HEADER = """ +# Tox Results Summary + +| Environment | Status | Duration (s) | Setup Duration (s) | Test Duration (s) | Runner | +|-------------|--------|--------------|--------------------|-------------------|--------| +""" +TABLE_HEADER = dedent(TABLE_HEADER).strip() + + +def main(): + results = {} + # Search both repo and .tox dirs + filepaths = list(REPO_DIR.glob("*-results.json")) + list(TOX_DIR.glob("*-results.json")) + for filepath in filepaths: + with filepath.open() as f: + # Load the JSON data + data = json.load(f) + envs = data.get("testenvs", ()) + + # Extract GitHub info from filename + match = RESULTS_FILE_RE.match(filepath.name) + if match: + runner_link = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/actions/runs/{match.group('run_id')}/job/{match.group('job_id')}" + runner = f"[{match.group('job_name')} ({match.group('job_num')})]({runner_link})" + else: + runner = "N/A" + + # Aggregate any non-empty results + sub_results = {k: v for k, v in envs.items() if v and k != ".pkg"} + for result in sub_results.values(): + result["runner"] = runner + results.update(sub_results) + + if not results: + raise RuntimeError("No tox results found.") + + with GITHUB_SUMMARY.open("w") as output_fp: + summary = summarize_results(results) + # Print table header + print(TABLE_HEADER, file=output_fp) + + for result in summary: + line = "| {env_name} | {status} | {duration} | {setup_duration} | {test_duration} | {runner} |".format( + **result + ) + print(line, file=output_fp) + + +def summarize_results(results): + summary = [] + for env, result in results.items(): + duration = result["result"].get("duration", 0) + duration = f"{duration:.2f}" if duration >= 0 else "N/A" + status = "OK ✅" if result["result"]["success"] else "FAIL ❌" + runner = result.get("runner", "N/A") + + # Sum up setup and test durations from individual commands + setup_duration = 0 + for cmd in result.get("setup", ()): + setup_duration += cmd.get("elapsed", 0) + setup_duration = f"{setup_duration:.2f}" if setup_duration >= 0 else "N/A" + + test_duration = 0 + for cmd in result.get("test", ()): + test_duration += cmd.get("elapsed", 0) + test_duration = f"{test_duration:.2f}" if test_duration >= 0 else "N/A" + + summary.append( + { + "env_name": env, + "status": status, + "duration": duration, + "setup_duration": setup_duration, + "test_duration": test_duration, + "runner": runner, + } + ) + + return sorted(summary, key=lambda result: (1 if "OK" in result["status"] else 0, result["env_name"])) + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index b2718cb9d7..513e467f29 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -31,7 +31,7 @@ jobs: timeout-minutes: 30 strategy: matrix: - python: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + python: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"] env: ASV_FACTOR: "1.1" diff --git a/.github/workflows/deploy.yml
b/.github/workflows/deploy.yml index e447122709..2337ee8d40 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -41,6 +41,8 @@ jobs: os: ubuntu-24.04 - wheel: cp313-manylinux os: ubuntu-24.04 + - wheel: cp314-manylinux + os: ubuntu-24.04 # Linux musllibc - wheel: cp38-musllinux os: ubuntu-24.04 @@ -54,10 +56,14 @@ jobs: os: ubuntu-24.04 - wheel: cp313-musllinux os: ubuntu-24.04 + - wheel: cp314-musllinux + os: ubuntu-24.04 # Windows # Windows wheels won't be published until the full release announcement. # - wheel: cp313-win # os: windows-2025 + # - wheel: cp314-win + # os: windows-2025 name: Build wheels for ${{ matrix.wheel }} runs-on: ${{ matrix.os }} diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index e516fd7e4c..3cf295af90 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -102,6 +102,7 @@ jobs: - name: Download Coverage Artifacts uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # 5.0.0 with: + pattern: coverage-* path: ./ - name: Combine Coverage @@ -118,6 +119,31 @@ jobs: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} + # Summarize tox results from all runners + summary: + runs-on: ubuntu-24.04 + if: success() || failure() # Does not run on cancelled workflows + needs: + - tests + + steps: + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # 5.0.0 + - uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # 6.0.0 + with: + python-version: "3.13" + architecture: x64 + + - name: Download Results Artifacts + uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # 5.0.0 + with: + pattern: results-* + path: ./ + + - name: Report Results Summary + run: | + find . -name "*-results.json" -exec mv {} ./ \; + python ./.github/scripts/tox-summary.py + # ====================================== # OS Specific Core Test Suite Runners # - Linux runs in the CI container @@ -155,19 +181,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -175,6 +205,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + linux_arm64: env: TOTAL_GROUPS: 2 @@ -206,19 +246,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{
steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -226,6 +270,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + windows: env: TOTAL_GROUPS: 1 @@ -250,7 +304,9 @@ jobs: - name: Install Python uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # 6.0.0 with: - python-version: "3.13" + python-version: | + 3.13 + 3.14 - name: Install uv uses: astral-sh/setup-uv@3259c6206f993105e3a61b142c2d97bf4b9ef83d # 7.1.0 @@ -262,19 +318,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -282,6 +342,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + windows_arm64: env: TOTAL_GROUPS: 1 @@ -306,7 +376,9 @@ jobs: - name: Install Python uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # 6.0.0 with: - python-version: "3.13" + python-version: | + 3.13 + 3.14 - name: Install uv uses: astral-sh/setup-uv@3259c6206f993105e3a61b142c2d97bf4b9ef83d # 7.1.0 @@ -318,19 +390,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -338,6 +414,16 @@ jobs: if-no-files-found: error retention-days: 1 + - 
name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + # ==================================== # Integration Test Suite Runners # - runs on Linux in the CI container @@ -374,19 +460,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -394,6 +484,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + cassandra: env: TOTAL_GROUPS: 1 @@ -443,19 +543,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -463,6 +567,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + elasticsearchserver07: env: TOTAL_GROUPS: 1 @@ -509,19 +623,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: 
actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -529,6 +647,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + elasticsearchserver08: env: TOTAL_GROUPS: 1 @@ -576,19 +704,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -596,6 +728,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + firestore: env: TOTAL_GROUPS: 1 @@ -647,19 +789,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -667,6 +813,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + grpc: env: TOTAL_GROUPS: 1 @@ -698,19 +854,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} + tox run \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + 
--exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -718,6 +878,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + kafka: env: TOTAL_GROUPS: 4 @@ -774,19 +944,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -794,6 +968,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + memcached: env: TOTAL_GROUPS: 1 @@ -838,19 +1022,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -858,6 +1046,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + mongodb3: env: TOTAL_GROUPS: 1 @@ -902,19 +1100,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + 
--result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -922,6 +1124,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + mongodb8: env: TOTAL_GROUPS: 1 @@ -966,19 +1178,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -986,6 +1202,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + mssql: env: TOTAL_GROUPS: 1 @@ -1035,19 +1261,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1055,6 +1285,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + mysql: env: TOTAL_GROUPS: 1 @@ -1104,19 +1344,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - 
tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1124,6 +1368,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + nginx: env: TOTAL_GROUPS: 1 @@ -1169,19 +1423,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1189,6 +1447,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + oracledb: env: TOTAL_GROUPS: 1 @@ -1236,19 +1504,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1256,6 +1528,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + postgres9: env: TOTAL_GROUPS: 1 @@ -1302,19 +1584,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> 
"$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1322,6 +1608,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + postgres16: env: TOTAL_GROUPS: 2 @@ -1368,19 +1664,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1388,6 +1688,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + rabbitmq: env: TOTAL_GROUPS: 1 @@ -1433,19 +1743,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1453,6 +1767,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + redis: env: TOTAL_GROUPS: 1 @@ 
-1497,19 +1821,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1517,6 +1845,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + rediscluster: env: TOTAL_GROUPS: 1 @@ -1602,19 +1940,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1622,6 +1964,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + solr: env: TOTAL_GROUPS: 1 @@ -1668,19 +2020,23 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* @@ -1688,6 +2044,16 @@ jobs: if-no-files-found: error retention-days: 1 + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: 
./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + valkey: env: TOTAL_GROUPS: 1 @@ -1732,22 +2098,36 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + tox -l | python ./.github/scripts/get-envs.py env: GROUP_NUMBER: ${{ matrix.group-number }} - name: Test run: | - tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + tox run-parallel \ + -vv -e ${{ steps.get-envs.outputs.envs }} \ + --result-json=./.tox/${{ github.job }}-${{ matrix.group-number }}-${{ github.run_id }}-${{ job.check_run_id }}-results.json \ + --exit-and-dump-after=900 env: TOX_PARALLEL_NO_SPINNER: 1 FORCE_COLOR: "true" - name: Upload Coverage Artifacts uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} path: ./**/.coverage.* include-hidden-files: true if-no-files-found: error retention-days: 1 + + - name: Upload Results Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + if: always() + with: + name: results-${{ github.job }}-${{ strategy.job-index }} + path: ./.tox/*-results.json + include-hidden-files: true + if-no-files-found: error + retention-days: 1 diff --git a/.gitignore b/.gitignore index 4acad5ce17..63a3cb8877 100644 --- a/.gitignore +++ b/.gitignore @@ -35,7 +35,6 @@ share/python-wheels/ .installed.cfg *.egg MANIFEST -_version.py version.txt version.py _version.py diff --git a/MANIFEST.in b/MANIFEST.in index 35cd7ee08f..d683ef3cc0 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,7 +2,6 @@ include MANIFEST.in include README.md include LICENSE include THIRD_PARTY_NOTICES.md -include newrelic/version.txt include newrelic/newrelic.ini include newrelic/common/cacert.pem include newrelic/packages/wrapt/LICENSE diff --git a/asv.conf.json b/asv.conf.json index 5826289a5e..203d52c887 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -6,7 +6,7 @@ "repo": ".", "environment_type": "virtualenv", "install_timeout": 120, - "pythons": ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"], + "pythons": ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13", "3.14"], "benchmark_dir": "tests/agent_benchmarks", "env_dir": ".asv/env", "results_dir": ".asv/results", diff --git a/newrelic/hooks/coroutines_asyncio.py b/newrelic/hooks/coroutines_asyncio.py index 8dc4e27ef9..41fc776595 100644 --- a/newrelic/hooks/coroutines_asyncio.py +++ b/newrelic/hooks/coroutines_asyncio.py @@ -45,4 +45,7 @@ def instrument_asyncio_base_events(module): def instrument_asyncio_events(module): - wrap_function_wrapper(module, "BaseDefaultEventLoopPolicy.set_event_loop", wrap_create_task) + if hasattr(module, "_BaseDefaultEventLoopPolicy"): # Python >= 3.14 + wrap_function_wrapper(module, "_BaseDefaultEventLoopPolicy.set_event_loop", wrap_create_task) + else: # Python <= 3.13 + wrap_function_wrapper(module, "BaseDefaultEventLoopPolicy.set_event_loop", wrap_create_task) diff --git a/pyproject.toml b/pyproject.toml index 337aaae634..2dbdb34837 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: System :: Monitoring", @@ -90,7 +91,7 @@ packages = [ ] 
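# --- Illustrative sketch (editor's aside, not part of the upstream patch) ---
# The coroutines_asyncio.py hook above selects the event loop policy class by
# feature detection instead of a version check: Python 3.14 exposes the class
# under the private name _BaseDefaultEventLoopPolicy, so hasattr() picks the
# correct attribute on both old and new interpreters. The same check in isolation:
import asyncio.events

policy_attr = (
    "_BaseDefaultEventLoopPolicy"  # present on Python >= 3.14
    if hasattr(asyncio.events, "_BaseDefaultEventLoopPolicy")
    else "BaseDefaultEventLoopPolicy"  # Python <= 3.13
)
# The hook then wraps f"{policy_attr}.set_event_loop" via wrap_function_wrapper.
# -----------------------------------------------------------------------------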
[tool.setuptools.package-data] -newrelic = ["newrelic.ini", "version.txt", "packages/urllib3/LICENSE.txt", "common/cacert.pem", "scripts/azure-prebuild.sh"] +newrelic = ["newrelic.ini", "packages/urllib3/LICENSE.txt", "common/cacert.pem", "scripts/azure-prebuild.sh"] [tool.setuptools_scm] write_to = "newrelic/_version.py" diff --git a/setup.py b/setup.py index 81a40ae967..4cccb1e437 100644 --- a/setup.py +++ b/setup.py @@ -134,7 +134,6 @@ def build_extension(self, ext): "package_data": { "newrelic": [ "newrelic.ini", - "version.txt", "packages/urllib3/LICENSE.txt", "common/cacert.pem", "scripts/azure-prebuild.sh", diff --git a/tests/cross_agent/test_collector_hostname.py b/tests/cross_agent/test_collector_hostname.py index 6da387fac6..f7e888b80d 100644 --- a/tests/cross_agent/test_collector_hostname.py +++ b/tests/cross_agent/test_collector_hostname.py @@ -108,6 +108,6 @@ def test_collector_hostname(config_file_key, config_override_host, env_key, env_ }, ) process.start() - result = queue.get(timeout=2) + result = queue.get(timeout=15) assert result == "PASS" diff --git a/tests/datastore_sqlite/test_obfuscation.py b/tests/datastore_sqlite/test_obfuscation.py index df3954aab9..9a417efc44 100644 --- a/tests/datastore_sqlite/test_obfuscation.py +++ b/tests/datastore_sqlite/test_obfuscation.py @@ -47,14 +47,20 @@ def test(): test() -_parameter_tests = [ - ("INSERT INTO a VALUES (:1, :2)", "INSERT INTO a VALUES (:1, :2)"), - ("INSERT INTO a VALUES (?, ?)", "INSERT INTO a VALUES (?, ?)"), -] +def test_named_parameters(sqlite3_cursor): + sql = obfuscated = "INSERT INTO a VALUES (:1, :2)" + + @validate_sql_obfuscation([obfuscated]) + @background_task() + def test(): + sqlite3_cursor.executemany(sql, [{"1": "hello", "2": "world"}, {"1": "love", "2": "python"}]) + + test() + +def test_sequence_parameters(sqlite3_cursor): + sql = obfuscated = "INSERT INTO a VALUES (?, ?)" -@pytest.mark.parametrize("sql,obfuscated", _parameter_tests) -def test_parameters(sqlite3_cursor, sql, obfuscated): @validate_sql_obfuscation([obfuscated]) @background_task() def test(): diff --git a/tests/framework_ariadne/_target_application.py b/tests/framework_ariadne/_target_application.py index 9222c2ba98..21cede2038 100644 --- a/tests/framework_ariadne/_target_application.py +++ b/tests/framework_ariadne/_target_application.py @@ -11,9 +11,6 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - - -import asyncio import json from graphql import MiddlewareManager @@ -52,6 +49,10 @@ def _run_sync(query, middleware=None): def run_async(schema): + import asyncio + + loop = asyncio.new_event_loop() + def _run_async(query, middleware=None): from ariadne import graphql @@ -60,7 +61,6 @@ def _run_async(query, middleware=None): if middleware: middleware = MiddlewareManager(*middleware) - loop = asyncio.get_event_loop() success, response = loop.run_until_complete(graphql(schema, {"query": query}, middleware=middleware)) check_response(query, success, response) diff --git a/tests/framework_graphene/_target_application.py b/tests/framework_graphene/_target_application.py index 1a7a6f15e4..8fec18aff8 100644 --- a/tests/framework_graphene/_target_application.py +++ b/tests/framework_graphene/_target_application.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
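# --- Illustrative sketch (editor's aside, not part of the upstream patch) ---
# The test target applications in this patch (ariadne above; graphene, graphql
# and strawberry below) replace asyncio.get_event_loop() with a loop created
# once via asyncio.new_event_loop(), since implicit loop creation is deprecated
# on newer Pythons and no longer happens on 3.14. The pattern in isolation:
import asyncio

async def run_query():  # stand-in for the schema execute coroutines in the tests
    return "ok"

loop = asyncio.new_event_loop()
try:
    assert loop.run_until_complete(run_query()) == "ok"
finally:
    loop.close()
# -----------------------------------------------------------------------------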
+ from ._target_schema_async import target_schema as target_schema_async from ._target_schema_sync import target_schema as target_schema_sync @@ -37,8 +38,9 @@ def _run_sync(query, middleware=None): def run_async(schema): import asyncio + loop = asyncio.new_event_loop() + def _run_async(query, middleware=None): - loop = asyncio.get_event_loop() response = loop.run_until_complete(schema.execute_async(query, middleware=middleware)) check_response(query, response) diff --git a/tests/framework_graphql/_target_application.py b/tests/framework_graphql/_target_application.py index 28f3b895ca..0fa0bed40e 100644 --- a/tests/framework_graphql/_target_application.py +++ b/tests/framework_graphql/_target_application.py @@ -47,9 +47,10 @@ def run_async(schema): from graphql import graphql + loop = asyncio.new_event_loop() + def _run_async(query, middleware=None): coro = graphql(schema, query, middleware=middleware) - loop = asyncio.get_event_loop() response = loop.run_until_complete(coro) check_response(query, response) diff --git a/tests/framework_strawberry/_target_application.py b/tests/framework_strawberry/_target_application.py index 3e3b1d2330..cc3ba4dbcc 100644 --- a/tests/framework_strawberry/_target_application.py +++ b/tests/framework_strawberry/_target_application.py @@ -13,7 +13,6 @@ # limitations under the License. -import asyncio import json import pytest @@ -46,13 +45,16 @@ def _run_sync(query, middleware=None): def run_async(schema): + import asyncio + + loop = asyncio.new_event_loop() + def _run_async(query, middleware=None): from graphql.language.source import Source if middleware is not None: pytest.skip("Middleware not supported in Strawberry.") - loop = asyncio.get_event_loop() response = loop.run_until_complete(schema.execute(query)) if (isinstance(query, str) and "error" not in query) or ( diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index 00cfd7368d..3d93e06e30 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -351,12 +351,12 @@ def collector_available_fixture(collector_agent_registration): settings = global_settings() # Wait for the application to become active. - timeout = (settings.startup_timeout or 0) + 10.0 + timeout = _timeout = (settings.startup_timeout or 0) + 10.0 while not application.active and timeout > 0: time.sleep(0.1) timeout -= 0.1 - assert application.active, f"Application failed to activate after {timeout} seconds." + assert application.active, f"Application failed to activate after {_timeout} seconds." def raise_background_exceptions(timeout=5.0): diff --git a/tox.ini b/tox.ini index 026bd33a25..69025bc95e 100644 --- a/tox.ini +++ b/tox.ini @@ -5,6 +5,7 @@ ; * postgres, mysql (require specified database to be running) ; * grpc (no parallel execution allowed, and requires grpc compilation) ; * linux, macos, windows (OS specific runners, no services required) +; * python (no special requirements) ; 2. Folder under tests/ that includes the test suite. ; Requires an entry under changedir to run the test suite from the correct folder. ; Should be prepended to any deps definitions as well to prevent environments from colliding. @@ -19,7 +20,7 @@ ; framework_aiohttp-aiohttp01: aiohttp<2 ; framework_aiohttp-aiohttp0202: aiohttp<2.3 ; 3. Python version required. Uses the standard tox definitions. (https://tox.readthedocs.io/en/latest/config.html#tox-environments) -; Examples: py38,py39,py310,py311,py312,py313,pypy310 +; Examples: py38,py39,py310,py311,py312,py313,py314,pypy311 ; 4. 
Library and version (Optional). Used when testing multiple versions of the library, and may be omitted when only testing a single version. ; Versions should be specified with 2 digits per version number, so <3 becomes 02 and <3.5 becomes 0304. latest and master are also acceptable versions. ; Examples: uvicorn03, CherryPy0302, uvicornlatest @@ -31,7 +32,7 @@ ; 5. With or without New Relic C extensions (Optional). Used for testing agent features. ; Examples: with_extensions, without_extensions ; envlist = -; linux-agent_features-pypy310-without_extensions, +; linux-agent_features-pypy311-without_extensions, ; linux-agent_streaming-py312-{with,without}_extensions, ; ; Full Format: @@ -50,164 +51,175 @@ uv_seed = true skip_missing_interpreters = false envlist = # Linux Core Agent Test Suite - {linux,linux_arm64}-agent_features-{py38,py39,py310,py311,py312,py313}-{with,without}_extensions, - {linux,linux_arm64}-agent_features-pypy310-without_extensions, - {linux,linux_arm64}-agent_streaming-{py38,py39,py310,py311,py312,py313}-protobuf06-{with,without}_extensions, + {linux,linux_arm64}-agent_features-{py38,py39,py310,py311,py312,py313,py314}-{with,without}_extensions, + {linux,linux_arm64}-agent_features-pypy311-without_extensions, + {linux,linux_arm64}-agent_streaming-{py38,py39,py310,py311,py312,py313,py314}-protobuf06-{with,without}_extensions, {linux,linux_arm64}-agent_streaming-py39-protobuf{03,0319,04,05}-{with,without}_extensions, - {linux,linux_arm64}-agent_unittests-{py38,py39,py310,py311,py312,py313}-{with,without}_extensions, - {linux,linux_arm64}-agent_unittests-pypy310-without_extensions, - {linux,linux_arm64}-cross_agent-{py38,py39,py310,py311,py312,py313}-{with,without}_extensions, - {linux,linux_arm64}-cross_agent-pypy310-without_extensions, + {linux,linux_arm64}-agent_unittests-{py38,py39,py310,py311,py312,py313,py314}-{with,without}_extensions, + {linux,linux_arm64}-agent_unittests-pypy311-without_extensions, + {linux,linux_arm64}-cross_agent-{py38,py39,py310,py311,py312,py313,py314}-{with,without}_extensions, + {linux,linux_arm64}-cross_agent-pypy311-without_extensions, # Windows Core Agent Test Suite - {windows,windows_arm64}-agent_features-py313-{with,without}_extensions, + {windows,windows_arm64}-agent_features-{py313,py314}-{with,without}_extensions, # Windows grpcio wheels don't appear to be installable for Arm64 despite being available - windows-agent_streaming-py313-protobuf06-{with,without}_extensions, - {windows,windows_arm64}-agent_unittests-py313-{with,without}_extensions, - {windows,windows_arm64}-cross_agent-py313-{with,without}_extensions, + windows-agent_streaming-{py313,py314}-protobuf06-{with,without}_extensions, + {windows,windows_arm64}-agent_unittests-{py313,py314}-{with,without}_extensions, + {windows,windows_arm64}-cross_agent-{py313,py314}-{with,without}_extensions, # Integration Tests (only run on Linux) - cassandra-datastore_cassandradriver-{py38,py39,py310,py311,py312,pypy310}-cassandralatest, - elasticsearchserver07-datastore_elasticsearch-{py38,py39,py310,py311,py312,py313,pypy310}-elasticsearch07, - elasticsearchserver08-datastore_elasticsearch-{py38,py39,py310,py311,py312,py313,pypy310}-elasticsearch08, - firestore-datastore_firestore-{py38,py39,py310,py311,py312,py313}, - grpc-framework_grpc-{py39,py310,py311,py312,py313}-grpclatest, + cassandra-datastore_cassandradriver-{py38,py39,py310,py311,py312,pypy311}-cassandralatest, + elasticsearchserver07-datastore_elasticsearch-{py38,py39,py310,py311,py312,py313,py314,pypy311}-elasticsearch07, + 
elasticsearchserver08-datastore_elasticsearch-{py38,py39,py310,py311,py312,py313,py314,pypy311}-elasticsearch08,
+    firestore-datastore_firestore-{py38,py39,py310,py311,py312,py313,py314},
+    grpc-framework_grpc-{py39,py310,py311,py312,py313,py314}-grpclatest,
     kafka-messagebroker_confluentkafka-py39-confluentkafka{0108,0107,0106},
     kafka-messagebroker_confluentkafka-{py38,py39,py310,py311,py312,py313}-confluentkafkalatest,
-    kafka-messagebroker_kafkapython-{py38,py39,py310,py311,py312,py313,pypy310}-kafkapythonlatest,
-    kafka-messagebroker_kafkapython-{py38,py39,py310,py311,py312,py313,pypy310}-kafkapythonnglatest,
-    memcached-datastore_aiomcache-{py38,py39,py310,py311,py312,py313},
-    memcached-datastore_bmemcached-{py38,py39,py310,py311,py312,py313},
-    memcached-datastore_memcache-{py38,py39,py310,py311,py312,py313,pypy310}-memcached01,
+    ;; Package not ready for Python 3.14 (confluent-kafka wheels not released)
+    ; kafka-messagebroker_confluentkafka-py314-confluentkafkalatest,
+    kafka-messagebroker_kafkapython-{py38,py39,py310,py311,py312,py313,py314,pypy311}-kafkapythonlatest,
+    kafka-messagebroker_kafkapython-{py38,py39,py310,py311,py312,py313,py314,pypy311}-kafkapythonnglatest,
+    memcached-datastore_aiomcache-{py38,py39,py310,py311,py312,py313,py314},
+    memcached-datastore_bmemcached-{py38,py39,py310,py311,py312,py313,py314},
+    memcached-datastore_memcache-{py38,py39,py310,py311,py312,py313,py314,pypy311}-memcached01,
     memcached-datastore_pylibmc-{py38,py39,py310,py311},
-    memcached-datastore_pymemcache-{py38,py39,py310,py311,py312,py313,pypy310},
-    mongodb8-datastore_motor-{py38,py39,py310,py311,py312,py313}-motorlatest,
+    memcached-datastore_pymemcache-{py38,py39,py310,py311,py312,py313,py314,pypy311},
+    mongodb8-datastore_motor-{py38,py39,py310,py311,py312,py313,py314}-motorlatest,
     mongodb3-datastore_pymongo-{py38,py39,py310,py311,py312}-pymongo03,
-    mongodb8-datastore_pymongo-{py38,py39,py310,py311,py312,py313,pypy310}-pymongo04,
+    mongodb8-datastore_pymongo-{py38,py39,py310,py311,py312,py313,py314,pypy311}-pymongo04,
     ; aiomysql tests on PyPy disabled for now due to issues building cryptography
-    mysql-datastore_aiomysql-{py38,py39,py310,py311,py312,py313},
-    mssql-datastore_pymssql-pymssqllatest-{py39,py310,py311,py312,py313},
+    mysql-datastore_aiomysql-{py38,py39,py310,py311,py312,py313,py314},
+    mssql-datastore_pymssql-pymssqllatest-{py39,py310,py311,py312,py313,py314},
     mssql-datastore_pymssql-pymssql020301-py38,
-    mysql-datastore_mysql-mysqllatest-{py38,py39,py310,py311,py312,py313},
-    mysql-datastore_mysqldb-{py38,py39,py310,py311,py312,py313},
+    mysql-datastore_mysql-mysqllatest-{py38,py39,py310,py311,py312,py313,py314},
+    mysql-datastore_mysqldb-{py38,py39,py310,py311,py312,py313,py314},
     ; pymysql tests on PyPy disabled for now due to issues building cryptography
-    mysql-datastore_pymysql-{py38,py39,py310,py311,py312,py313},
-    oracledb-datastore_oracledb-{py39,py310,py311,py312,py313}-oracledblatest,
-    oracledb-datastore_oracledb-{py39,py313}-oracledb02,
+    mysql-datastore_pymysql-{py38,py39,py310,py311,py312,py313,py314},
+    oracledb-datastore_oracledb-{py39,py310,py311,py312,py313,py314}-oracledblatest,
+    oracledb-datastore_oracledb-{py39,py313,py314}-oracledb02,
     oracledb-datastore_oracledb-{py39,py312}-oracledb01,
-    nginx-external_httpx-{py38,py39,py310,py311,py312,py313},
-    postgres16-datastore_asyncpg-{py38,py39,py310,py311,py312,py313},
-    postgres16-datastore_psycopg-{py38,py39,py310,py311,py312,py313,pypy310}-psycopglatest,
+    nginx-external_httpx-{py38,py39,py310,py311,py312,py313,py314},
+    postgres16-datastore_asyncpg-{py38,py39,py310,py311,py312,py313,py314},
+    postgres16-datastore_psycopg-{py38,py39,py310,py311,py312,py313,py314,pypy311}-psycopglatest,
     postgres16-datastore_psycopg-py312-psycopg_{purepython,binary,compiled}0301,
     postgres16-datastore_psycopg2-{py38,py39,py310,py311,py312}-psycopg2latest,
     postgres16-datastore_psycopg2cffi-{py38,py39,py310,py311,py312}-psycopg2cffilatest,
-    postgres16-datastore_pyodbc-{py38,py39,py310,py311,py312,py313}-pyodbclatest,
-    postgres9-datastore_postgresql-{py38,py39,py310,py311,py312,py313},
-    python-adapter_asgiref-{py38,py39,py310,py311,py312,py313,pypy310}-asgireflatest,
+    postgres16-datastore_pyodbc-{py38,py39,py310,py311,py312,py313,py314}-pyodbclatest,
+    postgres9-datastore_postgresql-{py38,py39,py310,py311,py312,py313,py314},
+    python-adapter_asgiref-{py38,py39,py310,py311,py312,py313,py314,pypy311}-asgireflatest,
     python-adapter_asgiref-py310-asgiref{0303,0304,0305,0306,0307},
-    python-adapter_cheroot-{py38,py39,py310,py311,py312,py313},
-    python-adapter_daphne-{py38,py39,py310,py311,py312,py313}-daphnelatest,
-    python-adapter_gevent-{py38,py310,py311,py312,py313},
+    python-adapter_cheroot-{py38,py39,py310,py311,py312,py313,py314},
+    python-adapter_daphne-{py38,py39,py310,py311,py312,py313,py314}-daphnelatest,
+    python-adapter_gevent-{py38,py310,py311,py312,py313,py314},
     python-adapter_gunicorn-{py38,py39,py310,py311,py312,py313}-aiohttp03-gunicornlatest,
-    python-adapter_hypercorn-{py38,py39,py310,py311,py312,py313}-hypercornlatest,
+    ;; Package not ready for Python 3.14 (aiohttp's worker not updated)
+    ; python-adapter_gunicorn-py314-aiohttp03-gunicornlatest,
+    python-adapter_hypercorn-{py38,py39,py310,py311,py312,py313,py314}-hypercornlatest,
     python-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013},
     ; mcp tests on PyPy disabled for now due to issues building cryptography
-    python-adapter_mcp-{py310,py311,py312,py313},
-    python-adapter_uvicorn-{py38,py39,py310,py311,py312,py313}-uvicornlatest,
+    python-adapter_mcp-{py310,py311,py312,py313,py314},
+    python-adapter_uvicorn-{py38,py39,py310,py311,py312,py313,py314}-uvicornlatest,
     python-adapter_uvicorn-py38-uvicorn014,
-    python-adapter_waitress-{py38,py39,py310,py311,py312,py313}-waitresslatest,
-    python-application_celery-{py38,py39,py310,py311,py312,py313,pypy310}-celerylatest,
+    python-adapter_waitress-{py38,py39,py310,py311,py312,py313,py314}-waitresslatest,
+    python-application_celery-{py38,py39,py310,py311,py312,py313,py314,pypy311}-celerylatest,
     python-application_celery-py311-celery{0504,0503,0502},
-    python-component_djangorestframework-{py38,py39,py310,py311,py312,py313}-djangorestframeworklatest,
-    python-component_flask_rest-{py38,py39,py310,py311,py312,py313,pypy310}-flaskrestxlatest,
+    python-component_djangorestframework-{py38,py39,py310,py311,py312,py313,py314}-djangorestframeworklatest,
+    python-component_flask_rest-{py38,py39,py310,py311,py312,py313,py314,pypy311}-flaskrestxlatest,
     python-component_graphqlserver-{py38,py39,py310,py311,py312},
     ;; Tests need to be updated to support newer graphql-server/sanic versions
-    ; python-component_graphqlserver-py313,
-    python-component_tastypie-{py38,py39,py310,py311,py312,py313,pypy310}-tastypielatest,
-    python-coroutines_asyncio-{py38,py39,py310,py311,py312,py313,pypy310},
-    python-datastore_sqlite-{py38,py39,py310,py311,py312,py313,pypy310},
+    ; python-component_graphqlserver-{py313,py314},
+    python-component_tastypie-{py38,py39,py310,py311,py312,py313,py314,pypy311}-tastypielatest,
+    python-coroutines_asyncio-{py38,py39,py310,py311,py312,py313,py314,pypy311},
+    python-datastore_sqlite-{py38,py39,py310,py311,py312,py313,py314,pypy311},
     python-external_aiobotocore-{py38,py39,py310,py311,py312,py313}-aiobotocorelatest,
-    python-external_botocore-{py38,py39,py310,py311,py312,py313}-botocorelatest,
+    ;; Package not ready for Python 3.14 or PyPy 3.11 (httptools fails to compile)
+    ; python-external_aiobotocore-py314-aiobotocorelatest,
+    python-external_botocore-{py38,py39,py310,py311,py312,py313,py314}-botocorelatest,
     python-external_botocore-{py311}-botocorelatest-langchain,
     python-external_botocore-py310-botocore0125,
     python-external_botocore-py311-botocore0128,
-    python-external_feedparser-{py38,py39,py310,py311,py312,py313}-feedparser06,
-    python-external_http-{py38,py39,py310,py311,py312,py313},
-    python-external_httplib-{py38,py39,py310,py311,py312,py313,pypy310},
-    python-external_httplib2-{py38,py39,py310,py311,py312,py313,pypy310},
+    python-external_feedparser-{py38,py39,py310,py311,py312,py313,py314}-feedparser06,
+    python-external_http-{py38,py39,py310,py311,py312,py313,py314},
+    python-external_httplib-{py38,py39,py310,py311,py312,py313,py314,pypy311},
+    python-external_httplib2-{py38,py39,py310,py311,py312,py313,py314,pypy311},
     # pyzeebe requires grpcio which does not support pypy
     python-external_pyzeebe-{py39,py310,py311,py312},
-    python-external_requests-{py38,py39,py310,py311,py312,py313,pypy310},
-    python-external_urllib3-{py38,py39,py310,py311,py312,py313,pypy310}-urllib3latest,
-    python-external_urllib3-{py312,py313,pypy310}-urllib30126,
-    python-framework_aiohttp-{py38,py39,py310,py311,py312,py313,pypy310}-aiohttp03,
-    python-framework_ariadne-{py38,py39,py310,py311,py312,py313}-ariadnelatest,
+    python-external_requests-{py38,py39,py310,py311,py312,py313,py314,pypy311},
+    python-external_urllib3-{py38,py39,py310,py311,py312,py313,py314,pypy311}-urllib3latest,
+    python-external_urllib3-{py312,py313,py314,pypy311}-urllib30126,
+    python-framework_aiohttp-{py38,py39,py310,py311,py312,py313,py314,pypy311}-aiohttp03,
+    python-framework_ariadne-{py38,py39,py310,py311,py312,py313,py314}-ariadnelatest,
     python-framework_azurefunctions-{py39,py310,py311,py312},
-    python-framework_bottle-{py38,py39,py310,py311,py312,py313,pypy310}-bottle0012,
-    python-framework_cherrypy-{py38,py39,py310,py311,py312,py313,pypy310}-CherryPylatest,
-    python-framework_django-{py38,py39,py310,py311,py312,py313}-Djangolatest,
+    python-framework_bottle-{py38,py39,py310,py311,py312,py313,py314,pypy311}-bottle0012,
+    python-framework_cherrypy-{py38,py39,py310,py311,py312,py313,py314,pypy311}-CherryPylatest,
+    python-framework_django-{py38,py39,py310,py311,py312,py313,py314}-Djangolatest,
     python-framework_django-py39-Django{0202,0300,0301,0302,0401},
-    python-framework_falcon-{py39,py310,py311,py312,py313,pypy310}-falconlatest,
+    python-framework_falcon-{py39,py310,py311,py312,py313,py314,pypy311}-falconlatest,
     python-framework_falcon-py38-falcon0410,
-    python-framework_falcon-{py39,py310,py311,py312,py313,pypy310}-falconmaster,
-    python-framework_fastapi-{py38,py39,py310,py311,py312,py313},
-    python-framework_flask-{py38,py39,py310,py311,py312,pypy310}-flask02,
+    python-framework_falcon-{py39,py310,py311,py312,py313,py314,pypy311}-falconmaster,
+    python-framework_fastapi-{py38,py39,py310,py311,py312,py313,py314},
+    python-framework_flask-{py38,py39,py310,py311,py312,pypy311}-flask02,
     ; python-framework_flask-py38-flaskmaster fails, even with Flask-Compress<1.16 and coverage==7.61 for py38
     python-framework_flask-py38-flasklatest,
     ; flaskmaster tests disabled until they can be fixed
-    python-framework_flask-{py39,py310,py311,py312,py313,pypy310}-flask{latest},
-    python-framework_graphene-{py38,py39,py310,py311,py312,py313}-graphenelatest,
-    python-component_graphenedjango-{py38,py39,py310,py311,py312,py313}-graphenedjangolatest,
-    python-framework_graphql-{py38,py39,py310,py311,py312,py313,pypy310}-graphql03,
-    python-framework_graphql-{py38,py39,py310,py311,py312,py313,pypy310}-graphqllatest,
-    python-framework_pyramid-{py38,py39,py310,py311,py312,py313,pypy310}-Pyramidlatest,
-    python-framework_pyramid-{py38,py39,py310,py311,py312,py313,pypy310}-Pyramid0110-cornice,
+    python-framework_flask-{py39,py310,py311,py312,py313,py314,pypy311}-flask{latest},
+    python-framework_graphene-{py38,py39,py310,py311,py312,py313,py314}-graphenelatest,
+    python-component_graphenedjango-{py38,py39,py310,py311,py312,py313,py314}-graphenedjangolatest,
+    python-framework_graphql-{py38,py39,py310,py311,py312,py313,py314,pypy311}-graphql03,
+    python-framework_graphql-{py38,py39,py310,py311,py312,py313,py314,pypy311}-graphqllatest,
+    python-framework_pyramid-{py38,py39,py310,py311,py312,py313,py314,pypy311}-Pyramidlatest,
+    python-framework_pyramid-{py38,py39,py310,py311,py312,py313,py314,pypy311}-cornicelatest,
     python-framework_sanic-py38-sanic2406,
-    python-framework_sanic-{py39,py310,py311,py312,py313,pypy310}-saniclatest,
-    python-framework_sanic-{py38,pypy310}-sanic2290,
-    python-framework_starlette-{py310,pypy310}-starlette{0014,0015,0019,0028},
-    python-framework_starlette-{py38,py39,py310,py311,py312,py313,pypy310}-starlettelatest,
+    python-framework_sanic-{py39,py310,py311,py312,py313}-saniclatest,
+    ;; Package not ready for Python 3.14 or PyPy 3.11 (httptools fails to compile)
+    ; python-framework_sanic-{py314,pypy311}-saniclatest,
+    python-framework_sanic-py38-sanic2290,
+    python-framework_starlette-{py310,pypy311}-starlette{0014,0015,0019,0028},
+    python-framework_starlette-{py38,py39,py310,py311,py312,py313,py314,pypy311}-starlettelatest,
     python-framework_starlette-{py38}-starlette002001,
     python-framework_strawberry-{py38,py39,py310,py311,py312}-strawberry02352,
-    python-framework_strawberry-{py38,py39,py310,py311,py312,py313}-strawberrylatest,
-    python-framework_tornado-{py38,py39,py310,py311,py312,py313}-tornadolatest,
-    python-framework_tornado-{py310,py311,py312,py313}-tornadomaster,
-    python-logger_logging-{py38,py39,py310,py311,py312,py313,pypy310},
-    python-logger_loguru-{py38,py39,py310,py311,py312,py313,pypy310}-logurulatest,
-    python-logger_structlog-{py38,py39,py310,py311,py312,py313,pypy310}-structloglatest,
-    python-mlmodel_autogen-{py310,py311,py312,py313}-autogen061,
-    python-mlmodel_autogen-{py310,py311,py312,py313}-autogenlatest,
-    python-mlmodel_gemini-{py39,py310,py311,py312,py313},
-    python-mlmodel_langchain-{py39,py310,py311,py312},
-    ;; Package not ready for Python 3.13 (uses an older version of numpy)
-    ; python-mlmodel_langchain-py313,
+    python-framework_strawberry-{py38,py39,py310,py311,py312,py313,py314}-strawberrylatest,
+    python-framework_tornado-{py38,py39,py310,py311,py312,py313,py314}-tornadolatest,
+    python-framework_tornado-{py310,py311,py312,py313,py314}-tornadomaster,
+    python-logger_logging-{py38,py39,py310,py311,py312,py313,py314,pypy311},
+    python-logger_loguru-{py38,py39,py310,py311,py312,py313,py314,pypy311}-logurulatest,
+    python-logger_structlog-{py38,py39,py310,py311,py312,py313,py314,pypy311}-structloglatest,
+    python-mlmodel_autogen-{py310,py311,py312,py313,py314}-autogen061,
+    python-mlmodel_autogen-{py310,py311,py312,py313,py314}-autogenlatest,
+    ;; Package not ready for PyPy 3.11 (pydantic-core not updated)
+    ; python-mlmodel_autogen-pypy311-autogen061,
+    ; python-mlmodel_autogen-pypy311-autogenlatest,
+    python-mlmodel_gemini-{py39,py310,py311,py312,py313,py314},
+    python-mlmodel_langchain-{py39,py310,py311,py312,py313},
+    ;; Package not ready for Python 3.14 (pydantic not updated)
+    ; python-mlmodel_langchain-py314,
     python-mlmodel_openai-openai0-{py38,py39,py310,py311,py312},
     python-mlmodel_openai-openai107-py312,
-    python-mlmodel_openai-openailatest-{py38,py39,py310,py311,py312,py313},
-    python-mlmodel_sklearn-{py38,py39,py310,py311,py312,py313}-scikitlearnlatest,
-    python-template_genshi-{py38,py39,py310,py311,py312,py313}-genshilatest,
-    python-template_jinja2-{py38,py39,py310,py311,py312,py313}-jinja2latest,
-    python-template_mako-{py38,py39,py310,py311,py312,py313},
-    rabbitmq-messagebroker_pika-{py38,py39,py310,py311,py312,py313,pypy310}-pikalatest,
-    rabbitmq-messagebroker_kombu-{py38,py39,py310,py311,py312,py313,pypy310}-kombulatest,
-    rabbitmq-messagebroker_kombu-{py38,py39,py310,pypy310}-kombu050204,
-    redis-datastore_redis-{py38,py39,py310,py311,pypy310}-redis04,
-    redis-datastore_redis-{py38,py39,py310,py311,py312,pypy310}-redis05,
-    redis-datastore_redis-{py38,py39,py310,py311,py312,py313,pypy310}-redislatest,
-    rediscluster-datastore_rediscluster-{py312,py313,pypy310}-redislatest,
-    valkey-datastore_valkey-{py38,py39,py310,py311,py312,py313,pypy310}-valkeylatest,
-    solr-datastore_pysolr-{py38,py39,py310,py311,py312,py313,pypy310},
+    python-mlmodel_openai-openailatest-{py38,py39,py310,py311,py312,py313,py314},
+    python-mlmodel_sklearn-{py38,py39,py310,py311,py312,py313,py314}-scikitlearnlatest,
+    python-template_genshi-{py38,py39,py310,py311,py312,py313,py314}-genshilatest,
+    python-template_jinja2-{py38,py39,py310,py311,py312,py313,py314}-jinja2latest,
+    python-template_mako-{py38,py39,py310,py311,py312,py313,py314},
+    rabbitmq-messagebroker_pika-{py38,py39,py310,py311,py312,py313,py314,pypy311}-pikalatest,
+    rabbitmq-messagebroker_kombu-{py38,py39,py310,py311,py312,py313,py314,pypy311}-kombulatest,
+    rabbitmq-messagebroker_kombu-{py38,py39,py310,pypy311}-kombu050204,
+    redis-datastore_redis-{py38,py39,py310,py311,pypy311}-redis04,
+    redis-datastore_redis-{py38,py39,py310,py311,py312,pypy311}-redis05,
+    redis-datastore_redis-{py38,py39,py310,py311,py312,py313,py314,pypy311}-redislatest,
+    rediscluster-datastore_rediscluster-{py312,py313,py314,pypy311}-redislatest,
+    valkey-datastore_valkey-{py38,py39,py310,py311,py312,py313,py314,pypy311}-valkeylatest,
+    solr-datastore_pysolr-{py38,py39,py310,py311,py312,py313,py314,pypy311},

 [testenv]
 deps =
     # Base Dependencies
-    {py39,py310,py311,py312,py313,pypy310}: pytest==8.4.1
+    {py39,py310,py311,py312,py313,py314,pypy311}: pytest==8.4.1
     py38: pytest==8.3.5
+    {py39,py310,py311,py312,py313,py314,pypy311}: WebTest==3.0.6
+    py38: WebTest==3.0.1
+    py313,py314: legacy-cgi==2.6.1 # cgi was removed from the stdlib in 3.13, and is required for WebTest
     iniconfig
     coverage
-    {py39,py310,py311,py312,py313,pypy310}: WebTest==3.0.6
-    py38: WebTest==3.0.1
-    py313: legacy-cgi==2.6.1 # cgi was removed from the stdlib in 3.13, and is required for WebTest
     # Test Suite Dependencies
     adapter_asgiref-asgireflatest: asgiref
@@ -250,7 +262,7 @@ deps =
     application_celery-celery0504: celery[pytest]<5.5
     application_celery-celery0503: celery[pytest]<5.4
     application_celery-celery0502: celery[pytest]<5.3
-    application_celery-pypy310: importlib-metadata<5.0
+    application_celery-pypy311: importlib-metadata<5.0
     mlmodel_sklearn: pandas
     mlmodel_sklearn: protobuf
     mlmodel_sklearn: numpy
@@ -271,7 +283,7 @@ deps =
     component_tastypie-tastypielatest: django-tastypie
     component_tastypie-tastypielatest: django<4.1
     component_tastypie-tastypielatest: asgiref<3.7.1 # asgiref==3.7.1 only suppport Python 3.10+
-    coroutines_asyncio-{py38,py39,py310,py311,py312,py313}: uvloop
+    coroutines_asyncio-{py38,py39,py310,py311,py312,py313,py314}: uvloop
     cross_agent: requests
     datastore_asyncpg: asyncpg
     datastore_aiomcache: aiomcache
@@ -384,7 +396,7 @@ deps =
     framework_grpc-grpc0162: grpcio-tools<1.63
     framework_grpc-grpc0162: protobuf<4.25
     framework_pyramid: routes
-    framework_pyramid-cornice: cornice!=5.0.0
+    framework_pyramid-cornicelatest: cornice
     framework_pyramid-Pyramidlatest: Pyramid
     framework_sanic-sanic2290: sanic<22.9.1
     framework_sanic-sanic2406: sanic<24.07