diff --git a/.github/actionlint.yml b/.github/actionlint.yml new file mode 100644 index 0000000000..cb4b420b4e --- /dev/null +++ b/.github/actionlint.yml @@ -0,0 +1,19 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +paths: + .github/workflows/**/*.{yml,yaml}: + ignore: + # This runner exists, but is in beta and not known to actionlint. + - 'label "windows-11-arm" is unknown\. .+' diff --git a/.github/scripts/get-envs.py b/.github/scripts/get-envs.py new file mode 100755 index 0000000000..d772130bfc --- /dev/null +++ b/.github/scripts/get-envs.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import fileinput +import os +from textwrap import dedent + +GROUP_NUMBER = int(os.environ["GROUP_NUMBER"]) - 1 +TOTAL_GROUPS = int(os.environ["TOTAL_GROUPS"]) +GITHUB_JOB = os.environ["GITHUB_JOB"] + + +def main(stdin): + environments = [env.rstrip() for env in stdin] + filtered_envs = [env for env in environments if env.startswith(GITHUB_JOB + "-")] + grouped_envs = filtered_envs[GROUP_NUMBER::TOTAL_GROUPS] + joined_envs = ",".join(grouped_envs) + + # If not environments are found, raise an error with helpful information. + if joined_envs: + print(joined_envs) + else: + error_msg = dedent(f""" + No matching environments found. + GITHUB_JOB = {GITHUB_JOB} + GROUP_NUMBER = {GROUP_NUMBER + 1} + TOTAL_GROUPS = {TOTAL_GROUPS} + + environments = {environments} + filtered_envs = {filtered_envs} + grouped_envs = {grouped_envs} + joined_envs = {joined_envs} + """) + raise RuntimeError(error_msg(environments)) + + +if __name__ == "__main__": + with fileinput.input() as stdin: + main(stdin) diff --git a/.github/scripts/retry.sh b/.github/scripts/retry.sh deleted file mode 100755 index 079798a72d..0000000000 --- a/.github/scripts/retry.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -# Time in seconds to backoff after the initial attempt. 
-INITIAL_BACKOFF=10 - -# Grab first arg as number of retries -retries=$1 -shift - -# Use for loop to repeatedly try the wrapped command, breaking on success -for i in $(seq 1 $retries); do - echo "Running: $@" - - # Exponential backoff - if [[ i -gt 1 ]]; then - # Starts with the initial backoff then doubles every retry. - backoff=$(($INITIAL_BACKOFF * (2 ** (i - 2)))) - echo "Command failed, retrying in $backoff seconds..." - sleep $backoff - fi - - # Run wrapped command, and exit on success - $@ && break - result=$? -done - -# Exit with status code of wrapped command -exit $result diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 78d7e251cb..e42678a58b 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -115,7 +115,7 @@ jobs: - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 with: - python-version: "3.12" + python-version: "3.13" - name: Install Dependencies run: | diff --git a/.github/workflows/get-envs.py b/.github/workflows/get-envs.py deleted file mode 100755 index f5ef936936..0000000000 --- a/.github/workflows/get-envs.py +++ /dev/null @@ -1,36 +0,0 @@ -#!/usr/bin/env python3.8 -# Copyright 2010 New Relic, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import fileinput -import os - -GROUP_NUMBER = int(os.environ["GROUP_NUMBER"]) - 1 -TOTAL_GROUPS = int(os.environ["TOTAL_GROUPS"]) - - -def main(f): - environments = [e.rstrip() for e in f] - filtered_envs = environments[GROUP_NUMBER::TOTAL_GROUPS] - joined_envs = ",".join(filtered_envs) - - assert joined_envs, ( # noqa: S101 - f"No environments found.\nenvironments = {str(environments)}\nGROUP_NUMBER = {GROUP_NUMBER + 1}\nTOTAL_GROUPS = {TOTAL_GROUPS}" - ) - print(joined_envs) - - -if __name__ == "__main__": - with fileinput.input() as f: - main(f) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index afd5555150..3960ba90d8 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -27,6 +27,11 @@ on: permissions: contents: read +# Ensure bash is used on all runners as the default shell +defaults: + run: + shell: bash + concurrency: group: ${{ github.ref || github.run_id }}-${{ github.workflow }} cancel-in-progress: true @@ -37,7 +42,10 @@ jobs: runs-on: ubuntu-24.04 if: always() # Always run, even on cancellation or failure needs: - - python + - linux + - linux_arm64 + - windows + - windows_arm64 - cassandra - elasticsearchserver07 - elasticsearchserver08 @@ -84,7 +92,7 @@ jobs: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # 5.0.0 - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 with: - python-version: "3.10" + python-version: "3.13" architecture: x64 - name: Download Coverage Artifacts @@ -106,8 +114,12 @@ jobs: fail_ci_if_error: true token: ${{ secrets.CODECOV_TOKEN }} - # Tests - python: + # ============================== + # Integration Test Suite Runners + # (run in the CI container) + # ============================== + + linux: env: TOTAL_GROUPS: 20 @@ -160,7 +172,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python 
./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -211,7 +223,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -276,7 +288,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -342,7 +354,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -408,7 +420,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -477,7 +489,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -546,7 +558,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -651,7 +663,7 @@ jobs: - name: Get Environments 
id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -715,7 +727,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -781,7 +793,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -848,7 +860,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -912,7 +924,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -977,7 +989,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1053,7 +1065,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l 
| python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1117,7 +1129,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1181,7 +1193,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1250,7 +1262,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1316,7 +1328,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1383,7 +1395,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1454,7 +1466,7 @@ jobs: - name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} @@ -1518,7 +1530,175 @@ jobs: - 
name: Get Environments id: get-envs run: | - echo "envs=$(tox -l | grep '^${{ github.job }}\-' | ./.github/workflows/get-envs.py)" >> "$GITHUB_OUTPUT" + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Test + run: | + tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + env: + TOX_PARALLEL_NO_SPINNER: 1 + FORCE_COLOR: "true" + + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + + # ============================================= + # OS Specific Core Test Suite Runners + # (runs directly on the OS, not in a container) + # ============================================= + + linux_arm64: + env: + TOTAL_GROUPS: 2 + + strategy: + fail-fast: false + matrix: + group-number: [1, 2] + + runs-on: ubuntu-24.04-arm + timeout-minutes: 30 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # 4.2.2 + + - name: Fetch git tags + run: | + git fetch --tags origin + + - name: Install Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: | + pypy3.10 + 3.8 + 3.9 + 3.10 + 3.11 + 3.12 + 3.13 + + - name: Install Dependencies + run: | + pip install --upgrade tox + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Get Environments + id: get-envs + run: | + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Test + run: | + tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + env: + TOX_PARALLEL_NO_SPINNER: 1 + FORCE_COLOR: "true" + + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + with: + name: coverage-${{ 
github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + + windows: + env: + TOTAL_GROUPS: 1 + + strategy: + fail-fast: false + matrix: + group-number: [1] + + runs-on: windows-2025 + timeout-minutes: 30 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # 4.2.2 + + - name: Fetch git tags + run: | + git fetch --tags origin + + - name: Install Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: "3.13" + + - name: Install Dependencies + run: | + pip install --upgrade tox + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Get Environments + id: get-envs + run: | + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> "$GITHUB_OUTPUT" + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Test + run: | + tox -vv -e ${{ steps.get-envs.outputs.envs }} -p auto + env: + TOX_PARALLEL_NO_SPINNER: 1 + FORCE_COLOR: "true" + + - name: Upload Coverage Artifacts + uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + with: + name: coverage-${{ github.job }}-${{ strategy.job-index }} + path: ./**/.coverage.* + include-hidden-files: true + if-no-files-found: error + retention-days: 1 + + windows_arm64: + env: + TOTAL_GROUPS: 1 + + strategy: + fail-fast: false + matrix: + group-number: [1] + + runs-on: windows-11-arm + timeout-minutes: 30 + steps: + - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # 4.2.2 + + - name: Fetch git tags + run: | + git fetch --tags origin + + - name: Install Python + uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # 5.6.0 + with: + python-version: "3.13" + + - name: Install Dependencies + run: | + pip install --upgrade tox + env: + GROUP_NUMBER: ${{ matrix.group-number }} + + - name: Get Environments + id: get-envs + run: | + echo "envs=$(tox -l | python ./.github/scripts/get-envs.py)" >> 
"$GITHUB_OUTPUT" env: GROUP_NUMBER: ${{ matrix.group-number }} diff --git a/newrelic/admin/record_deploy.py b/newrelic/admin/record_deploy.py index 7851253c59..116fde5326 100644 --- a/newrelic/admin/record_deploy.py +++ b/newrelic/admin/record_deploy.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os -import pwd +import getpass from newrelic.admin import command, usage from newrelic.common import agent_http, encoding_utils @@ -79,7 +78,7 @@ def record_deploy( path = f"/v2/applications/{app_id}/deployments.json" if user is None: - user = pwd.getpwuid(os.getuid()).pw_gecos + user = getpass.getuser() deployment = {} deployment["revision"] = revision diff --git a/newrelic/config.py b/newrelic/config.py index 0b2ad73567..76b3e4dd2c 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -1194,10 +1194,11 @@ def _module_function_glob(module, object_path): # Skip adding all class methods on failure pass - # Under the hood uses fnmatch, which uses os.path.normcase - # On windows this would cause issues with case insensitivity, - # but on all other operating systems there should be no issues. - return fnmatch.filter(available_functions, object_path) + # Globbing must be done using fnmatch.fnmatchcase as + # fnmatch.filter and fnmatch.fnmatch use os.path.normcase + # which cause case insensitivity issues on Windows. + + return [func for func in available_functions if fnmatch.fnmatchcase(func, object_path)] # Setup wsgi application wrapper defined in configuration file. 
diff --git a/newrelic/core/agent_control_health.py b/newrelic/core/agent_control_health.py index 33e49a8a16..dd7d3fff7b 100644 --- a/newrelic/core/agent_control_health.py +++ b/newrelic/core/agent_control_health.py @@ -15,12 +15,14 @@ import logging import os import sched +import sys import threading import time import uuid from enum import IntEnum from pathlib import Path from urllib.parse import urlparse +from urllib.request import url2pathname from newrelic.core.config import _environ_as_bool, _environ_as_int @@ -66,43 +68,6 @@ class HealthStatus(IntEnum): NR_CONNECTION_ERROR_CODES = frozenset([HealthStatus.FAILED_NR_CONNECTION.value, HealthStatus.FORCED_DISCONNECT.value]) -def is_valid_file_delivery_location(file_uri): - # Verify whether file directory provided to agent via env var is a valid file URI to determine whether health - # check should run - try: - parsed_uri = urlparse(file_uri) - if not parsed_uri.scheme or not parsed_uri.path: - _logger.warning( - "Configured Agent Control health delivery location is not a complete file URI. Health check will not be " - "enabled. " - ) - return False - - if parsed_uri.scheme != "file": - _logger.warning( - "Configured Agent Control health delivery location does not have a valid scheme. Health check will not be " - "enabled." - ) - return False - - path = Path(parsed_uri.path) - - # Check if the path exists - if not path.exists(): - _logger.warning( - "Configured Agent Control health delivery location does not exist. Health check will not be enabled." - ) - return False - - return True - - except Exception: - _logger.warning( - "Configured Agent Control health delivery location is not valid. Health check will not be enabled." 
- ) - return False - - class AgentControlHealth: _instance_lock = threading.Lock() _instance = None @@ -127,6 +92,7 @@ def __init__(self): self.status_message = HEALTHY_STATUS_MESSAGE self.start_time_unix_nano = None self.pid_file_id_map = {} + self._health_delivery_location_cache = {} @property def health_check_enabled(self): @@ -135,16 +101,87 @@ def health_check_enabled(self): if not agent_control_enabled: return False - return is_valid_file_delivery_location(self.health_delivery_location) + return self.health_delivery_location_is_valid @property def health_delivery_location(self): - # Set a default file path if env var is not set or set to an empty string - health_file_location = ( + file_uri = ( os.environ.get("NEW_RELIC_AGENT_CONTROL_HEALTH_DELIVERY_LOCATION", "") or "file:///newrelic/apm/health" ) - return health_file_location + # Return from cache if already parsed + if file_uri in self._health_delivery_location_cache: + return self._health_delivery_location_cache[file_uri] + + # Parse and add to cache + path = self.parse_health_delivery_location(file_uri) + if path is not None: + self._health_delivery_location_cache[file_uri] = path + + return path + + @property + def health_delivery_location_is_valid(self): + # Verify whether file directory provided to agent via env var is a valid file URI to determine whether health + # check should run + try: + path = self.health_delivery_location + if path is None: + # Warning already logged in parse_health_delivery_location() + return False + + # Check if the path exists + if not path.exists(): + _logger.warning( + "Configured Agent Control health delivery location does not exist. Health check will not be enabled." + ) + return False + + return True + + except Exception: + _logger.warning( + "Configured Agent Control health delivery location is not valid. Health check will not be enabled." 
+ ) + return False + + @classmethod + def parse_health_delivery_location(cls, file_uri): + """Parse the health delivery location and return it as a Path object.""" + + # No built in method to correctly parse file URI to a path on Python < 3.13. + # In the future, Path.from_uri() can be used directly. + + # For now, parse with urllib.parse.urlparse and convert to a Path object. + parsed_uri = urlparse(file_uri) + + # Ensure URI has at least a scheme and path + if not parsed_uri.scheme or not parsed_uri.path: + _logger.warning( + "Configured Agent Control health delivery location is not a complete file URI. Health check will not be enabled." + ) + return None + + # Ensure URI has a file scheme + if parsed_uri.scheme != "file": + _logger.warning( + "Configured Agent Control health delivery location does not have a valid scheme. Health check will not be enabled." + ) + return None + + # Handle Windows systems carefully due to inconsistent path handling + if sys.platform == "win32": + if parsed_uri.netloc: + # Matching behavior of pip where netloc is prepended with a double backslash + # https://github.com/pypa/pip/blob/022248f6484fe87dc0ef5aec3437f4c7971fd14b/pip/download.py#L442 + urlpathname = url2pathname(rf"\\\\{parsed_uri.netloc}{parsed_uri.path}") + return Path(urlpathname) + else: + # If there's no netloc, we use url2pathname to fix leading slashes + return Path(url2pathname(parsed_uri.path)) + else: + # On non-Windows systems we can use the parsed path directly + return Path(parsed_uri.path) @property def is_healthy(self): @@ -185,13 +222,16 @@ def write_to_health_file(self): status_time_unix_nano = time.time_ns() try: - file_path = urlparse(self.health_delivery_location).path + health_dir_path = self.health_delivery_location + if health_dir_path is None: + # Allow except block to handle logging a warning + raise ValueError("Health delivery location is not valid.") + file_id = self.get_file_id() - file_name = f"health-{file_id}.yml" - full_path = 
Path(file_path) / file_name - is_healthy = self.is_healthy + health_file_path = health_dir_path / f"health-{file_id}.yml" + is_healthy = self.is_healthy # Cache property value to avoid multiple calls - with full_path.open("w") as f: + with health_file_path.open("w") as f: f.write(f"healthy: {is_healthy}\n") f.write(f"status: {self.status_message}\n") f.write(f"start_time_unix_nano: {self.start_time_unix_nano}\n") diff --git a/setup.py b/setup.py index f79202676e..883a6d58d8 100644 --- a/setup.py +++ b/setup.py @@ -50,6 +50,7 @@ raise RuntimeError(error_msg) with_setuptools = False +is_windows = sys.platform == "win32" try: from setuptools import setup @@ -89,10 +90,7 @@ def newrelic_agent_next_version(version): with readme_file.open() as f: readme_file_contents = f.read() -if sys.platform == "win32" and python_version > (2, 6): - build_ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, IOError) -else: - build_ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError) +build_ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError, OSError) class BuildExtFailed(Exception): @@ -209,17 +207,24 @@ def _run_setup(): kwargs_tmp = dict(kwargs) if with_extensions: - monotonic_libraries = [] - if with_librt(): - monotonic_libraries = ["rt"] - kwargs_tmp["ext_modules"] = [ Extension("newrelic.packages.wrapt._wrappers", ["newrelic/packages/wrapt/_wrappers.c"]), - Extension( - "newrelic.common._monotonic", ["newrelic/common/_monotonic.c"], libraries=monotonic_libraries - ), - Extension("newrelic.core._thread_utilization", ["newrelic/core/_thread_utilization.c"]), ] + if not is_windows: + # These extensions are only supported on POSIX platforms. 
+ monotonic_libraries = [] + if with_librt(): + monotonic_libraries = ["rt"] + + kwargs_tmp["ext_modules"].append( + Extension( + "newrelic.common._monotonic", ["newrelic/common/_monotonic.c"], libraries=monotonic_libraries + ) + ) + kwargs_tmp["ext_modules"].append( + Extension("newrelic.core._thread_utilization", ["newrelic/core/_thread_utilization.c"]) + ) + kwargs_tmp["cmdclass"] = dict(build_ext=optional_build_ext) setup(**kwargs_tmp) diff --git a/tests/agent_features/conftest.py b/tests/agent_features/conftest.py index a2ee754480..31c2e8e5b0 100644 --- a/tests/agent_features/conftest.py +++ b/tests/agent_features/conftest.py @@ -12,6 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys + +import pytest from testing_support.fixture.event_loop import event_loop from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture from testing_support.fixtures import newrelic_caplog as caplog diff --git a/tests/agent_features/test_agent_control_health_check.py b/tests/agent_features/test_agent_control_health_check.py index a8ceb893e4..4c6263362c 100644 --- a/tests/agent_features/test_agent_control_health_check.py +++ b/tests/agent_features/test_agent_control_health_check.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import re +import sys import threading import time from pathlib import Path @@ -21,11 +22,7 @@ from testing_support.http_client_recorder import HttpClientRecorder from newrelic.config import _reset_configuration_done, initialize -from newrelic.core.agent_control_health import ( - HealthStatus, - agent_control_health_instance, - is_valid_file_delivery_location, -) +from newrelic.core.agent_control_health import HealthStatus, agent_control_health_instance from newrelic.core.agent_protocol import AgentProtocol from newrelic.core.application import Application from newrelic.core.config import finalize_application_settings, global_settings @@ -41,8 +38,29 @@ def get_health_file_contents(tmp_path): @pytest.mark.parametrize("file_uri", ["", "file://", "/test/dir", "foo:/test/dir"]) -def test_invalid_file_directory_supplied(file_uri): - assert not is_valid_file_delivery_location(file_uri) +def test_invalid_file_directory_supplied(monkeypatch, file_uri): + # Setup expected env vars to run agent control health check + monkeypatch.setenv("NEW_RELIC_AGENT_CONTROL_ENABLED", "True") + monkeypatch.setenv("NEW_RELIC_AGENT_CONTROL_HEALTH_DELIVERY_LOCATION", file_uri) + + agent_control_instance = agent_control_health_instance() + assert not agent_control_instance.health_delivery_location_is_valid + + +@pytest.mark.skipif(sys.platform != "win32", reason="Only valid for Windows") +@pytest.mark.parametrize("leading_slash", [True, False], ids=["leading_slash", "no_leading_slash"]) +def test_inconsistent_paths_on_windows(monkeypatch, tmp_path, leading_slash): + file_uri = tmp_path.as_uri() + if not leading_slash: + assert file_uri.startswith("file:///") + file_uri = file_uri.replace("file:///", "file://") + + # Setup expected env vars to run agent control health check + monkeypatch.setenv("NEW_RELIC_AGENT_CONTROL_ENABLED", "True") + monkeypatch.setenv("NEW_RELIC_AGENT_CONTROL_HEALTH_DELIVERY_LOCATION", file_uri) + + agent_control_instance = agent_control_health_instance() + assert
agent_control_instance.health_delivery_location_is_valid def test_agent_control_not_enabled(monkeypatch, tmp_path): diff --git a/tests/agent_features/test_async_generator_trace.py b/tests/agent_features/test_async_generator_trace.py index 7d04a9a321..2aecfdc5ed 100644 --- a/tests/agent_features/test_async_generator_trace.py +++ b/tests/agent_features/test_async_generator_trace.py @@ -13,7 +13,6 @@ # limitations under the License. import functools -import sys import time import pytest diff --git a/tests/agent_features/test_configuration.py b/tests/agent_features/test_configuration.py index 9586613bf8..41d929f131 100644 --- a/tests/agent_features/test_configuration.py +++ b/tests/agent_features/test_configuration.py @@ -17,11 +17,11 @@ import logging import pathlib import sys -import tempfile import urllib.parse as urlparse import pytest from testing_support.fixtures import override_generic_settings +from testing_support.util import NamedTemporaryFile from newrelic.api.exceptions import ConfigurationError from newrelic.common.object_names import callable_name @@ -637,7 +637,7 @@ def test_initialize(): def test_initialize_raises_if_config_does_not_match_previous(): error_message = "Configuration has already been done against differing configuration file or environment.*" with pytest.raises(ConfigurationError, match=error_message): - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.seek(0) @@ -646,7 +646,7 @@ def test_initialize_raises_if_config_does_not_match_previous(): def test_initialize_via_config_file(): _reset_configuration_done() - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.seek(0) @@ -667,7 +667,7 @@ def test_initialize_config_file_does_not_exist(): def test_initialize_environment(): _reset_configuration_done() - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.seek(0) @@ -676,7 
+676,7 @@ def test_initialize_environment(): def test_initialize_log_level(): _reset_configuration_done() - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.seek(0) @@ -685,7 +685,7 @@ def test_initialize_log_level(): def test_initialize_log_file(): _reset_configuration_done() - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.seek(0) @@ -700,7 +700,7 @@ def test_initialize_config_file_feature_flag(feature_flag, expect_warning, logge apply_config_setting(settings, "feature_flag", feature_flag) _reset_configuration_done() - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.seek(0) @@ -759,7 +759,7 @@ def test_initialize_config_file_with_traces(setting_name, setting_value, expect_ apply_config_setting(settings, setting_name, setting_value) _reset_configuration_done() - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.seek(0) @@ -951,7 +951,7 @@ def test_initialize_developer_mode(section, expect_error, logger): _reset_instrumentation_done() _reset_config_parser() - with tempfile.NamedTemporaryFile() as f: + with NamedTemporaryFile() as f: f.write(newrelic_ini_contents) f.write(section) f.seek(0) @@ -1019,7 +1019,7 @@ def test_toml_parse_development(): _reset_config_parser() _reset_instrumentation_done() - with tempfile.NamedTemporaryFile(suffix=".toml") as f: + with NamedTemporaryFile(suffix=".toml") as f: f.write(newrelic_toml_contents) f.seek(0) @@ -1041,7 +1041,7 @@ def test_toml_parse_production(): _reset_config_parser() _reset_instrumentation_done() - with tempfile.NamedTemporaryFile(suffix=".toml") as f: + with NamedTemporaryFile(suffix=".toml") as f: f.write(newrelic_toml_contents) f.seek(0) @@ -1061,7 +1061,7 @@ def test_config_file_path_types_ini(pathtype): _reset_config_parser() _reset_instrumentation_done() - with 
tempfile.NamedTemporaryFile(suffix=".ini") as f: + with NamedTemporaryFile(suffix=".ini") as f: f.write(newrelic_ini_contents) f.seek(0) @@ -1081,7 +1081,7 @@ def test_config_file_path_types_toml(pathtype): _reset_config_parser() _reset_instrumentation_done() - with tempfile.NamedTemporaryFile(suffix=".toml") as f: + with NamedTemporaryFile(suffix=".toml") as f: f.write(newrelic_toml_contents) f.seek(0) diff --git a/tests/agent_features/test_coroutine_transaction.py b/tests/agent_features/test_coroutine_transaction.py index b3890b2b55..2b447b00c0 100644 --- a/tests/agent_features/test_coroutine_transaction.py +++ b/tests/agent_features/test_coroutine_transaction.py @@ -13,7 +13,6 @@ # limitations under the License. import asyncio -import sys import pytest from testing_support.fixtures import capture_transaction_metrics, override_generic_settings diff --git a/tests/agent_features/test_lambda_handler.py b/tests/agent_features/test_lambda_handler.py index 9378606547..f6160564de 100644 --- a/tests/agent_features/test_lambda_handler.py +++ b/tests/agent_features/test_lambda_handler.py @@ -145,6 +145,8 @@ def test_lambda_malformed_api_gateway_payload(monkeypatch): _malformed_request_attributes = {"agent": ["aws.requestId", "aws.lambda.arn"], "user": [], "intrinsic": []} +# The lambda_hander has been deprecated for 3+ years +@pytest.mark.skip(reason="The lambda_handler has been deprecated") @validate_transaction_trace_attributes(_malformed_request_attributes) @validate_transaction_event_attributes(_malformed_request_attributes) @override_application_settings(_override_settings) diff --git a/tests/agent_features/test_logs_in_context.py b/tests/agent_features/test_logs_in_context.py index 1487928f95..a6764c41cc 100644 --- a/tests/agent_features/test_logs_in_context.py +++ b/tests/agent_features/test_logs_in_context.py @@ -90,7 +90,7 @@ def test_newrelic_logger_min_extra_keys_no_error(log_buffer): assert isinstance(timestamp, int) assert isinstance(thread_id, int) assert 
isinstance(process_id, int) - assert filename.endswith("/test_logs_in_context.py") + assert filename.endswith("test_logs_in_context.py") assert isinstance(line_number, int) expected = { @@ -138,7 +138,7 @@ def test_newrelic_logger_no_error(log_buffer): assert isinstance(timestamp, int) assert isinstance(thread_id, int) assert isinstance(process_id, int) - assert filename.endswith("/test_logs_in_context.py") + assert filename.endswith("test_logs_in_context.py") assert isinstance(line_number, int) expected = { @@ -191,7 +191,7 @@ def test_newrelic_logger_error_inside_transaction_no_stack_trace(log_buffer): assert isinstance(timestamp, int) assert isinstance(thread_id, int) assert isinstance(process_id, int) - assert filename.endswith("/test_logs_in_context.py") + assert filename.endswith("test_logs_in_context.py") assert isinstance(line_number, int) expected = { @@ -238,7 +238,7 @@ def test_newrelic_logger_error_inside_transaction_with_stack_trace(log_buffer_wi assert isinstance(timestamp, int) assert isinstance(thread_id, int) assert isinstance(process_id, int) - assert filename.endswith("/test_logs_in_context.py") + assert filename.endswith("test_logs_in_context.py") assert isinstance(line_number, int) assert isinstance(stack_trace, str) assert stack_trace == expected_stack_trace @@ -281,7 +281,7 @@ def test_newrelic_logger_error_outside_transaction_no_stack_trace(log_buffer): assert isinstance(timestamp, int) assert isinstance(thread_id, int) assert isinstance(process_id, int) - assert filename.endswith("/test_logs_in_context.py") + assert filename.endswith("test_logs_in_context.py") assert isinstance(line_number, int) expected = { @@ -326,7 +326,7 @@ def test_newrelic_logger_error_outside_transaction_with_stack_trace(log_buffer_w assert isinstance(timestamp, int) assert isinstance(thread_id, int) assert isinstance(process_id, int) - assert filename.endswith("/test_logs_in_context.py") + assert filename.endswith("test_logs_in_context.py") assert 
isinstance(line_number, int) assert isinstance(stack_trace, str) assert stack_trace == expected_stack_trace diff --git a/tests/agent_unittests/conftest.py b/tests/agent_unittests/conftest.py index 65f22b2079..5b1ce10755 100644 --- a/tests/agent_unittests/conftest.py +++ b/tests/agent_unittests/conftest.py @@ -13,12 +13,12 @@ # limitations under the License. import sys -import tempfile from importlib import reload import pytest from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture from testing_support.fixtures import newrelic_caplog as caplog +from testing_support.util import NamedTemporaryFile from newrelic.core.agent import agent_instance @@ -35,6 +35,9 @@ ) +FAILING_ON_WINDOWS = pytest.mark.xfail(sys.platform == "win32", reason="TODO: Fix this test on Windows") + + class FakeProtos: Span = object() SpanBatch = object() @@ -54,7 +57,7 @@ def global_settings(request, monkeypatch): if "env" in request.fixturenames: env = request.getfixturevalue("env") for k, v in env.items(): - monkeypatch.setenv(k, v) + monkeypatch.setenv(k, str(v)) import newrelic.core.config as core_config from newrelic import config @@ -69,7 +72,7 @@ def global_settings(request, monkeypatch): reload(core_config) reload(config) - with tempfile.NamedTemporaryFile() as ini_file: + with NamedTemporaryFile() as ini_file: ini_file.write(ini_contents) ini_file.seek(0) diff --git a/tests/agent_unittests/test_agent_protocol.py b/tests/agent_unittests/test_agent_protocol.py index 4bcf7dc579..972d8be5bf 100644 --- a/tests/agent_unittests/test_agent_protocol.py +++ b/tests/agent_unittests/test_agent_protocol.py @@ -522,7 +522,8 @@ def test_audit_logging(): protocol = AgentProtocol(settings, client_cls=HttpClientRecorder) protocol.send("preconnect") - with Path(f.name).open() as f: + audit_log_path = Path(f.name) + with audit_log_path.open() as f: audit_log_contents = f.read() assert audit_log_contents.startswith("*\n") diff --git 
a/tests/agent_unittests/test_aws_utilization_caching.py b/tests/agent_unittests/test_aws_utilization_caching.py index e62eaca406..885aa6463a 100644 --- a/tests/agent_unittests/test_aws_utilization_caching.py +++ b/tests/agent_unittests/test_aws_utilization_caching.py @@ -39,7 +39,7 @@ def no_token(cls): def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/agent_unittests/test_encoding_utils.py b/tests/agent_unittests/test_encoding_utils.py index 9494980194..9a0a937041 100644 --- a/tests/agent_unittests/test_encoding_utils.py +++ b/tests/agent_unittests/test_encoding_utils.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys from pathlib import Path import pytest @@ -64,20 +65,24 @@ def _generator(): yield from range(1, 4) -@pytest.mark.parametrize( - "input_,expected", - [ - (10, "10"), - (10.0, "10.0"), - ("my_string", '"my_string"'), - (b"my_bytes", '"my_bytes"'), - ({"id": 1, "name": "test", "NoneType": None}, '{"id":1,"name":"test","NoneType":null}'), - (_generator(), "[1,2,3]"), - (tuple(range(4, 7)), "[4,5,6]"), - (Path("test/path/file.txt"), '"test/path/file.txt"'), - ], - ids=["int", "float", "str", "bytes", "dict", "generator", "iterable", "Path"], -) +JSON_ENCODE_TESTS = [ + pytest.param(10, "10", id="int"), + pytest.param(10.0, "10.0", id="float"), + pytest.param("my_string", '"my_string"', id="str"), + pytest.param(b"my_bytes", '"my_bytes"', id="bytes"), + pytest.param({"id": 1, "name": "test", "NoneType": None}, '{"id":1,"name":"test","NoneType":null}', id="dict"), + pytest.param(_generator(), "[1,2,3]", id="generator"), + pytest.param(tuple(range(4, 7)), "[4,5,6]", id="iterable"), +] + +# Add a Path object test that's platform dependent +if sys.platform == "win32": + JSON_ENCODE_TESTS.append(pytest.param(Path("test\\path\\file.txt"), '"test\\\\path\\\\file.txt"', id="Path")) 
+else: + JSON_ENCODE_TESTS.append(pytest.param(Path("test/path/file.txt"), '"test/path/file.txt"', id="Path")) + + +@pytest.mark.parametrize("input_,expected", JSON_ENCODE_TESTS) def test_json_encode(input_, expected): output = json_encode(input_) assert output == expected diff --git a/tests/agent_unittests/test_http_client.py b/tests/agent_unittests/test_http_client.py index 7c13406330..32af0baf54 100644 --- a/tests/agent_unittests/test_http_client.py +++ b/tests/agent_unittests/test_http_client.py @@ -20,6 +20,7 @@ from io import StringIO import pytest +from conftest import FAILING_ON_WINDOWS from testing_support.certs import CERT_PATH from testing_support.mock_external_http_server import MockExternalHTTPServer @@ -226,6 +227,7 @@ def test_http_close_connection_in_context_manager(): client.close_connection() +@FAILING_ON_WINDOWS @pytest.mark.parametrize( "client_cls,method,threshold", ( diff --git a/tests/agent_unittests/test_utilization_settings.py b/tests/agent_unittests/test_utilization_settings.py index 11ded562c4..40c3bd1d94 100644 --- a/tests/agent_unittests/test_utilization_settings.py +++ b/tests/agent_unittests/test_utilization_settings.py @@ -12,12 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import os -import tempfile from importlib import reload import pytest from testing_support.fixtures import Environ +from testing_support.util import NamedTemporaryFile # these will be reloaded for each test import newrelic.config @@ -73,8 +72,7 @@ def reset_agent_config(ini_contents, env_dict): @function_wrapper def reset(wrapped, instance, args, kwargs): - with Environ(env_dict): - ini_file = tempfile.NamedTemporaryFile() + with Environ(env_dict), NamedTemporaryFile() as ini_file: ini_file.write(ini_contents) ini_file.seek(0) diff --git a/tests/cross_agent/conftest.py b/tests/cross_agent/conftest.py index 36e62fae56..8cc766d704 100644 --- a/tests/cross_agent/conftest.py +++ b/tests/cross_agent/conftest.py @@ -12,7 +12,9 @@ # See the License for the specific language governing permissions and # limitations under the License. +import sys +import pytest from testing_support.fixtures import collector_agent_registration_fixture, collector_available_fixture _default_settings = { @@ -28,3 +30,5 @@ collector_agent_registration = collector_agent_registration_fixture( app_name="Python Agent Test (cross_agent_tests)", default_settings=_default_settings ) + +SKIP_ON_WINDOWS = pytest.mark.xfail(sys.platform == "win32", reason="This feature is not supported on Windows") diff --git a/tests/cross_agent/test_agent_attributes.py b/tests/cross_agent/test_agent_attributes.py index 40f1baa36e..778c816c0f 100644 --- a/tests/cross_agent/test_agent_attributes.py +++ b/tests/cross_agent/test_agent_attributes.py @@ -44,7 +44,7 @@ def _default_settings(): def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_aws_utilization_data.py b/tests/cross_agent/test_aws_utilization_data.py index 643735e71c..27f0405d57 100644 --- a/tests/cross_agent/test_aws_utilization_data.py +++ b/tests/cross_agent/test_aws_utilization_data.py @@ -29,7 +29,7 @@ def _load_tests(): - with FIXTURE.open() as 
fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_azure_utilization_data.py b/tests/cross_agent/test_azure_utilization_data.py index baabd5a7b0..d99d9ea0c9 100644 --- a/tests/cross_agent/test_azure_utilization_data.py +++ b/tests/cross_agent/test_azure_utilization_data.py @@ -29,7 +29,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_boot_id_utilization_data.py b/tests/cross_agent/test_boot_id_utilization_data.py index 3d0f6dec58..bbc988c79b 100644 --- a/tests/cross_agent/test_boot_id_utilization_data.py +++ b/tests/cross_agent/test_boot_id_utilization_data.py @@ -14,10 +14,10 @@ import json import sys -import tempfile from pathlib import Path import pytest +from testing_support.util import NamedTemporaryFile from testing_support.validators.validate_internal_metrics import validate_internal_metrics from newrelic.common.system_info import BootIdUtilization @@ -39,7 +39,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) @@ -57,7 +57,7 @@ def __init__(self, boot_id): def __enter__(self): if self.boot_id is not None: - self.boot_id_file = tempfile.NamedTemporaryFile() + self.boot_id_file = NamedTemporaryFile().__enter__() self.boot_id_file.write(self.boot_id.encode("utf8")) self.boot_id_file.seek(0) BootIdUtilization.METADATA_URL = self.boot_id_file.name @@ -68,7 +68,7 @@ def __enter__(self): def __exit__(self, *args, **kwargs): sys.platform = SYS_PLATFORM if self.boot_id: - del self.boot_id_file # close and thus delete the tempfile + self.boot_id_file.__exit__(*args, **kwargs) # close and thus delete the tempfile @pytest.mark.parametrize(_parameters, _boot_id_tests) diff --git a/tests/cross_agent/test_cat_map.py b/tests/cross_agent/test_cat_map.py index 01ede0b19a..3322fc7d97 100644 --- 
a/tests/cross_agent/test_cat_map.py +++ b/tests/cross_agent/test_cat_map.py @@ -67,7 +67,7 @@ def server(): def load_tests(): result = [] - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: tests = json.load(fh) for test in tests: diff --git a/tests/cross_agent/test_collector_hostname.py b/tests/cross_agent/test_collector_hostname.py index 57f0654b60..6da387fac6 100644 --- a/tests/cross_agent/test_collector_hostname.py +++ b/tests/cross_agent/test_collector_hostname.py @@ -16,11 +16,11 @@ import multiprocessing import os import sys -import tempfile from importlib import reload from pathlib import Path import pytest +from testing_support.util import NamedTemporaryFile FIXTURE = Path(__file__).parent / "fixtures" / "collector_hostname.json" @@ -29,7 +29,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) @@ -70,11 +70,12 @@ def _test_collector_hostname( reload(core_config) reload(config) - ini_file = tempfile.NamedTemporaryFile() - ini_file.write(ini_contents.encode("utf-8")) - ini_file.seek(0) + with NamedTemporaryFile() as ini_file: + ini_file.write(ini_contents.encode("utf-8")) + ini_file.seek(0) + + config.initialize(ini_file.name) - config.initialize(ini_file.name) settings = core_config.global_settings() assert settings.host == hostname diff --git a/tests/cross_agent/test_datastore_instance.py b/tests/cross_agent/test_datastore_instance.py index e6e3f61c31..a35b3e65dd 100644 --- a/tests/cross_agent/test_datastore_instance.py +++ b/tests/cross_agent/test_datastore_instance.py @@ -40,7 +40,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_distributed_tracing.py b/tests/cross_agent/test_distributed_tracing.py index ae3097db2f..3c7314b31d 100644 --- a/tests/cross_agent/test_distributed_tracing.py +++ 
b/tests/cross_agent/test_distributed_tracing.py @@ -52,7 +52,7 @@ def load_tests(): result = [] - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: tests = json.load(fh) for test in tests: diff --git a/tests/cross_agent/test_docker_container_id.py b/tests/cross_agent/test_docker_container_id.py index a54c982c88..7aaedae93c 100644 --- a/tests/cross_agent/test_docker_container_id.py +++ b/tests/cross_agent/test_docker_container_id.py @@ -16,6 +16,7 @@ from pathlib import Path import pytest +from conftest import SKIP_ON_WINDOWS import newrelic.common.utilization as u @@ -28,7 +29,7 @@ def _load_docker_test_attributes(): """ test_cases = DOCKER_FIXTURE / "cases.json" - with test_cases.open() as fh: + with test_cases.open(encoding="utf-8") as fh: json_list = json.load(fh) docker_test_attributes = [(json_record["filename"], json_record["containerId"]) for json_record in json_list] return docker_test_attributes @@ -46,6 +47,7 @@ def _mock_open(path, mode): return _mock_open +@SKIP_ON_WINDOWS @pytest.mark.parametrize("filename, containerId", _load_docker_test_attributes()) def test_docker_container_id_v1(monkeypatch, filename, containerId): path = DOCKER_FIXTURE / filename diff --git a/tests/cross_agent/test_docker_container_id_v2.py b/tests/cross_agent/test_docker_container_id_v2.py index 5603584668..3fbfcff846 100644 --- a/tests/cross_agent/test_docker_container_id_v2.py +++ b/tests/cross_agent/test_docker_container_id_v2.py @@ -16,6 +16,7 @@ from pathlib import Path import pytest +from conftest import SKIP_ON_WINDOWS import newrelic.common.utilization as u @@ -28,7 +29,7 @@ def _load_docker_test_attributes(): """ test_cases = DOCKER_FIXTURE / "cases.json" - with test_cases.open() as fh: + with test_cases.open(encoding="utf-8") as fh: json_list = json.load(fh) docker_test_attributes = [(json_record["filename"], json_record["containerId"]) for json_record in json_list] return docker_test_attributes @@ -46,6 +47,7 @@ def _mock_open(path, mode): return 
_mock_open +@SKIP_ON_WINDOWS @pytest.mark.parametrize("filename, containerId", _load_docker_test_attributes()) def test_docker_container_id_v2(monkeypatch, filename, containerId): path = DOCKER_FIXTURE / filename diff --git a/tests/cross_agent/test_gcp_utilization_data.py b/tests/cross_agent/test_gcp_utilization_data.py index 1439c92e2b..3d79eba20b 100644 --- a/tests/cross_agent/test_gcp_utilization_data.py +++ b/tests/cross_agent/test_gcp_utilization_data.py @@ -29,7 +29,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_labels_and_rollups.py b/tests/cross_agent/test_labels_and_rollups.py index a16aac9c5a..ba74f8ca58 100644 --- a/tests/cross_agent/test_labels_and_rollups.py +++ b/tests/cross_agent/test_labels_and_rollups.py @@ -24,7 +24,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_lambda_event_source.py b/tests/cross_agent/test_lambda_event_source.py index 74ca28e5d7..325a920f6c 100644 --- a/tests/cross_agent/test_lambda_event_source.py +++ b/tests/cross_agent/test_lambda_event_source.py @@ -28,13 +28,13 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: for test in json.loads(fh.read()): test_name = test.pop("name") test_file = f"{test_name}.json" path = FIXTURE_DIR / "lambda_event_source" / test_file - with path.open() as fh: + with path.open(encoding="utf-8") as fh: events[test_name] = json.loads(fh.read()) tests[test_name] = test diff --git a/tests/cross_agent/test_pcf_utilization_data.py b/tests/cross_agent/test_pcf_utilization_data.py index 4594a55a66..d25a7b3257 100644 --- a/tests/cross_agent/test_pcf_utilization_data.py +++ b/tests/cross_agent/test_pcf_utilization_data.py @@ -29,7 +29,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with 
FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_rules.py b/tests/cross_agent/test_rules.py index ca1eb8d354..e4d2fd1b0a 100644 --- a/tests/cross_agent/test_rules.py +++ b/tests/cross_agent/test_rules.py @@ -27,7 +27,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) diff --git a/tests/cross_agent/test_sql_obfuscation.py b/tests/cross_agent/test_sql_obfuscation.py index 2d166b395f..178625cdf3 100644 --- a/tests/cross_agent/test_sql_obfuscation.py +++ b/tests/cross_agent/test_sql_obfuscation.py @@ -27,7 +27,7 @@ def load_tests(): result = [] - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: tests = json.load(fh) for test in tests: diff --git a/tests/cross_agent/test_transaction_segment_terms.py b/tests/cross_agent/test_transaction_segment_terms.py index 49dc6a19e5..1463bea7b6 100644 --- a/tests/cross_agent/test_transaction_segment_terms.py +++ b/tests/cross_agent/test_transaction_segment_terms.py @@ -32,7 +32,7 @@ def load_tests(): result = [] - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: tests = json.load(fh) for test in tests: diff --git a/tests/cross_agent/test_utilization_configs.py b/tests/cross_agent/test_utilization_configs.py index 9534d65756..ffabbb625b 100644 --- a/tests/cross_agent/test_utilization_configs.py +++ b/tests/cross_agent/test_utilization_configs.py @@ -15,7 +15,6 @@ import json import os import sys -import tempfile from importlib import reload from pathlib import Path @@ -24,6 +23,7 @@ # NOTE: the test_utilization_settings_from_env_vars test mocks several of the # methods in newrelic.core.data_collector and does not put them back! 
from testing_support.mock_http_client import create_client_cls +from testing_support.util import NamedTemporaryFile import newrelic.core.config from newrelic.common.object_wrapper import function_wrapper @@ -37,7 +37,7 @@ def _load_tests(): - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: js = fh.read() return json.loads(js) @@ -132,19 +132,20 @@ def _patch_boot_id_file(wrapped, instance, args, kwargs): boot_id_file = None initial_sys_platform = sys.platform - if test.get("input_boot_id"): - boot_id_file = tempfile.NamedTemporaryFile() - boot_id_file.write(test.get("input_boot_id")) - boot_id_file.seek(0) - BootIdUtilization.METADATA_URL = boot_id_file.name - sys.platform = "linux-mock-testing" # ensure boot_id is gathered - else: - # do not gather boot_id at all, this will ensure there is nothing - # extra in the gathered utilizations data - sys.platform = "not-linux" - try: - return wrapped(*args, **kwargs) + try: + if test.get("input_boot_id"): + with NamedTemporaryFile() as boot_id_file: + boot_id_file.write(test.get("input_boot_id")) + boot_id_file.seek(0) + BootIdUtilization.METADATA_URL = boot_id_file.name + sys.platform = "linux-mock-testing" # ensure boot_id is gathered + return wrapped(*args, **kwargs) + else: + # do not gather boot_id at all, this will ensure there is nothing + # extra in the gathered utilizations data + sys.platform = "not-linux" + return wrapped(*args, **kwargs) finally: del boot_id_file # close and thus delete the tempfile sys.platform = initial_sys_platform diff --git a/tests/cross_agent/test_w3c_trace_context.py b/tests/cross_agent/test_w3c_trace_context.py index 72745d3b5b..bc0de4f003 100644 --- a/tests/cross_agent/test_w3c_trace_context.py +++ b/tests/cross_agent/test_w3c_trace_context.py @@ -62,7 +62,7 @@ def load_tests(): result = [] - with FIXTURE.open() as fh: + with FIXTURE.open(encoding="utf-8") as fh: tests = json.load(fh) for test in tests: diff --git a/tests/testing_support/db_settings.py
b/tests/testing_support/db_settings.py index cb51a01e23..f11e876cbf 100644 --- a/tests/testing_support/db_settings.py +++ b/tests/testing_support/db_settings.py @@ -13,9 +13,6 @@ # limitations under the License. import os -import pwd - -USER = pwd.getpwuid(os.getuid()).pw_name def postgresql_settings(): diff --git a/tests/testing_support/fixtures.py b/tests/testing_support/fixtures.py index ae4403a101..8f369c3a0f 100644 --- a/tests/testing_support/fixtures.py +++ b/tests/testing_support/fixtures.py @@ -257,9 +257,16 @@ def _collector_agent_registration_fixture(request): @pytest.fixture def collector_available_fixture(request, collector_agent_registration): - application = application_instance() - active = application.active - assert active + application = collector_agent_registration + settings = global_settings() + + # Wait for the application to become active. + timeout = (settings.startup_timeout or 0) + 10.0 + while not application.active and timeout > 0: + time.sleep(0.1) + timeout -= 0.1 + + assert application.active, "Application failed to activate after 10 seconds." def raise_background_exceptions(timeout=5.0): diff --git a/tests/testing_support/util.py b/tests/testing_support/util.py index 86d5505ea3..6d31338ee4 100644 --- a/tests/testing_support/util.py +++ b/tests/testing_support/util.py @@ -14,8 +14,11 @@ import re import socket +import sys +import tempfile import time from functools import wraps +from pathlib import Path def _to_int(version_str): @@ -83,3 +86,27 @@ def wrapper(*args, **kwargs): return wrapper return decorator + + +def NamedTemporaryFile(*args, **kwargs): + """A wrapper around tempfile.NamedTemporaryFile that fixes issues with file flags on Windows.""" + if sys.platform == "win32": + # Set delete=False to prevent file flags being set incorrectly on Windows. 
+ kwargs["delete"] = False + + # Create the temporary file + temp_file = tempfile.NamedTemporaryFile(*args, **kwargs) + temp_file.path = Path(temp_file.name) # Add path attribute for convenience + + # Patch close() to manually remove the file; with statements look up __exit__ on the type, so instance-level __exit__ patches are never invoked + original_close = temp_file.close + + def remove_on_close(*args, **kwargs): + original_close(*args, **kwargs) + # Clean up the file manually + if temp_file.path.exists(): + temp_file.path.unlink() + + temp_file.close = remove_on_close + + return temp_file diff --git a/tox.ini b/tox.ini index bb40a63c07..d3c347f7bf 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,10 @@ ; Formatting Conventions ; Each part of the env name ; 1. Services required to be present to run test suite. Allows us to select which Github Actions runner to place the test suite on. -; Examples: postgres, mysql, python (pure python, no service required) +; Examples: +; * postgres, mysql (require specified database to be running) +; * grpc (no parallel execution allowed, and requires grpc compilation) +; * linux, macos, windows (OS specific runners, no services required) ; 2. Folder under tests/ that includes the test suite. ; Requires an entry under changedir to run the test suite from the correct folder. ; Should be prepended to any deps definitions as well to prevent environments from colliding. @@ -28,22 +31,43 @@ ; 5. With or without New Relic C extensions (Optional). Used for testing agent features.
; Examples: with_extensions, without_extensions ; envlist = -; python-agent_features-pypy310-without_extensions, -; python-agent_streaming-py37-{with,without}_extensions, +; linux-agent_features-pypy310-without_extensions, +; linux-agent_streaming-py37-{with,without}_extensions, ; ; Full Format: ; services_required-tests_folder-python_version-library_and_version[optional]-with/without_c_extensions[optional] ; ; Full Examples: ; - memcached-datastore_bmemcached-py37-memcached030 -; - python-agent_unittests-py38-with_extensions -; - python-adapter_gevent-py39 +; - linux-agent_unittests-py38-with_extensions +; - linux-adapter_gevent-py39 [tox] setupdir = {toxinidir} ; Fail tests when interpreters are missing. skip_missing_interpreters = false envlist = + # Linux Core Agent Test Suite + linux-agent_features-py37-{with,without}_extensions, + {linux,linux_arm64}-agent_features-{py38,py39,py310,py311,py312,py313}-{with,without}_extensions, + {linux,linux_arm64}-agent_features-pypy310-without_extensions, + linux-agent_streaming-py37-protobuf06-{with,without}_extensions, + {linux,linux_arm64}-agent_streaming-{py38,py39,py310,py311,py312,py313}-protobuf06-{with,without}_extensions, + {linux,linux_arm64}-agent_streaming-py39-protobuf{03,0319,04,05}-{with,without}_extensions, + linux-agent_unittests-py37-{with,without}_extensions, + {linux,linux_arm64}-agent_unittests-{py38,py39,py310,py311,py312,py313}-{with,without}_extensions, + {linux,linux_arm64}-agent_unittests-pypy310-without_extensions, + linux-cross_agent-py37-{with,without}_extensions, + {linux,linux_arm64}-cross_agent-{py38,py39,py310,py311,py312,py313}-{with,without}_extensions, + + # Windows Core Agent Test Suite + {windows,windows_arm64}-agent_features-py313-{with,without}_extensions, + # Windows grpcio wheels don't appear to be installable for Arm64 despite being available + windows-agent_streaming-py313-protobuf06-{with,without}_extensions, + 
{windows,windows_arm64}-agent_unittests-py313-{with,without}_extensions, + {windows,windows_arm64}-cross_agent-py313-{with,without}_extensions, + + # Integration Tests (only run on Linux) cassandra-datastore_cassandradriver-{py38,py39,py310,py311,py312,pypy310}-cassandralatest, elasticsearchserver07-datastore_elasticsearch-{py37,py38,py39,py310,py311,py312,py313,pypy310}-elasticsearch07, elasticsearchserver08-datastore_elasticsearch-{py37,py38,py39,py310,py311,py312,py313,pypy310}-elasticsearch08, @@ -78,97 +102,98 @@ envlist = postgres16-datastore_psycopg2cffi-{py37,py38,py39,py310,py311,py312}-psycopg2cffilatest, postgres16-datastore_pyodbc-{py37,py38,py39,py310,py311,py312,py313}-pyodbclatest, postgres9-datastore_postgresql-{py37,py38,py39,py310,py311,py312,py313}, - python-adapter_asgiref-{py37,py38,py39,py310,py311,py312,py313,pypy310}-asgireflatest, - python-adapter_asgiref-py310-asgiref{0303,0304,0305,0306,0307}, - python-adapter_cheroot-{py37,py38,py39,py310,py311,py312,py313}, - python-adapter_daphne-{py37,py38,py39,py310,py311,py312,py313}-daphnelatest, - python-adapter_gevent-{py37,py38,py310,py311,py312,py313}, - python-adapter_gunicorn-{py37,py38,py39,py310,py311,py312,py313}-aiohttp03-gunicornlatest, - python-adapter_hypercorn-{py38,py39,py310,py311,py312,py313}-hypercornlatest, - python-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013}, - python-adapter_mcp-{py310,py311,py312,py313,pypy310}, - python-adapter_uvicorn-{py37,py38,py39,py310,py311,py312,py313}-uvicornlatest, - python-adapter_uvicorn-py38-uvicorn014, - python-adapter_waitress-{py37,py38,py39,py310,py311,py312,py313}-waitresslatest, - python-agent_features-{py37,py38,py39,py310,py311,py312,py313}-{with,without}_extensions, - python-agent_features-pypy310-without_extensions, - python-agent_streaming-{py37,py38,py39,py310,py311,py312,py313}-protobuf06-{with,without}_extensions, - python-agent_streaming-py39-protobuf{03,0319,04,05}-{with,without}_extensions, - 
python-agent_unittests-{py37,py38,py39,py310,py311,py312,py313}-{with,without}_extensions, - python-agent_unittests-pypy310-without_extensions, - python-application_celery-{py37,py38,py39,py310,py311,py312,py313,pypy310}-celerylatest, - python-application_celery-py311-celery{0504,0503,0502}, - python-component_djangorestframework-{py37,py38,py39,py310,py311,py312,py313}-djangorestframeworklatest, - python-component_flask_rest-{py38,py39,py310,py311,py312,py313,pypy310}-flaskrestxlatest, - python-component_flask_rest-py37-flaskrestx110, - python-component_graphqlserver-{py37,py38,py39,py310,py311,py312}, + linux-adapter_asgiref-{py37,py38,py39,py310,py311,py312,py313,pypy310}-asgireflatest, + linux-adapter_asgiref-py310-asgiref{0303,0304,0305,0306,0307}, + linux-adapter_cheroot-{py37,py38,py39,py310,py311,py312,py313}, + linux-adapter_daphne-{py37,py38,py39,py310,py311,py312,py313}-daphnelatest, + linux-adapter_gevent-{py37,py38,py310,py311,py312,py313}, + linux-adapter_gunicorn-{py37,py38,py39,py310,py311,py312,py313}-aiohttp03-gunicornlatest, + linux-adapter_hypercorn-{py38,py39,py310,py311,py312,py313}-hypercornlatest, + linux-adapter_hypercorn-py38-hypercorn{0010,0011,0012,0013}, + linux-adapter_mcp-{py310,py311,py312,py313,pypy310}, + linux-adapter_uvicorn-{py37,py38,py39,py310,py311,py312,py313}-uvicornlatest, + linux-adapter_uvicorn-py38-uvicorn014, + linux-adapter_waitress-{py37,py38,py39,py310,py311,py312,py313}-waitresslatest, + linux-adapter_waitress-{py37,py38,py39,py310}-waitress02, + linux-adapter_waitress-{py37,py38,py39}-waitress010404, + linux-agent_features-{py37,py38,py39,py310,py311,py312,py313}-{with,without}_extensions, + linux-agent_features-pypy310-without_extensions, + linux-agent_streaming-{py37,py38,py39,py310,py311,py312,py313}-protobuf06-{with,without}_extensions, + linux-agent_streaming-py39-protobuf{03,0319,04,05}-{with,without}_extensions, + linux-agent_unittests-{py37,py38,py39,py310,py311,py312,py313}-{with,without}_extensions, + 
linux-agent_unittests-pypy310-without_extensions, + linux-application_celery-{py37,py38,py39,py310,py311,py312,py313,pypy310}-celerylatest, + linux-application_celery-py311-celery{0504,0503,0502}, + linux-component_djangorestframework-{py37,py38,py39,py310,py311,py312,py313}-djangorestframeworklatest, + linux-component_flask_rest-{py38,py39,py310,py311,py312,py313,pypy310}-flaskrestxlatest, + linux-component_flask_rest-py37-flaskrestx110, + linux-component_graphqlserver-{py37,py38,py39,py310,py311,py312}, ;; Tests need to be updated to support newer graphql-server/sanic versions - ; python-component_graphqlserver-py313, - python-component_tastypie-{py37,py38,py39,py310,py311,py312,py313,pypy310}-tastypielatest, - python-coroutines_asyncio-{py37,py38,py39,py310,py311,py312,py313,pypy310}, - python-cross_agent-{py37,py38,py39,py310,py311,py312,py313}-{with,without}_extensions, - python-datastore_sqlite-{py37,py38,py39,py310,py311,py312,py313,pypy310}, - python-external_aiobotocore-{py38,py39,py310,py311,py312,py313}-aiobotocorelatest, - python-external_botocore-{py38,py39,py310,py311,py312,py313}-botocorelatest, - python-external_botocore-{py311}-botocorelatest-langchain, - python-external_botocore-py310-botocore0125, - python-external_botocore-py311-botocore0128, - python-external_feedparser-{py37,py38,py39,py310,py311,py312,py313}-feedparser06, - python-external_http-{py37,py38,py39,py310,py311,py312,py313}, - python-external_httplib-{py37,py38,py39,py310,py311,py312,py313,pypy310}, - python-external_httplib2-{py37,py38,py39,py310,py311,py312,py313,pypy310}, - python-external_requests-{py37,py38,py39,py310,py311,py312,py313,pypy310}, - python-external_urllib3-{py37,py38,py39,py310,py311,py312,py313,pypy310}-urllib3latest, - python-external_urllib3-{py37,py312,py313,pypy310}-urllib30126, - python-framework_aiohttp-{py37,py38,py39,py310,py311,py312,py313,pypy310}-aiohttp03, - python-framework_ariadne-{py37,py38,py39,py310,py311,py312,py313}-ariadnelatest, - 
python-framework_ariadne-py37-ariadne{0011,0012,0013}, - python-framework_azurefunctions-{py39,py310,py311,py312}, - python-framework_bottle-{py37,py38,py39,py310,py311,py312,py313,pypy310}-bottle0012, - python-framework_cherrypy-{py37,py38,py39,py310,py311,py312,py313,pypy310}-CherryPylatest, - python-framework_django-{py37,py38,py39,py310,py311,py312,py313}-Djangolatest, - python-framework_django-{py39}-Django{0202,0300,0301,0302,0401}, - python-framework_falcon-{py37,py38,py39,py310,py311,py312,py313,pypy310}-falconlatest, - python-framework_falcon-{py38,py39,py310,py311,py312,py313,pypy310}-falconmaster, - python-framework_fastapi-{py37,py38,py39,py310,py311,py312,py313}, - python-framework_flask-py37-flask020205, - python-framework_flask-{py38,py39,py310,py311,py312,pypy310}-flask02, - ; python-framework_flask-py38-flaskmaster fails, even with Flask-Compress<1.16 and coverage==7.61 for py37,py38 - python-framework_flask-py38-flasklatest, + ; linux-component_graphqlserver-py313, + linux-component_tastypie-{py37,py38,py39,py310,py311,py312,py313,pypy310}-tastypielatest, + linux-coroutines_asyncio-{py37,py38,py39,py310,py311,py312,py313,pypy310}, + linux-datastore_sqlite-{py37,py38,py39,py310,py311,py312,py313,pypy310}, + linux-external_aiobotocore-{py38,py39,py310,py311,py312,py313}-aiobotocorelatest, + linux-external_botocore-{py38,py39,py310,py311,py312,py313}-botocorelatest, + linux-external_botocore-{py311}-botocorelatest-langchain, + linux-external_botocore-py310-botocore0125, + linux-external_botocore-py311-botocore0128, + linux-external_feedparser-{py37,py38,py39,py310,py311,py312,py313}-feedparser06, + linux-external_http-{py37,py38,py39,py310,py311,py312,py313}, + linux-external_httplib-{py37,py38,py39,py310,py311,py312,py313,pypy310}, + linux-external_httplib2-{py37,py38,py39,py310,py311,py312,py313,pypy310}, + linux-external_requests-{py37,py38,py39,py310,py311,py312,py313,pypy310}, + 
linux-external_urllib3-{py37,py38,py39,py310,py311,py312,py313,pypy310}-urllib3latest, + linux-external_urllib3-{py37,py312,py313,pypy310}-urllib30126, + linux-framework_aiohttp-{py37,py38,py39,py310,py311,py312,py313,pypy310}-aiohttp03, + linux-framework_ariadne-{py37,py38,py39,py310,py311,py312,py313}-ariadnelatest, + linux-framework_ariadne-py37-ariadne{0011,0012,0013}, + linux-framework_azurefunctions-{py39,py310,py311,py312}, + linux-framework_bottle-{py37,py38,py39,py310,py311,py312,py313,pypy310}-bottle0012, + linux-framework_cherrypy-{py37,py38,py39,py310,py311,py312,py313,pypy310}-CherryPylatest, + linux-framework_django-{py37,py38,py39,py310,py311,py312,py313}-Djangolatest, + linux-framework_django-{py39}-Django{0202,0300,0301,0302,0401}, + linux-framework_falcon-{py37,py38,py39,py310,py311,py312,py313,pypy310}-falconlatest, + linux-framework_falcon-{py38,py39,py310,py311,py312,py313,pypy310}-falconmaster, + linux-framework_fastapi-{py37,py38,py39,py310,py311,py312,py313}, + linux-framework_flask-py37-flask020205, + linux-framework_flask-{py38,py39,py310,py311,py312,pypy310}-flask02, + ; linux-framework_flask-py38-flaskmaster fails, even with Flask-Compress<1.16 and coverage==7.61 for py37,py38 + linux-framework_flask-py38-flasklatest, ; flaskmaster tests disabled until they can be fixed - python-framework_flask-{py39,py310,py311,py312,py313,pypy310}-flask{latest}, - python-framework_graphene-{py37,py38,py39,py310,py311,py312,py313}-graphenelatest, - python-framework_graphql-{py37,py38,py39,py310,py311,py312,py313,pypy310}-graphql03, - python-framework_graphql-{py37,py38,py39,py310,py311,py312,py313,pypy310}-graphql{latest}, - python-framework_graphql-py37-graphql{0301,0302}, - python-framework_pyramid-{py37,py38,py39,py310,py311,py312,py313,pypy310}-Pyramidlatest, - python-framework_pyramid-{py37,py38,py39,py310,py311,py312,py313,pypy310}-Pyramid0110-cornice, - python-framework_sanic-{py37,py38}-sanic2406, - 
python-framework_sanic-{py39,py310,py311,py312,py313,pypy310}-saniclatest, - python-framework_sanic-{py38,pypy310}-sanic{201207,2112,2290}, - python-framework_starlette-{py310,pypy310}-starlette{0014,0015,0019,0028}, - python-framework_starlette-{py37,py38,py39,py310,py311,py312,py313,pypy310}-starlettelatest, - python-framework_starlette-{py37,py38}-starlette002001, - python-framework_strawberry-{py38,py39,py310,py311,py312}-strawberry02352, - python-framework_strawberry-{py37,py38,py39,py310,py311,py312,py313}-strawberrylatest, - python-framework_tornado-{py38,py39,py310,py311,py312,py313}-tornadolatest, - python-framework_tornado-{py39,py310,py311,py312,py313}-tornadomaster, - python-logger_logging-{py37,py38,py39,py310,py311,py312,py313,pypy310}, - python-logger_loguru-{py37,py38,py39,py310,py311,py312,py313,pypy310}-logurulatest, - python-logger_structlog-{py37,py38,py39,py310,py311,py312,py313,pypy310}-structloglatest, - python-mlmodel_gemini-{py39,py310,py311,py312,py313}, - python-mlmodel_langchain-{py39,py310,py311,py312}, + linux-framework_flask-{py39,py310,py311,py312,py313,pypy310}-flask{latest}, + linux-framework_graphene-{py37,py38,py39,py310,py311,py312,py313}-graphenelatest, + linux-framework_graphql-{py37,py38,py39,py310,py311,py312,py313,pypy310}-graphql03, + linux-framework_graphql-{py37,py38,py39,py310,py311,py312,py313,pypy310}-graphql{latest}, + linux-framework_graphql-py37-graphql{0301,0302}, + linux-framework_pyramid-{py37,py38,py39,py310,py311,py312,py313,pypy310}-Pyramidlatest, + linux-framework_pyramid-{py37,py38,py39,py310,py311,py312,py313,pypy310}-Pyramid0110-cornice, + linux-framework_sanic-{py37,py38}-sanic2406, + linux-framework_sanic-{py39,py310,py311,py312,py313,pypy310}-saniclatest, + linux-framework_sanic-{py38,pypy310}-sanic{201207,2112,2290}, + linux-framework_starlette-{py310,pypy310}-starlette{0014,0015,0019,0028}, + linux-framework_starlette-{py37,py38,py39,py310,py311,py312,py313,pypy310}-starlettelatest, + 
linux-framework_starlette-{py37,py38}-starlette002001, + linux-framework_strawberry-{py38,py39,py310,py311,py312}-strawberry02352, + linux-framework_strawberry-{py37,py38,py39,py310,py311,py312,py313}-strawberrylatest, + linux-framework_tornado-{py38,py39,py310,py311,py312,py313}-tornadolatest, + linux-framework_tornado-{py39,py310,py311,py312,py313}-tornadomaster, + linux-logger_logging-{py37,py38,py39,py310,py311,py312,py313,pypy310}, + linux-logger_loguru-{py37,py38,py39,py310,py311,py312,py313,pypy310}-logurulatest, + linux-logger_structlog-{py37,py38,py39,py310,py311,py312,py313,pypy310}-structloglatest, + linux-mlmodel_gemini-{py39,py310,py311,py312,py313}, + linux-mlmodel_langchain-{py39,py310,py311,py312}, ;; Package not ready for Python 3.13 (uses an older version of numpy) - ; python-mlmodel_langchain-py313, - python-mlmodel_openai-openai0-{py37,py38,py39,py310,py311,py312}, - python-mlmodel_openai-openai107-py312, - python-mlmodel_openai-openailatest-{py37,py38,py39,py310,py311,py312,py313}, - python-mlmodel_sklearn-{py37}-scikitlearn0101, - python-mlmodel_sklearn-{py38,py39,py310,py311,py312,py313}-scikitlearnlatest, - python-template_genshi-{py37,py38,py39,py310,py311,py312,py313}-genshilatest, - python-template_jinja2-{py38,py39,py310,py311,py312,py313}-jinja2latest, - python-template_jinja2-py37-jinja2030103, - python-template_mako-{py37,py38,py39,py310,py311,py312,py313}, + ; linux-mlmodel_langchain-py313, + linux-mlmodel_openai-openai0-{py37,py38,py39,py310,py311,py312}, + linux-mlmodel_openai-openai107-py312, + linux-mlmodel_openai-openailatest-{py37,py38,py39,py310,py311,py312,py313}, + linux-mlmodel_sklearn-{py37}-scikitlearn0101, + linux-mlmodel_sklearn-{py38,py39,py310,py311,py312,py313}-scikitlearnlatest, + linux-template_genshi-{py37,py38,py39,py310,py311,py312,py313}-genshilatest, + linux-template_jinja2-{py38,py39,py310,py311,py312,py313}-jinja2latest, + linux-template_jinja2-py37-jinja2030103, + 
linux-template_mako-{py37,py38,py39,py310,py311,py312,py313}, rabbitmq-messagebroker_pika-{py37,py38,py39,py310,py311,py312,py313,pypy310}-pikalatest, rabbitmq-messagebroker_kombu-{py38,py39,py310,py311,py312,py313,pypy310}-kombulatest, rabbitmq-messagebroker_kombu-{py38,py39,py310,pypy310}-kombu050204, @@ -480,7 +505,7 @@ allowlist_externals = {toxinidir}/.github/scripts/* install_command= - {toxinidir}/.github/scripts/retry.sh 3 pip install {opts} {packages} + pip install {opts} {packages} extras = agent_streaming: infinite-tracing @@ -587,8 +612,8 @@ source = newrelic [coverage:paths] source = newrelic/ - .tox/**/site-packages/newrelic/ - /__w/**/site-packages/newrelic/ + **/site-packages/newrelic/ + **\site-packages\newrelic\ [coverage:html] directory = ${TOX_ENV_DIR-.}/htmlcov