diff --git a/.github/workflows/build-ci-image.yml b/.github/workflows/build-ci-image.yml index 8d56ad35c9..ab183f48a2 100644 --- a/.github/workflows/build-ci-image.yml +++ b/.github/workflows/build-ci-image.yml @@ -97,7 +97,7 @@ jobs: touch "${{ runner.temp }}/digests/${digest#sha256:}" - name: Upload Digest - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 with: name: digests-${{ matrix.cache_tag }} path: ${{ runner.temp }}/digests/* @@ -114,7 +114,7 @@ jobs: steps: - name: Download Digests - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # 5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 with: path: ${{ runner.temp }}/digests pattern: digests-* diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 2337ee8d40..81ed6f6be8 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -97,7 +97,7 @@ jobs: CIBW_TEST_SKIP: "*-win_arm64" - name: Upload Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 with: name: ${{ github.job }}-${{ matrix.wheel }} path: ./wheelhouse/*.whl @@ -134,7 +134,7 @@ jobs: openssl md5 -binary "dist/${tarball}" | xxd -p | tr -d '\n' > "dist/${md5_file}" - name: Upload Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 with: name: ${{ github.job }}-sdist path: | @@ -166,7 +166,7 @@ jobs: environment: ${{ matrix.pypi-instance }} steps: - - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # 5.0.0 + - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 with: path: ./dist/ merge-multiple: true diff --git a/.github/workflows/mega-linter.yml b/.github/workflows/mega-linter.yml index 17e65ed47a..8f74866d43 100644 --- a/.github/workflows/mega-linter.yml +++ b/.github/workflows/mega-linter.yml @@ -68,7 +68,7 @@ jobs: # Upload MegaLinter artifacts - name: Archive production artifacts if: success() || failure() - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 with: name: MegaLinter reports include-hidden-files: "true" diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index fc51fcb08c..711e1324ce 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -100,7 +100,7 @@ jobs: architecture: x64 - name: Download Coverage Artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # 5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 with: pattern: coverage-* path: ./ @@ -134,7 +134,7 @@ jobs: architecture: x64 - name: Download Results Artifacts - uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # 5.0.0 + uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # 6.0.0 with: pattern: results-* path: ./ @@ -196,7 +196,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -206,7 
+206,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -261,7 +261,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -271,7 +271,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -309,7 +309,7 @@ jobs: 3.14 - name: Install uv - uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # 7.1.1 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # 7.1.2 - name: Install Dependencies run: | @@ -333,7 +333,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -343,7 +343,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -381,7 +381,7 @@ jobs: 3.14 - name: Install uv - uses: astral-sh/setup-uv@2ddd2b9cb38ad8efd50337e8ab201519a34c9f24 # 7.1.1 + uses: astral-sh/setup-uv@85856786d1ce8acfbcc2f13a5f3fbd6b938f9f41 # 7.1.2 - name: Install Dependencies run: | @@ -405,7 +405,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -415,7 +415,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -475,7 +475,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -485,7 +485,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -558,7 +558,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: 
actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -568,7 +568,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -638,7 +638,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -648,7 +648,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -719,7 +719,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -729,7 +729,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -804,7 +804,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -814,7 +814,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -869,7 +869,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -879,7 +879,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -959,7 +959,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -969,7 +969,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: 
actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1037,7 +1037,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1047,7 +1047,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1115,7 +1115,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1125,7 +1125,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1193,7 +1193,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1203,7 +1203,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1276,7 +1276,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1286,7 +1286,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1359,7 +1359,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1369,7 +1369,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1438,7 +1438,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: 
actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1448,7 +1448,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1519,7 +1519,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1529,7 +1529,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1599,7 +1599,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1609,7 +1609,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1679,7 +1679,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1689,7 +1689,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1758,7 +1758,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1768,7 +1768,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1836,7 +1836,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1846,7 +1846,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: 
actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -1955,7 +1955,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -1965,7 +1965,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -2035,7 +2035,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -2045,7 +2045,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} @@ -2113,7 +2113,7 @@ jobs: FORCE_COLOR: "true" - name: Upload Coverage Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: coverage-${{ github.job }}-${{ strategy.job-index }} @@ -2123,7 +2123,7 @@ jobs: retention-days: 1 - name: Upload Results Artifacts - uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # 4.6.2 + uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # 5.0.0 if: always() with: name: results-${{ github.job }}-${{ strategy.job-index }} diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml index 2016fe2121..9bf87921d7 100644 --- a/.github/workflows/trivy.yml +++ b/.github/workflows/trivy.yml @@ -61,6 +61,6 @@ jobs: - name: Upload Trivy scan results to GitHub Security tab if: ${{ github.event_name == 'schedule' }} - uses: github/codeql-action/upload-sarif@16140ae1a102900babc80a33c44059580f687047 # 4.30.9 + uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb # 4.31.0 with: sarif_file: "trivy-results.sarif" diff --git a/newrelic/api/application.py b/newrelic/api/application.py index 9aa6d7b6b8..f3a68413fe 100644 --- a/newrelic/api/application.py +++ b/newrelic/api/application.py @@ -156,11 +156,11 @@ def normalize_name(self, name, rule_type="url"): return self._agent.normalize_name(self._name, name, rule_type) return name, False - def compute_sampled(self): + def compute_sampled(self, full_granularity, section, *args, **kwargs): if not self.active or not self.settings.distributed_tracing.enabled: return False - return self._agent.compute_sampled(self._name) + return self._agent.compute_sampled(self._name, full_granularity, section, *args, **kwargs) def application_instance(name=None, activate=True): diff --git a/newrelic/api/transaction.py b/newrelic/api/transaction.py index f90bbde143..f982cc5a87 100644 --- a/newrelic/api/transaction.py +++ b/newrelic/api/transaction.py @@ -637,6 +637,7 @@ def __exit__(self, exc, value, tb): trace_id=self.trace_id, 
loop_time=self._loop_time, root=root_node, + partial_granularity_sampled=hasattr(self, "partial_granularity_sampled"), ) # Clear settings as we are all done and don't need it @@ -1005,7 +1006,7 @@ def _update_agent_attributes(self): def user_attributes(self): return create_attributes(self._custom_params, DST_ALL, self.attribute_filter) - def sampling_algo_compute_sampled_and_priority(self, priority, sampled): + def sampling_algo_compute_sampled_and_priority(self, priority, sampled, sampler_kwargs): # self._priority and self._sampled are set when parsing the W3C tracestate # or newrelic DT headers and may be overridden in _make_sampling_decision # based on the configuration. The only time they are set in here is when the @@ -1015,7 +1016,7 @@ def sampling_algo_compute_sampled_and_priority(self, priority, sampled): priority = float(f"{random.random():.6f}") # noqa: S311 if sampled is None: _logger.debug("No trusted account id found. Sampling decision will be made by adaptive sampling algorithm.") - sampled = self._application.compute_sampled() + sampled = self._application.compute_sampled(**sampler_kwargs) if sampled: priority += 1 return priority, sampled @@ -1024,25 +1025,28 @@ def _compute_sampled_and_priority( self, priority, sampled, - remote_parent_sampled_path, + full_granularity, remote_parent_sampled_setting, - remote_parent_not_sampled_path, remote_parent_not_sampled_setting, ): if self._remote_parent_sampled is None: + section = 0 config = "default" # Use sampling algo. _logger.debug("Sampling decision made based on no remote parent sampling decision present.") elif self._remote_parent_sampled: - setting_path = remote_parent_sampled_path + section = 1 + setting_path = f"distributed_tracing.sampler.{'full_granularity' if full_granularity else 'partial_granularity'}.remote_parent_sampled" config = remote_parent_sampled_setting _logger.debug( "Sampling decision made based on remote_parent_sampled=%s and %s=%s.", self._remote_parent_sampled, setting_path, + config, ) else: # self._remote_parent_sampled is False. - setting_path = remote_parent_not_sampled_path + section = 2 + setting_path = f"distributed_tracing.sampler.{'full_granularity' if full_granularity else 'partial_granularity'}.remote_parent_not_sampled" config = remote_parent_not_sampled_setting _logger.debug( "Sampling decision made based on remote_parent_sampled=%s and %s=%s.", @@ -1063,7 +1067,7 @@ def _compute_sampled_and_priority( _logger.debug( "Let adaptive sampler algorithm decide based on sampled=%s and priority=%s.", sampled, priority ) - priority, sampled = self.sampling_algo_compute_sampled_and_priority(priority, sampled) + priority, sampled = self.sampling_algo_compute_sampled_and_priority(priority, sampled, {"full_granularity": full_granularity, "section": section}) return priority, sampled def _make_sampling_decision(self): @@ -1073,23 +1077,50 @@ def _make_sampling_decision(self): return priority = self._priority sampled = self._sampled - _logger.debug( - "Full granularity tracing is enabled. Asking if full granularity wants to sample.
priority=%s, sampled=%s", - priority, - sampled, - ) - computed_priority, computed_sampled = self._compute_sampled_and_priority( - priority, - sampled, - remote_parent_sampled_path="distributed_tracing.sampler.remote_parent_sampled", - remote_parent_sampled_setting=self.settings.distributed_tracing.sampler.remote_parent_sampled, - remote_parent_not_sampled_path="distributed_tracing.sampler.remote_parent_not_sampled", - remote_parent_not_sampled_setting=self.settings.distributed_tracing.sampler.remote_parent_not_sampled, - ) - _logger.debug("Full granularity sampling decision was %s with priority=%s.", sampled, priority) - self._priority = computed_priority - self._sampled = computed_sampled - self._sampling_decision_made = True + # Compute sampling decision for full granularity. + if self.settings.distributed_tracing.sampler.full_granularity.enabled: + _logger.debug( + "Full granularity tracing is enabled. Asking if full granularity wants to sample. priority=%s, sampled=%s", + priority, + sampled, + ) + computed_priority, computed_sampled = self._compute_sampled_and_priority( + priority, + sampled, + full_granularity=True, + remote_parent_sampled_setting=self.settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled, + remote_parent_not_sampled_setting=self.settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled, + ) + _logger.debug("Full granularity sampling decision was %s with priority=%s.", sampled, priority) + if computed_sampled or not self.settings.distributed_tracing.sampler.partial_granularity.enabled: + self._priority = computed_priority + self._sampled = computed_sampled + self._sampling_decision_made = True + return + + # If full granularity is not going to sample, let partial granularity decide. + if self.settings.distributed_tracing.sampler.partial_granularity.enabled: + _logger.debug("Partial granularity tracing is enabled. Asking if partial granularity wants to sample.") + self._priority, self._sampled = self._compute_sampled_and_priority( + priority, + sampled, + full_granularity=False, + remote_parent_sampled_setting=self.settings.distributed_tracing.sampler.partial_granularity.remote_parent_sampled, + remote_parent_not_sampled_setting=self.settings.distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled, + ) + _logger.debug( + "Partial granularity sampling decision was %s with priority=%s.", self._sampled, self._priority + ) + self._sampling_decision_made = True + if self._sampled: + self.partial_granularity_sampled = True + return + + # This is only reachable if both full and partial granularity tracing are off. + # Set priority=0 and do not sample. This enables DT headers to still be sent + # even if the trace is never sampled. + self._priority = 0 + self._sampled = False def _freeze_path(self): if self._frozen_path is None: diff --git a/newrelic/config.py b/newrelic/config.py index 5367538695..e24e137b52 100644 --- a/newrelic/config.py +++ b/newrelic/config.py @@ -319,6 +319,47 @@ def _process_setting(section, option, getter, mapper): _raise_configuration_error(section, option) +def _process_dt_setting(section, option_p1, option_p2, getter): + try: + # The type of a value is dictated by the getter + # function supplied. + + value1 = getattr(_config_object, getter)(section, option_p1) + value2 = getattr(_config_object, getter)(section, option_p2) + + # Now need to apply the option from the + # configuration file to the internal settings + # object. Walk the object path and assign it. 
+ + target = _settings + fields = option_p1.split(".", 1) + + while True: + if len(fields) == 1: + value = value1 or value2 or "default" + setattr(target, fields[0], value) + break + target = getattr(target, fields[0]) + fields = fields[1].split(".", 1) + + # Cache the configuration so can be dumped out to + # log file when whole main configuration has been + # processed. This ensures that the log file and log + # level entries have been set. + + _cache_object.append((option_p1, value1)) + _cache_object.append((option_p2, value2)) + + except configparser.NoSectionError: + pass + + except configparser.NoOptionError: + pass + + except Exception: + _raise_configuration_error(section, option_p1) + + # Processing of all the settings for specified section except # for log file and log level which are applied separately to # ensure they are set as soon as possible. @@ -405,8 +446,13 @@ def _process_configuration(section): _process_setting(section, "distributed_tracing.enabled", "getboolean", None) _process_setting(section, "distributed_tracing.exclude_newrelic_header", "getboolean", None) _process_setting(section, "distributed_tracing.sampler.adaptive_sampling_target", "getint", None) - _process_setting(section, "distributed_tracing.sampler.remote_parent_sampled", "get", None) - _process_setting(section, "distributed_tracing.sampler.remote_parent_not_sampled", "get", None) + _process_dt_setting(section, "distributed_tracing.sampler.full_granularity.remote_parent_sampled", "distributed_tracing.sampler.remote_parent_sampled", "get") + _process_dt_setting(section, "distributed_tracing.sampler.full_granularity.remote_parent_not_sampled", "distributed_tracing.sampler.remote_parent_not_sampled", "get") + _process_setting(section, "distributed_tracing.sampler.full_granularity.enabled", "getboolean", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.enabled", "getboolean", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.type", "get", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.remote_parent_sampled", "get", None) + _process_setting(section, "distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled", "get", None) _process_setting(section, "span_events.enabled", "getboolean", None) _process_setting(section, "span_events.max_samples_stored", "getint", None) _process_setting(section, "span_events.attributes.enabled", "getboolean", None) diff --git a/newrelic/core/agent.py b/newrelic/core/agent.py index fbfc06b260..7206b2a887 100644 --- a/newrelic/core/agent.py +++ b/newrelic/core/agent.py @@ -581,9 +581,9 @@ def normalize_name(self, app_name, name, rule_type="url"): return application.normalize_name(name, rule_type) - def compute_sampled(self, app_name): + def compute_sampled(self, app_name, full_granularity, section, *args, **kwargs): application = self._applications.get(app_name, None) - return application.compute_sampled() + return application.compute_sampled(full_granularity, section, *args, **kwargs) def _harvest_shutdown_is_set(self): try: @@ -746,7 +746,12 @@ def shutdown_agent(self, timeout=None): self._harvest_thread.start() if self._harvest_thread.is_alive(): - self._harvest_thread.join(timeout) + try: + self._harvest_thread.join(timeout) + except RuntimeError: + # This can occur if the application is killed while in the harvest thread, + # causing shutdown_agent to be called from within the harvest thread. 
+ pass def agent_instance(): diff --git a/newrelic/core/application.py b/newrelic/core/application.py index 3ba8168d60..1368b7fb70 100644 --- a/newrelic/core/application.py +++ b/newrelic/core/application.py @@ -22,8 +22,8 @@ import traceback from functools import partial +from newrelic.core.samplers.sampler_proxy import SamplerProxy from newrelic.common.object_names import callable_name -from newrelic.core.adaptive_sampler import AdaptiveSampler from newrelic.core.agent_control_health import ( HealthStatus, agent_control_health_instance, @@ -156,11 +156,11 @@ def configuration(self): def active(self): return self.configuration is not None - def compute_sampled(self): - if self.adaptive_sampler is None: + def compute_sampled(self, full_granularity, section, *args, **kwargs): + if self.sampler is None: return False - return self.adaptive_sampler.compute_sampled() + return self.sampler.compute_sampled(full_granularity, section, *args, **kwargs) def dump(self, file): """Dumps details about the application to the file object.""" @@ -501,12 +501,7 @@ def connect_to_data_collector(self, activate_agent): with self._stats_lock: self._stats_engine.reset_stats(configuration, reset_stream=True) - - if configuration.serverless_mode.enabled: - sampling_target_period = 60.0 - else: - sampling_target_period = configuration.sampling_target_period_in_seconds - self.adaptive_sampler = AdaptiveSampler(configuration.sampling_target, sampling_target_period) + self.sampler = SamplerProxy(configuration) active_session.connect_span_stream(self._stats_engine.span_stream, self.record_custom_metric) diff --git a/newrelic/core/attribute.py b/newrelic/core/attribute.py index 785a2fa0ec..49bc890a80 100644 --- a/newrelic/core/attribute.py +++ b/newrelic/core/attribute.py @@ -109,6 +109,23 @@ "zeebe.client.resourceFile", } +SPAN_ENTITY_RELATIONSHIP_ATTRIBUTES = { + "cloud.account.id", + "cloud.platform", + "cloud.region", + "cloud.resource_id", + "db.instance", + "db.system", + "http.url", + "messaging.destination.name", + "messaging.system", + "peer.hostname", + "server.address", + "server.port", + "span.kind", +} + + MAX_NUM_USER_ATTRIBUTES = 128 MAX_ATTRIBUTE_LENGTH = 255 MAX_NUM_ML_USER_ATTRIBUTES = 64 diff --git a/newrelic/core/config.py b/newrelic/core/config.py index 4f813e4370..bd62faff87 100644 --- a/newrelic/core/config.py +++ b/newrelic/core/config.py @@ -337,6 +337,14 @@ class DistributedTracingSamplerSettings(Settings): pass +class DistributedTracingSamplerFullGranularitySettings(Settings): + pass + + +class DistributedTracingSamplerPartialGranularitySettings(Settings): + pass + + class ServerlessModeSettings(Settings): pass @@ -507,6 +515,8 @@ class EventHarvestConfigHarvestLimitSettings(Settings): _settings.debug = DebugSettings() _settings.distributed_tracing = DistributedTracingSettings() _settings.distributed_tracing.sampler = DistributedTracingSamplerSettings() +_settings.distributed_tracing.sampler.full_granularity = DistributedTracingSamplerFullGranularitySettings() +_settings.distributed_tracing.sampler.partial_granularity = DistributedTracingSamplerPartialGranularitySettings() _settings.error_collector = ErrorCollectorSettings() _settings.error_collector.attributes = ErrorCollectorAttributesSettings() _settings.event_harvest_config = EventHarvestConfigSettings() @@ -839,12 +849,31 @@ def default_otlp_host(host): _settings.distributed_tracing.sampler.adaptive_sampling_target = _environ_as_int( "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_ADAPTIVE_SAMPLING_TARGET", default=10 ) 
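Editor's note: the configuration block that follows wires up per-granularity environment variables. Each new `FULL_GRANULARITY` variable takes precedence over its legacy `NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_*` counterpart, with the literal string `"default"` as the final fallback; because the chain uses `or`, an empty string in the new variable also falls through to the legacy name. A minimal sketch of that precedence, using a hypothetical helper name (`_env_with_fallback`) that is not part of the agent:

```python
import os

def _env_with_fallback(new_name, legacy_name, default="default"):
    # First truthy value wins: new-style variable, then legacy variable, then default.
    return os.environ.get(new_name, None) or os.environ.get(legacy_name, default)

# Mirrors the precedence used below for the full-granularity sampling settings.
remote_parent_sampled = _env_with_fallback(
    "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_REMOTE_PARENT_SAMPLED",
    "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_SAMPLED",
)
```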
-_settings.distributed_tracing.sampler.remote_parent_sampled = os.environ.get( +_settings.distributed_tracing.sampler.full_granularity.enabled = _environ_as_bool( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_ENABLED", default=True +) +_settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_REMOTE_PARENT_SAMPLED", None +) or os.environ.get( "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_SAMPLED", "default" ) -_settings.distributed_tracing.sampler.remote_parent_not_sampled = os.environ.get( +_settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_REMOTE_PARENT_NOT_SAMPLED", None +) or os.environ.get( "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_NOT_SAMPLED", "default" ) +_settings.distributed_tracing.sampler.partial_granularity.enabled = _environ_as_bool( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_ENABLED", default=False +) +_settings.distributed_tracing.sampler.partial_granularity.type = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_TYPE", "essential" +) +_settings.distributed_tracing.sampler.partial_granularity.remote_parent_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_REMOTE_PARENT_SAMPLED", "default" +) +_settings.distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled = os.environ.get( + "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_REMOTE_PARENT_NOT_SAMPLED", "default" +) _settings.distributed_tracing.exclude_newrelic_header = False _settings.span_events.enabled = _environ_as_bool("NEW_RELIC_SPAN_EVENTS_ENABLED", default=True) _settings.event_harvest_config.harvest_limits.span_event_data = _environ_as_int( @@ -1371,9 +1400,20 @@ def finalize_application_settings(server_side_config=None, settings=_settings): application_settings.attribute_filter = AttributeFilter(flatten_settings(application_settings)) + simplify_distributed_tracing_sampler_granularity_settings(application_settings) + return application_settings +def simplify_distributed_tracing_sampler_granularity_settings(settings): + # Partial granularity tracing is not available in infinite tracing mode. + if settings.infinite_tracing.enabled and settings.distributed_tracing.sampler.partial_granularity.enabled: + _logger.warning( + "Improper configuration. Infinite tracing cannot be enabled at the same time as partial granularity tracing. Setting distributed_tracing.sampler.partial_granularity.enabled=False." + ) + settings.distributed_tracing.sampler.partial_granularity.enabled = False + + def _remove_ignored_configs(server_settings): if not server_settings.get("agent_config"): return server_settings diff --git a/newrelic/core/data_collector.py b/newrelic/core/data_collector.py index e481f1d6e7..c303fad90b 100644 --- a/newrelic/core/data_collector.py +++ b/newrelic/core/data_collector.py @@ -117,7 +117,14 @@ def send_ml_events(self, sampling_info, custom_event_data): def send_span_events(self, sampling_info, span_event_data): """Called to submit sample set for span events.""" - + # TODO: remove this later after list types are supported.
+ for span_event in span_event_data: + try: + ids = span_event[1].get("nr.ids") + if ids: + span_event[1]["nr.ids"] = ",".join(ids) + except Exception: + pass payload = (self.agent_run_id, sampling_info, span_event_data) return self._protocol.send("span_event_data", payload) diff --git a/newrelic/core/database_node.py b/newrelic/core/database_node.py index 1f60add195..8e30e3fecf 100644 --- a/newrelic/core/database_node.py +++ b/newrelic/core/database_node.py @@ -279,7 +279,15 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): sql = self.formatted if sql: @@ -288,4 +296,11 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic self.agent_attributes["db.statement"] = sql - return super().span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event( + settings, + base_attrs=base_attrs, + parent_guid=parent_guid, + attr_class=attr_class, + partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) diff --git a/newrelic/core/external_node.py b/newrelic/core/external_node.py index f47d634b3d..7251504bb1 100644 --- a/newrelic/core/external_node.py +++ b/newrelic/core/external_node.py @@ -169,7 +169,15 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): self.agent_attributes["http.url"] = self.http_url i_attrs = (base_attrs and base_attrs.copy()) or attr_class() @@ -180,4 +188,11 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic if self.method: _, i_attrs["http.method"] = attribute.process_user_attribute("http.method", self.method) - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event( + settings, + base_attrs=i_attrs, + parent_guid=parent_guid, + attr_class=attr_class, + partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) diff --git a/newrelic/core/function_node.py b/newrelic/core/function_node.py index 588f675f31..2eab783ecc 100644 --- a/newrelic/core/function_node.py +++ b/newrelic/core/function_node.py @@ -114,8 +114,23 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=self.label ) - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["name"] = f"{self.group}/{self.name}" - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event( + settings, + base_attrs=i_attrs, + parent_guid=parent_guid, + attr_class=attr_class, +
partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) diff --git a/newrelic/core/loop_node.py b/newrelic/core/loop_node.py index 58d1b3a746..b562720a85 100644 --- a/newrelic/core/loop_node.py +++ b/newrelic/core/loop_node.py @@ -79,8 +79,23 @@ def trace_node(self, stats, root, connections): start_time=start_time, end_time=end_time, name=name, params=params, children=children, label=None ) - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["name"] = f"EventLoop/Wait/{self.name}" - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event( + settings, + base_attrs=i_attrs, + parent_guid=parent_guid, + attr_class=attr_class, + partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) diff --git a/newrelic/core/node_mixin.py b/newrelic/core/node_mixin.py index 9154cc8765..29f5bedbc1 100644 --- a/newrelic/core/node_mixin.py +++ b/newrelic/core/node_mixin.py @@ -49,7 +49,15 @@ def get_trace_segment_params(self, settings, params=None): _params["exclusive_duration_millis"] = 1000.0 * self.exclusive return _params - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["type"] = "Span" i_attrs["name"] = i_attrs.get("name") or self.name @@ -68,18 +76,111 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic u_attrs = attribute.resolve_user_attributes( self.processed_user_attributes, settings.attribute_filter, DST_SPAN_EVENTS, attr_class=attr_class ) + if not partial_granularity_sampled: + # intrinsics, user attrs, agent attrs + return [i_attrs, u_attrs, a_attrs] + else: + if ct_exit_spans is None: + ct_exit_spans = {} - # intrinsics, user attrs, agent attrs - return [i_attrs, u_attrs, a_attrs] - - def span_events(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): - yield self.span_event(settings, base_attrs=base_attrs, parent_guid=parent_guid, attr_class=attr_class) + partial_granularity_type = settings.distributed_tracing.sampler.partial_granularity.type + exit_span_attrs_present = attribute.SPAN_ENTITY_RELATIONSHIP_ATTRIBUTES & set(a_attrs) + # If this is the entry node or an LLM span always return it. + if i_attrs.get("nr.entryPoint") or i_attrs["name"].startswith("Llm/"): + if partial_granularity_type == "reduced": + return [i_attrs, u_attrs, a_attrs] + else: + return [i_attrs, {}, {}] + # If the span is not an exit span, skip it by returning None. + if not exit_span_attrs_present: + return None + # If the span is an exit span and we are in reduced mode (meaning no attribute dropping), + # just return the exit span as is. + if partial_granularity_type == "reduced": + return [i_attrs, u_attrs, a_attrs] + else: + a_minimized_attrs = attr_class({key: a_attrs[key] for key in exit_span_attrs_present}) + # If we are in essential mode return the span with minimized attributes. 
+ if partial_granularity_type == "essential": + return [i_attrs, {}, a_minimized_attrs] + # If the span is an exit span but span compression (compact) is enabled, + # we need to check for uniqueness before returning it. + # Combine all the entity relationship attr values into a string to be + # used as the hash to check for uniqueness. + span_attrs = "".join([str(a_minimized_attrs[key]) for key in exit_span_attrs_present]) + new_exit_span = span_attrs not in ct_exit_spans + # If this is a new exit span, add it to the known ct_exit_spans and + # return it. + if new_exit_span: + # nr.ids is the list of span guids that share this unqiue exit span. + a_minimized_attrs["nr.ids"] = [] + a_minimized_attrs["nr.durations"] = self.duration + ct_exit_spans[span_attrs] = [i_attrs, a_minimized_attrs] + return [i_attrs, {}, a_minimized_attrs] + # If this is an exit span we've already seen, add it's guid to the list + # of ids on the seen span, compute the new duration & start time, and + # return None. + ct_exit_spans[span_attrs][1]["nr.ids"].append(self.guid) + # Max size for `nr.ids` = 1024. Max length = 63 (each span id is 16 bytes + 8 bytes for list type). + ct_exit_spans[span_attrs][1]["nr.ids"] = ct_exit_spans[span_attrs][1]["nr.ids"][:63] + # Compute the new start and end time for all compressed spans and use + # that to set the duration for all compressed spans. + current_start_time = ct_exit_spans[span_attrs][0]["timestamp"] + current_end_time = ( + ct_exit_spans[span_attrs][0]["timestamp"] / 1000 + ct_exit_spans[span_attrs][1]["nr.durations"] + ) + new_start_time = i_attrs["timestamp"] + new_end_time = i_attrs["timestamp"] / 1000 + i_attrs["duration"] + set_start_time = min(new_start_time, current_start_time) + # If the new span starts after the old span's end time or the new span + # ends before the current span starts; add the durations. + if current_end_time < new_start_time / 1000 or new_end_time < current_start_time / 1000: + set_duration = ct_exit_spans[span_attrs][1]["nr.durations"] + i_attrs["duration"] + # Otherwise, if the new and old span's overlap in time, use the newest + # end time and subtract the start time from it to calculate the new + # duration. + else: + set_duration = max(current_end_time, new_end_time) - set_start_time / 1000 + ct_exit_spans[span_attrs][0]["timestamp"] = set_start_time + ct_exit_spans[span_attrs][1]["nr.durations"] = set_duration + return None + def span_events( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): + span = self.span_event( + settings, + base_attrs=base_attrs, + parent_guid=parent_guid, + attr_class=attr_class, + partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) + parent_id = parent_guid + if span: # span will be None if the span is an inprocess span or repeated exit span. + yield span + # Compressed spans are always reparented onto the entry span. 
+ if not settings.distributed_tracing.sampler.partial_granularity.type == "compact" or span[0].get( + "nr.entryPoint" + ): + parent_id = self.guid for child in self.children: - for event in child.span_events( # noqa: UP028 - settings, base_attrs=base_attrs, parent_guid=self.guid, attr_class=attr_class + for event in child.span_events( + settings, + base_attrs=base_attrs, + parent_guid=parent_id, + attr_class=attr_class, + partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, ): - yield event + if event: # event will be None if the span is an inprocess span or repeated exit span. + yield event class DatastoreNodeMixin(GenericNodeMixin): @@ -108,7 +209,15 @@ def db_instance(self): self._db_instance = db_instance_attr return db_instance_attr - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): a_attrs = self.agent_attributes a_attrs["db.instance"] = self.db_instance i_attrs = (base_attrs and base_attrs.copy()) or attr_class() @@ -140,4 +249,11 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic except Exception: pass - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event( + settings, + base_attrs=i_attrs, + parent_guid=parent_guid, + attr_class=attr_class, + partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) diff --git a/newrelic/core/root_node.py b/newrelic/core/root_node.py index fa8b3de82b..72f1d392d7 100644 --- a/newrelic/core/root_node.py +++ b/newrelic/core/root_node.py @@ -37,7 +37,15 @@ class RootNode(_RootNode, GenericNodeMixin): - def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dict): + def span_event( + self, + settings, + base_attrs=None, + parent_guid=None, + attr_class=dict, + partial_granularity_sampled=False, + ct_exit_spans=None, + ): i_attrs = (base_attrs and base_attrs.copy()) or attr_class() i_attrs["transaction.name"] = self.path i_attrs["nr.entryPoint"] = True @@ -46,7 +54,14 @@ def span_event(self, settings, base_attrs=None, parent_guid=None, attr_class=dic if self.tracing_vendors: i_attrs["tracingVendors"] = self.tracing_vendors - return super().span_event(settings, base_attrs=i_attrs, parent_guid=parent_guid, attr_class=attr_class) + return super().span_event( + settings, + base_attrs=i_attrs, + parent_guid=parent_guid, + attr_class=attr_class, + partial_granularity_sampled=partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) def trace_node(self, stats, root, connections): name = self.path diff --git a/newrelic/core/samplers/__init__.py b/newrelic/core/samplers/__init__.py new file mode 100644 index 0000000000..bfe7af1430 --- /dev/null +++ b/newrelic/core/samplers/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/newrelic/core/adaptive_sampler.py b/newrelic/core/samplers/adaptive_sampler.py similarity index 100% rename from newrelic/core/adaptive_sampler.py rename to newrelic/core/samplers/adaptive_sampler.py diff --git a/newrelic/core/samplers/sampler_proxy.py b/newrelic/core/samplers/sampler_proxy.py new file mode 100644 index 0000000000..2e8b7cf87f --- /dev/null +++ b/newrelic/core/samplers/sampler_proxy.py @@ -0,0 +1,34 @@ +# Copyright 2010 New Relic, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from newrelic.core.samplers.adaptive_sampler import AdaptiveSampler + + +class SamplerProxy: + def __init__(self, settings): + if settings.serverless_mode.enabled: + sampling_target_period = 60.0 + else: + sampling_target_period = settings.sampling_target_period_in_seconds + adaptive_sampler = AdaptiveSampler(settings.sampling_target, sampling_target_period) + self._samplers = [adaptive_sampler] + + def get_sampler(self, full_granularity, section): + return self._samplers[0] + + def compute_sampled(self, full_granularity, section, *args, **kwargs): + """ + full_granularity: True is full granularity, False is partial granularity + section: 0-root, 1-remote_parent_sampled, 2-remote_parent_not_sampled + """ + return self.get_sampler(full_granularity, section).compute_sampled(*args, **kwargs) diff --git a/newrelic/core/transaction_node.py b/newrelic/core/transaction_node.py index 34871d8b21..f1c9f1ea7a 100644 --- a/newrelic/core/transaction_node.py +++ b/newrelic/core/transaction_node.py @@ -98,6 +98,7 @@ "root_span_guid", "trace_id", "loop_time", + "partial_granularity_sampled", ], ) @@ -633,5 +634,12 @@ def span_events(self, settings, attr_class=dict): ("priority", self.priority), ) ) - - yield from self.root.span_events(settings, base_attrs, parent_guid=self.parent_span, attr_class=attr_class) + ct_exit_spans = {} + yield from self.root.span_events( + settings, + base_attrs, + parent_guid=self.parent_span, + attr_class=attr_class, + partial_granularity_sampled=self.partial_granularity_sampled, + ct_exit_spans=ct_exit_spans, + ) diff --git a/newrelic/hooks/database_aiomysql.py b/newrelic/hooks/database_aiomysql.py index 9a2f3d1d18..2cedcb40f9 100644 --- a/newrelic/hooks/database_aiomysql.py +++ b/newrelic/hooks/database_aiomysql.py @@ -78,6 +78,10 @@ async def _wrap_pool__acquire(wrapped, instance, args, kwargs): with FunctionTrace(name=callable_name(wrapped), terminal=True, rollup=rollup, source=wrapped): connection = await wrapped(*args, **kwargs) connection_kwargs = getattr(instance, "_conn_kwargs", {}) + + if hasattr(connection, "__wrapped__"): + return connection + return AsyncConnectionWrapper(connection, dbapi2_module, (((), connection_kwargs))) return _wrap_pool__acquire diff --git a/newrelic/hooks/external_botocore.py b/newrelic/hooks/external_botocore.py index d8c18b49db..28bd8ffb13 100644 --- a/newrelic/hooks/external_botocore.py +++ b/newrelic/hooks/external_botocore.py 
@@ -1451,6 +1451,9 @@ def wrap_serialize_to_request(wrapped, instance, args, kwargs): ("kinesis", "untag_resource"): aws_function_trace( "untag_resource", extract_kinesis, extract_agent_attrs=extract_kinesis_agent_attrs, library="Kinesis" ), + ("kinesis", "update_max_record_size"): aws_function_trace( + "update_max_record_size", extract_kinesis, extract_agent_attrs=extract_kinesis_agent_attrs, library="Kinesis" + ), ("kinesis", "update_shard_count"): aws_function_trace( "update_shard_count", extract_kinesis, extract_agent_attrs=extract_kinesis_agent_attrs, library="Kinesis" ), diff --git a/tests/agent_features/test_distributed_tracing.py b/tests/agent_features/test_distributed_tracing.py index 6548b17cf8..f11375a00b 100644 --- a/tests/agent_features/test_distributed_tracing.py +++ b/tests/agent_features/test_distributed_tracing.py @@ -12,8 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +import asyncio import copy import json +import time import pytest import webtest @@ -24,6 +26,18 @@ from testing_support.validators.validate_transaction_event_attributes import validate_transaction_event_attributes from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from newrelic.api.function_trace import function_trace +from newrelic.common.object_wrapper import function_wrapper, transient_function_wrapper + +try: + from newrelic.core.infinite_tracing_pb2 import AttributeValue, Span +except Exception: + AttributeValue = None + Span = None + +from testing_support.mock_external_http_server import MockExternalHTTPHResponseHeadersServer +from testing_support.validators.validate_span_events import check_value_equals, validate_span_events + from newrelic.api.application import application_instance from newrelic.api.background_task import BackgroundTask, background_task from newrelic.api.external_trace import ExternalTrace @@ -72,6 +86,110 @@ } +def validate_compact_span_event( + name, compressed_span_count, expected_nr_durations_low_bound, expected_nr_durations_high_bound +): + @function_wrapper + def _validate_wrapper(wrapped, instance, args, kwargs): + record_transaction_called = [] + recorded_span_events = [] + + @transient_function_wrapper("newrelic.core.stats_engine", "StatsEngine.record_transaction") + def capture_span_events(wrapped, instance, args, kwargs): + events = [] + + @transient_function_wrapper("newrelic.common.streaming_utils", "StreamBuffer.put") + def stream_capture(wrapped, instance, args, kwargs): + event = args[0] + events.append(event) + return wrapped(*args, **kwargs) + + record_transaction_called.append(True) + try: + result = stream_capture(wrapped)(*args, **kwargs) + except Exception: + raise + else: + if not instance.settings.infinite_tracing.enabled: + events = [event for priority, seen_at, event in instance.span_events.pq] + + recorded_span_events.append(events) + + return result + + _new_wrapper = capture_span_events(wrapped) + val = _new_wrapper(*args, **kwargs) + assert record_transaction_called + captured_events = recorded_span_events.pop(-1) + + mismatches = [] + matching_span_events = 0 + + def _span_details(): + details = [ + f"matching_span_events={matching_span_events}", + f"mismatches={mismatches}", + f"captured_events={captured_events}", + ] + return "\n".join(details) + + for captured_event in captured_events: + if Span and isinstance(captured_event, Span): + intrinsics = captured_event.intrinsics + user_attrs = captured_event.user_attributes + agent_attrs =
captured_event.agent_attributes + else: + intrinsics, _, agent_attrs = captured_event + + # Find the span by name. + if not check_value_equals(intrinsics, "name", name): + continue + assert check_value_length(agent_attrs, "nr.ids", compressed_span_count - 1, mismatches), _span_details() + assert check_value_between( + agent_attrs, + "nr.durations", + expected_nr_durations_low_bound, + expected_nr_durations_high_bound, + mismatches, + ), _span_details() + matching_span_events += 1 + + assert matching_span_events == 1, _span_details() + return val + + return _validate_wrapper + + +def check_value_between(dictionary, key, expected_min, expected_max, mismatches): + value = dictionary.get(key) + if AttributeValue and isinstance(value, AttributeValue): + for _, val in value.ListFields(): + if not (expected_min < val < expected_max): + mismatches.append(f"key: {key}, not {expected_min} < {val} < {expected_max}") + return False + return True + else: + if not (expected_min < value < expected_max): + mismatches.append(f"key: {key}, not {expected_min} < {value} < {expected_max}") + return False + return True + + +def check_value_length(dictionary, key, expected_length, mismatches): + value = dictionary.get(key) + if AttributeValue and isinstance(value, AttributeValue): + for _, val in value.ListFields(): + if len(val) != expected_length: + mismatches.append(f"key: {key}, not len({val}) == {expected_length}") + return False + return True + else: + if len(value) != expected_length: + mismatches.append(f"key: {key}, not len({value}) == {expected_length}") + return False + return True + + @wsgi_application() def target_wsgi_application(environ, start_response): status = "200 OK" @@ -468,8 +586,99 @@ def test_distributed_trace_remote_parent_sampling_decision_full_granularity( test_settings = _override_settings.copy() test_settings.update( { - "distributed_tracing.sampler.remote_parent_sampled": remote_parent_sampled_setting, - "distributed_tracing.sampler.remote_parent_not_sampled": remote_parent_not_sampled_setting, + "distributed_tracing.sampler.full_granularity.remote_parent_sampled": remote_parent_sampled_setting, + "distributed_tracing.sampler.full_granularity.remote_parent_not_sampled": remote_parent_not_sampled_setting, + "span_events.enabled": True, + } + ) + if expected_adaptive_sampling_algo_called: + function_called_decorator = validate_function_called( + "newrelic.core.adaptive_sampler", "AdaptiveSampler.compute_sampled" + ) + else: + function_called_decorator = validate_function_not_called( + "newrelic.core.adaptive_sampler", "AdaptiveSampler.compute_sampled" + ) + + @function_called_decorator + @override_application_settings(test_settings) + @validate_attributes_complete("intrinsic", required_intrinsics) + @background_task(name="test_distributed_trace_attributes") + def _test(): + txn = current_transaction() + + if traceparent_sampled is not None: + headers = { + "traceparent": f"00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-{int(traceparent_sampled):02x}", + "newrelic": '{"v":[0,1],"d":{"ty":"Mobile","ac":"123","ap":"51424","id":"5f474d64b9cc9b2a","tr":"6e2fea0b173fdad0","pr":0.1234,"sa":true,"ti":1482959525577,"tx":"27856f70d3d314b7"}}', # This header should be ignored. 
+            }
+            if newrelic_sampled is not None:
+                headers["tracestate"] = (
+                    f"1@nr=0-0-1-2827902-0af7651916cd43dd-00f067aa0ba902b7-{int(newrelic_sampled)}-1.23456-1518469636035"
+                )
+        else:
+            headers = {
+                "newrelic": '{"v":[0,1],"d":{"ty":"Mobile","ac":"1","ap":"51424","id":"00f067aa0ba902b7","tr":"0af7651916cd43dd8448eb211c80319c","pr":0.1234,"sa":%s,"ti":1482959525577,"tx":"0af7651916cd43dd"}}'
+                % (str(newrelic_sampled).lower())
+            }
+        accept_distributed_trace_headers(headers)
+
+    _test()
+
+
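Taken together, the full-granularity cases above pin down the semantics of the three sampler values: `always_on` forces `sampled=True` at priority 2, `always_off` forces `sampled=False` at priority 0, and `default` defers to the adaptive sampler unless the inbound headers carry an explicit upstream decision. A sketch of that mapping, inferred only from the expected values in these tests (the helper name is hypothetical, not agent API):

```python
# Illustrative only; inferred from the expected sampled/priority pairs above.
def resolve_remote_parent_setting(setting, adaptive_sampler):
    if setting == "always_on":
        return True, 2.0  # force sampling at maximum priority
    if setting == "always_off":
        return False, 0.0  # suppress sampling entirely
    # "default": fall back to the adaptive sampling algorithm
    return adaptive_sampler.compute_sampled(), None
```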
+@pytest.mark.parametrize(
+    "traceparent_sampled,newrelic_sampled,remote_parent_sampled_setting,remote_parent_not_sampled_setting,expected_sampled,expected_priority,expected_adaptive_sampling_algo_called",
+    (
+        (True, None, "default", "default", None, None, True),  # Uses adaptive sampling algo.
+        (True, None, "always_on", "default", True, 2, False),  # Always sampled.
+        (True, None, "always_off", "default", False, 0, False),  # Never sampled.
+        (False, None, "default", "default", None, None, True),  # Uses adaptive sampling algo.
+        (False, None, "always_on", "default", None, None, True),  # Uses adaptive sampling algo.
+        (False, None, "always_off", "default", None, None, True),  # Uses adaptive sampling algo.
+        (True, None, "default", "always_on", None, None, True),  # Uses adaptive sampling algo.
+        (True, None, "default", "always_off", None, None, True),  # Uses adaptive sampling algo.
+        (False, None, "default", "always_on", True, 2, False),  # Always sampled.
+        (False, None, "default", "always_off", False, 0, False),  # Never sampled.
+        (True, True, "default", "default", True, 1.23456, False),  # Uses sampling decision in W3C TraceState header.
+        (True, False, "default", "default", False, 1.23456, False),  # Uses sampling decision in W3C TraceState header.
+        (False, False, "default", "default", False, 1.23456, False),  # Uses sampling decision in W3C TraceState header.
+        (True, False, "always_on", "default", True, 2, False),  # Always sampled.
+        (True, True, "always_off", "default", False, 0, False),  # Never sampled.
+        (False, False, "default", "always_on", True, 2, False),  # Always sampled.
+        (False, True, "default", "always_off", False, 0, False),  # Never sampled.
+        (None, True, "default", "default", True, 0.1234, False),  # Uses sampling and priority from newrelic header.
+        (None, True, "always_on", "default", True, 2, False),  # Always sampled.
+        (None, True, "always_off", "default", False, 0, False),  # Never sampled.
+        (None, False, "default", "default", False, 0.1234, False),  # Uses sampling and priority from newrelic header.
+        (None, False, "always_on", "default", False, 0.1234, False),  # Uses sampling and priority from newrelic header.
+        (None, True, "default", "always_on", True, 0.1234, False),  # Uses sampling and priority from newrelic header.
+        (None, False, "default", "always_on", True, 2, False),  # Always sampled.
+        (None, False, "default", "always_off", False, 0, False),  # Never sampled.
+        (None, None, "default", "default", None, None, True),  # Uses adaptive sampling algo.
+    ),
+)
+def test_distributed_trace_remote_parent_sampling_decision_partial_granularity(
+    traceparent_sampled,
+    newrelic_sampled,
+    remote_parent_sampled_setting,
+    remote_parent_not_sampled_setting,
+    expected_sampled,
+    expected_priority,
+    expected_adaptive_sampling_algo_called,
+):
+    required_intrinsics = []
+    if expected_sampled is not None:
+        required_intrinsics.append(Attribute(name="sampled", value=expected_sampled, destinations=0b110))
+    if expected_priority is not None:
+        required_intrinsics.append(Attribute(name="priority", value=expected_priority, destinations=0b110))
+
+    test_settings = _override_settings.copy()
+    test_settings.update(
+        {
+            "distributed_tracing.sampler.full_granularity.enabled": False,
+            "distributed_tracing.sampler.partial_granularity.enabled": True,
+            "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": remote_parent_sampled_setting,
+            "distributed_tracing.sampler.partial_granularity.remote_parent_not_sampled": remote_parent_not_sampled_setting,
             "span_events.enabled": True,
         }
     )
@@ -506,3 +715,320 @@ def _test():
     accept_distributed_trace_headers(headers)
 
     _test()
+
+
+@pytest.mark.parametrize(
+    "full_granularity_enabled,full_granularity_remote_parent_sampled_setting,partial_granularity_enabled,partial_granularity_remote_parent_sampled_setting,expected_sampled,expected_priority,expected_adaptive_sampling_algo_called",
+    (
+        (True, "always_off", True, "adaptive", None, None, True),  # Falls through to adaptive sampling algo.
+        (True, "always_on", True, "adaptive", True, 2, False),  # Always sampled.
+        (False, "always_on", False, "adaptive", False, 0, False),  # Both samplers disabled; never sampled.
+    ),
+)
+def test_distributed_trace_remote_parent_sampling_decision_between_full_and_partial_granularity(
+    full_granularity_enabled,
+    full_granularity_remote_parent_sampled_setting,
+    partial_granularity_enabled,
+    partial_granularity_remote_parent_sampled_setting,
+    expected_sampled,
+    expected_priority,
+    expected_adaptive_sampling_algo_called,
+):
+    required_intrinsics = []
+    if expected_sampled is not None:
+        required_intrinsics.append(Attribute(name="sampled", value=expected_sampled, destinations=0b110))
+    if expected_priority is not None:
+        required_intrinsics.append(Attribute(name="priority", value=expected_priority, destinations=0b110))
+
+    test_settings = _override_settings.copy()
+    test_settings.update(
+        {
+            "distributed_tracing.sampler.full_granularity.enabled": full_granularity_enabled,
+            "distributed_tracing.sampler.partial_granularity.enabled": partial_granularity_enabled,
+            "distributed_tracing.sampler.full_granularity.remote_parent_sampled": full_granularity_remote_parent_sampled_setting,
+            "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": partial_granularity_remote_parent_sampled_setting,
+            "span_events.enabled": True,
+        }
+    )
+    if expected_adaptive_sampling_algo_called:
+        function_called_decorator = validate_function_called(
+            "newrelic.core.adaptive_sampler", "AdaptiveSampler.compute_sampled"
+        )
+    else:
+        function_called_decorator = validate_function_not_called(
+            "newrelic.core.adaptive_sampler", "AdaptiveSampler.compute_sampled"
+        )
+
+    @function_called_decorator
+    @override_application_settings(test_settings)
+    @validate_attributes_complete("intrinsic", required_intrinsics)
+    @background_task(name="test_distributed_trace_attributes")
+    def _test():
+        headers = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"}
+        accept_distributed_trace_headers(headers)
+
+    _test()
+
+
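The three cases above encode the precedence between the two samplers: an enabled full-granularity sampler set to `always_on` wins outright; set to `always_off` it falls through to the partial-granularity sampler's `adaptive` algorithm; and with both samplers disabled nothing is sampled. A sketch of that flow, inferred only from these three cases (hypothetical helper, not the agent's implementation):

```python
# Inferred from the three parametrized cases above; illustrative only.
def decide_sampling(full, partial, adaptive_sampler):
    if full.enabled and full.remote_parent_sampled == "always_on":
        return True, 2.0  # full granularity forces sampling
    if partial.enabled:  # full granularity disabled or declined
        return adaptive_sampler.compute_sampled(), None  # "adaptive"
    return False, 0.0  # both samplers disabled: never sampled
```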
+def test_partial_granularity_max_compressed_spans():
+    """
+    Tests that `nr.ids` does not exceed the 1024-byte limit.
+    """
+
+    async def test(index):
+        with ExternalTrace("requests", "http://localhost:3000/", method="GET") as trace:
+            time.sleep(0.1)
+
+    @function_trace()
+    async def call_tests():
+        tasks = [test(i) for i in range(65)]
+        await asyncio.gather(*tasks)
+
+    @validate_span_events(
+        count=1,  # Entry span.
+        exact_intrinsics={
+            "name": "Function/test_distributed_tracing:test_partial_granularity_max_compressed_spans.<locals>._test"
+        },
+        expected_intrinsics=["duration", "timestamp"],
+    )
+    @validate_span_events(
+        count=1,  # 1 external compressed span.
+        exact_intrinsics={"name": "External/localhost:3000/requests/GET"},
+        exact_agents={"http.url": "http://localhost:3000/"},
+        expected_agents=["nr.durations", "nr.ids"],
+    )
+    @validate_compact_span_event(
+        name="External/localhost:3000/requests/GET",
+        # `nr.ids` can only hold 63 ids but duration reflects all compressed spans.
+        compressed_span_count=64,
+        expected_nr_durations_low_bound=6.5,
+        expected_nr_durations_high_bound=6.8,  # 64 of these add > 0.2s of overhead.
+    )
+    @background_task()
+    def _test():
+        headers = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"}
+        accept_distributed_trace_headers(headers)
+        asyncio.run(call_tests())
+
+    _test = override_application_settings(
+        {
+            "distributed_tracing.sampler.full_granularity.enabled": False,
+            "distributed_tracing.sampler.partial_granularity.enabled": True,
+            "distributed_tracing.sampler.partial_granularity.type": "compact",
+            "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": "always_on",
+            "span_events.enabled": True,
+        }
+    )(_test)
+
+    _test()
+
+
+def test_partial_granularity_compressed_span_attributes_in_series():
+    """
+    Tests compressed span attributes when compressed span times are serial.
+    That is, each span ends before the next compressed span begins.
+    """
+
+    async def test(index):
+        with ExternalTrace("requests", "http://localhost:3000/", method="GET") as trace:
+            time.sleep(0.1)
+
+    @function_trace()
+    async def call_tests():
+        tasks = [test(i) for i in range(3)]
+        await asyncio.gather(*tasks)
+
+    @validate_span_events(
+        count=1,  # Entry span.
+        exact_intrinsics={
+            "name": "Function/test_distributed_tracing:test_partial_granularity_compressed_span_attributes_in_series.<locals>._test"
+        },
+        expected_intrinsics=["duration", "timestamp"],
+    )
+    @validate_span_events(
+        count=1,  # 1 external compressed span.
+ exact_intrinsics={"name": "External/localhost:3000/requests/GET"}, + exact_agents={"http.url": "http://localhost:3000/"}, + expected_agents=["nr.durations", "nr.ids"], + ) + @validate_compact_span_event( + name="External/localhost:3000/requests/GET", + compressed_span_count=3, + expected_nr_durations_low_bound=0.3, + expected_nr_durations_high_bound=0.4, + ) + @background_task() + def _test(): + headers = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"} + accept_distributed_trace_headers(headers) + asyncio.run(call_tests()) + + _test = override_application_settings( + { + "distributed_tracing.sampler.full_granularity.enabled": False, + "distributed_tracing.sampler.partial_granularity.enabled": True, + "distributed_tracing.sampler.partial_granularity.type": "compact", + "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": "always_on", + "span_events.enabled": True, + } + )(_test) + + _test() + + +def test_partial_granularity_compressed_span_attributes_overlapping(): + """ + Tests compressed span attributes when compressed span times overlap. + Aka: the next span begins in the middle of the first span. + """ + + @validate_span_events( + count=1, # Entry span. + exact_intrinsics={ + "name": "Function/test_distributed_tracing:test_partial_granularity_compressed_span_attributes_overlapping.._test" + }, + expected_intrinsics=["duration", "timestamp"], + ) + @validate_span_events( + count=1, # 1 external compressed span. + exact_intrinsics={"name": "External/localhost:3000/requests/GET"}, + exact_agents={"http.url": "http://localhost:3000/"}, + expected_agents=["nr.durations", "nr.ids"], + ) + @validate_compact_span_event( + name="External/localhost:3000/requests/GET", + compressed_span_count=2, + expected_nr_durations_low_bound=0.1, + expected_nr_durations_high_bound=0.2, + ) + @background_task() + def _test(): + headers = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"} + accept_distributed_trace_headers(headers) + with ExternalTrace("requests", "http://localhost:3000/", method="GET") as trace1: + # Override terminal_node so we can create a nested exit span. + trace1.terminal_node = lambda: False + trace2 = ExternalTrace("requests", "http://localhost:3000/", method="GET") + trace2.__enter__() + time.sleep(0.1) + trace2.__exit__(None, None, None) + + _test = override_application_settings( + { + "distributed_tracing.sampler.full_granularity.enabled": False, + "distributed_tracing.sampler.partial_granularity.enabled": True, + "distributed_tracing.sampler.partial_granularity.type": "compact", + "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": "always_on", + "span_events.enabled": True, + } + )(_test) + + _test() + + +def test_partial_granularity_reduced_span_attributes(): + """ + In reduced mode, only inprocess spans are dropped. + """ + + @function_trace() + def foo(): + with ExternalTrace("requests", "http://localhost:3000/", method="GET") as trace: + trace.add_custom_attribute("custom", "bar") + + @validate_span_events( + count=1, # Entry span. + exact_intrinsics={ + "name": "Function/test_distributed_tracing:test_partial_granularity_reduced_span_attributes.._test" + }, + expected_intrinsics=["duration", "timestamp"], + expected_agents=["code.function", "code.lineno", "code.namespace"], + ) + @validate_span_events( + count=0, # Function foo span should not be present. 
+        exact_intrinsics={
+            "name": "Function/test_distributed_tracing:test_partial_granularity_reduced_span_attributes.<locals>.foo"
+        },
+        expected_intrinsics=["duration", "timestamp"],
+    )
+    @validate_span_events(
+        count=2,  # 2 external spans.
+        exact_intrinsics={"name": "External/localhost:3000/requests/GET"},
+        exact_agents={"http.url": "http://localhost:3000/"},
+        exact_users={"custom": "bar"},
+    )
+    @background_task()
+    def _test():
+        headers = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"}
+        accept_distributed_trace_headers(headers)
+        with ExternalTrace("requests", "http://localhost:3000/", method="GET") as trace:
+            # Override terminal_node so we can create a nested exit span.
+            trace.terminal_node = lambda: False
+            trace.add_custom_attribute("custom", "bar")
+            foo()
+
+    _test = override_application_settings(
+        {
+            "distributed_tracing.sampler.full_granularity.enabled": False,
+            "distributed_tracing.sampler.partial_granularity.enabled": True,
+            "distributed_tracing.sampler.partial_granularity.type": "reduced",
+            "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": "always_on",
+            "span_events.enabled": True,
+        }
+    )(_test)
+
+    _test()
+
+
+def test_partial_granularity_essential_span_attributes():
+    """
+    In essential mode, in-process spans and non-entity-synthesis attributes are dropped.
+    """
+
+    @function_trace()
+    def foo():
+        with ExternalTrace("requests", "http://localhost:3000/", method="GET") as trace:
+            trace.add_custom_attribute("custom", "bar")
+
+    @validate_span_events(
+        count=1,  # Entry span.
+        exact_intrinsics={
+            "name": "Function/test_distributed_tracing:test_partial_granularity_essential_span_attributes.<locals>._test"
+        },
+        expected_intrinsics=["duration", "timestamp"],
+        unexpected_agents=["code.function", "code.lineno", "code.namespace"],
+    )
+    @validate_span_events(
+        count=0,  # Function foo span should not be present.
+        exact_intrinsics={
+            "name": "Function/test_distributed_tracing:test_partial_granularity_essential_span_attributes.<locals>.foo"
+        },
+        expected_intrinsics=["duration", "timestamp"],
+    )
+    @validate_span_events(
+        count=2,  # 2 external spans.
+        exact_intrinsics={"name": "External/localhost:3000/requests/GET"},
+        exact_agents={"http.url": "http://localhost:3000/"},
+        unexpected_users=["custom"],
+    )
+    @background_task()
+    def _test():
+        headers = {"traceparent": "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"}
+        accept_distributed_trace_headers(headers)
+        with ExternalTrace("requests", "http://localhost:3000/", method="GET") as trace:
+            # Override terminal_node so we can create a nested exit span.
+            trace.terminal_node = lambda: False
+            trace.add_custom_attribute("custom", "bar")
+            foo()
+
+    _test = override_application_settings(
+        {
+            "distributed_tracing.sampler.full_granularity.enabled": False,
+            "distributed_tracing.sampler.partial_granularity.enabled": True,
+            "distributed_tracing.sampler.partial_granularity.type": "essential",
+            "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": "always_on",
+            "span_events.enabled": True,
+        }
+    )(_test)
+
+    _test()
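Taken together, these four tests pin down the three partial-granularity types. A minimal configuration sketch mirroring the override dictionaries used above (only the `type` value differs per mode; everything else is common to all three):

```python
# Settings in the shape the tests above pass to override_application_settings.
partial_granularity_settings = {
    "distributed_tracing.sampler.full_granularity.enabled": False,
    "distributed_tracing.sampler.partial_granularity.enabled": True,
    # "compact": exit spans are merged into one span carrying nr.ids/nr.durations;
    # "reduced": in-process spans are dropped, exit spans keep all attributes;
    # "essential": in-process spans and non-entity-synthesis attributes are dropped.
    "distributed_tracing.sampler.partial_granularity.type": "compact",
    "distributed_tracing.sampler.partial_granularity.remote_parent_sampled": "always_on",
    "span_events.enabled": True,
}
```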
diff --git a/tests/agent_unittests/test_distributed_tracing_settings.py b/tests/agent_unittests/test_distributed_tracing_settings.py
index a1c99da58d..3668cfbe32 100644
--- a/tests/agent_unittests/test_distributed_tracing_settings.py
+++ b/tests/agent_unittests/test_distributed_tracing_settings.py
@@ -14,6 +14,8 @@
 
 import pytest
 
+from newrelic.core.config import finalize_application_settings
+
 INI_FILE_EMPTY = b"""
 [newrelic]
 """
@@ -30,3 +32,35 @@ def test_distributed_trace_setings(ini, env, expected_format, global_settings):
     settings = global_settings()
 
     assert settings.distributed_tracing.exclude_newrelic_header == expected_format
+
+
+@pytest.mark.parametrize(
+    "ini,env",
+    (
+        (
+            INI_FILE_EMPTY,
+            {
+                "NEW_RELIC_ENABLED": "true",
+                "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_SAMPLED": "default",
+                "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_NOT_SAMPLED": "default",
+                "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_REMOTE_PARENT_SAMPLED": "always_on",
+                "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_FULL_GRANULARITY_REMOTE_PARENT_NOT_SAMPLED": "always_off",
+            },
+        ),
+        (
+            INI_FILE_EMPTY,
+            {
+                "NEW_RELIC_ENABLED": "true",
+                "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_SAMPLED": "always_on",
+                "NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_REMOTE_PARENT_NOT_SAMPLED": "always_off",
+            },
+        ),
+    ),
+)
+def test_full_granularity_precedence(ini, env, global_settings):
+    settings = global_settings()
+
+    app_settings = finalize_application_settings(settings=settings)
+
+    assert app_settings.distributed_tracing.sampler.full_granularity.remote_parent_sampled == "always_on"
+    assert app_settings.distributed_tracing.sampler.full_granularity.remote_parent_not_sampled == "always_off"
diff --git a/tests/agent_unittests/test_harvest_loop.py b/tests/agent_unittests/test_harvest_loop.py
index 9717e956ba..8447b18eb5 100644
--- a/tests/agent_unittests/test_harvest_loop.py
+++ b/tests/agent_unittests/test_harvest_loop.py
@@ -166,6 +166,7 @@ def transaction_node(request):
         root_span_guid=None,
         trace_id="4485b89db608aece",
         loop_time=0.0,
+        partial_granularity_sampled=False,
     )
     return node
diff --git a/tests/agent_unittests/test_infinite_trace_settings.py b/tests/agent_unittests/test_infinite_trace_settings.py
index 4b47a72398..31c8e6819e 100644
--- a/tests/agent_unittests/test_infinite_trace_settings.py
+++ b/tests/agent_unittests/test_infinite_trace_settings.py
@@ -14,6 +14,8 @@
 
 import pytest
 
+from newrelic.core.config import finalize_application_settings
+
 INI_FILE_EMPTY = b"""
 [newrelic]
 """
@@ -77,3 +79,18 @@ def test_infinite_tracing_port(ini, env, expected_port, global_settings):
 def test_infinite_tracing_span_queue_size(ini, env, expected_size, global_settings):
     settings = global_settings()
     assert settings.infinite_tracing.span_queue_size == expected_size
+
+
+@pytest.mark.parametrize(
+    "ini,env",
+    ((INI_FILE_INFINITE_TRACING, {"NEW_RELIC_DISTRIBUTED_TRACING_SAMPLER_PARTIAL_GRANULARITY_ENABLED": "true"}),),
+)
+def test_partial_granularity_disabled_when_infinite_tracing_enabled(ini, env,
global_settings): + settings = global_settings() + assert settings.distributed_tracing.sampler.partial_granularity.enabled + assert settings.infinite_tracing.enabled + + app_settings = finalize_application_settings(settings=settings) + + assert not app_settings.distributed_tracing.sampler.partial_granularity.enabled + assert app_settings.infinite_tracing.enabled diff --git a/tests/datastore_aiomysql/test_database.py b/tests/datastore_aiomysql/test_database.py index 20d1a48586..8cc386cfe1 100644 --- a/tests/datastore_aiomysql/test_database.py +++ b/tests/datastore_aiomysql/test_database.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +import inspect + import aiomysql from testing_support.db_settings import mysql_settings from testing_support.util import instance_hostname @@ -150,3 +152,35 @@ async def _test(): await pool.wait_closed() loop.run_until_complete(_test()) + + +@background_task() +def test_connection_pool_no_double_wrap(loop): + async def _test(): + pool = await aiomysql.create_pool( + db=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + password=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + loop=loop, + ) + + # Retrieve the same connection from the pool twice to see if it gets double wrapped + async with pool.acquire() as first_connection: + first_connection_unwrapped = inspect.unwrap(first_connection) + async with pool.acquire() as second_connection: + second_connection_unwrapped = inspect.unwrap(second_connection) + + # Ensure we actually retrieved the same underlying connection object from the pool twice + assert first_connection_unwrapped is second_connection_unwrapped, "Did not get same connection from pool" + + # Check that wrapping occurred only once + assert hasattr(first_connection, "__wrapped__"), "first_connection object was not wrapped" + assert hasattr(second_connection, "__wrapped__"), "second_connection object was not wrapped" + assert not hasattr(second_connection.__wrapped__, "__wrapped__"), "second_connection was double wrapped" + + pool.close() + await pool.wait_closed() + + loop.run_until_complete(_test()) diff --git a/tests/external_botocore/test_boto3_kinesis.py b/tests/external_botocore/test_boto3_kinesis.py index 9c03fa154a..9c92c669aa 100644 --- a/tests/external_botocore/test_boto3_kinesis.py +++ b/tests/external_botocore/test_boto3_kinesis.py @@ -46,6 +46,8 @@ } } +UNINSTRUMENTED_KINESIS_METHODS = ("generate_presigned_url", "close", "get_waiter", "can_paginate", "get_paginator") + _kinesis_scoped_metrics = [ (f"MessageBroker/Kinesis/Stream/Produce/Named/{TEST_STREAM}", 2), (f"MessageBroker/Kinesis/Stream/Consume/Named/{TEST_STREAM}", 1), @@ -117,10 +119,7 @@ def test_instrumented_kinesis_methods(): region_name=AWS_REGION, ) - ignored_methods = { - ("kinesis", method) - for method in ("generate_presigned_url", "close", "get_waiter", "can_paginate", "get_paginator") - } + ignored_methods = {("kinesis", method) for method in UNINSTRUMENTED_KINESIS_METHODS} client_methods = inspect.getmembers(client, predicate=inspect.ismethod) methods = {("kinesis", name) for (name, method) in client_methods if not name.startswith("_")} diff --git a/tests/logger_structlog/test_attributes.py b/tests/logger_structlog/test_attributes.py index c41591f192..f76821cd4a 100644 --- a/tests/logger_structlog/test_attributes.py +++ b/tests/logger_structlog/test_attributes.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # 
limitations under the License.
 
+import sys
+
 import pytest
 from testing_support.validators.validate_log_event_count import validate_log_event_count
 from testing_support.validators.validate_log_events import validate_log_events
@@ -23,12 +25,18 @@
 def logger(structlog_caplog):
     import structlog
 
+    # For Python < 3.11 co_qualname does not exist and causes errors.
+    # Remove it from the CallsiteParameterAdder input list.
+    _callsite_params = set(structlog.processors.CallsiteParameter)
+    if sys.version_info < (3, 11) and hasattr(structlog.processors.CallsiteParameter, "QUAL_NAME"):
+        _callsite_params.remove(structlog.processors.CallsiteParameter.QUAL_NAME)
+
     structlog.configure(
         processors=[
             structlog.contextvars.merge_contextvars,
             structlog.processors.format_exc_info,
             structlog.processors.StackInfoRenderer(),
-            structlog.processors.CallsiteParameterAdder(),
+            structlog.processors.CallsiteParameterAdder(parameters=_callsite_params),
         ],
         logger_factory=lambda *args, **kwargs: structlog_caplog,
     )
diff --git a/tests/mlmodel_openai/test_chat_completion_v1.py b/tests/mlmodel_openai/test_chat_completion_v1.py
index 817db35d8e..969e4233bf 100644
--- a/tests/mlmodel_openai/test_chat_completion_v1.py
+++ b/tests/mlmodel_openai/test_chat_completion_v1.py
@@ -11,9 +11,14 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import openai
-from testing_support.fixtures import override_llm_token_callback_settings, reset_core_stats_engine, validate_attributes
+import pytest
+from testing_support.fixtures import (
+    override_application_settings,
+    override_llm_token_callback_settings,
+    reset_core_stats_engine,
+    validate_attributes,
+)
 from testing_support.ml_testing_utils import (
     add_token_count_to_events,
     disabled_ai_monitoring_record_content_settings,
@@ -31,7 +36,7 @@
 
 from newrelic.api.background_task import background_task
 from newrelic.api.llm_custom_attributes import WithLlmCustomAttributes
-from newrelic.api.transaction import add_custom_attribute
+from newrelic.api.transaction import accept_distributed_trace_headers, add_custom_attribute
 
 _test_openai_chat_completion_messages = (
     {"role": "system", "content": "You are a scientist."},
@@ -387,6 +392,46 @@ def test_openai_chat_completion_async_with_llm_metadata_no_content(loop, set_tra
 )
 
 
+@pytest.mark.parametrize("partial_granularity_type", ("reduced", "essential", "compact"))
+def test_openai_chat_completion_async_in_txn_with_token_count_partial_granularity_dt(
+    partial_granularity_type, set_trace_info, loop, async_openai_client
+):
+    @reset_core_stats_engine()
+    @disabled_ai_monitoring_record_content_settings
+    @validate_custom_events(events_sans_content(chat_completion_recorded_events))
+    @validate_custom_event_count(count=4)
+    @validate_transaction_metrics(
+        "test_chat_completion_v1:test_openai_chat_completion_async_in_txn_with_token_count_partial_granularity_dt.<locals>._test",
+        scoped_metrics=[("Llm/completion/OpenAI/create", 1)],
+        rollup_metrics=[("Llm/completion/OpenAI/create", 1)],
+        custom_metrics=[(f"Supportability/Python/ML/OpenAI/{openai.__version__}", 1)],
+        background_task=True,
+    )
+    @validate_attributes("agent", ["llm"])
+    @override_application_settings(
+        {
+            "distributed_tracing.sampler.full_granularity.enabled": False,
+            "distributed_tracing.sampler.partial_granularity.enabled": True,
+            "distributed_tracing.sampler.partial_granularity.type": partial_granularity_type,
+            
"distributed_tracing.sampler.partial_granularity.remote_parent_sampled": "always_on", + "span_events.enabled": True, + } + ) + @background_task() + def _test(): + add_custom_attribute("llm.conversation_id", "my-awesome-id") + add_custom_attribute("llm.foo", "bar") + accept_distributed_trace_headers({"traceparent": "00-0af7651916cd43dd8448eb211c80319c-00f067aa0ba902b7-01"}) + set_trace_info() + loop.run_until_complete( + async_openai_client.chat.completions.create( + model="gpt-3.5-turbo", messages=_test_openai_chat_completion_messages, temperature=0.7, max_tokens=100 + ) + ) + + _test() + + @reset_core_stats_engine() @override_llm_token_callback_settings(llm_token_count_callback) @validate_custom_events(add_token_count_to_events(chat_completion_recorded_events))