diff --git a/.github/workflows/core_contrib_test_0.yml b/.github/workflows/core_contrib_test_0.yml
index 67bda629ff..11c28aad89 100644
--- a/.github/workflows/core_contrib_test_0.yml
+++ b/.github/workflows/core_contrib_test_0.yml
@@ -63,6 +63,50 @@ jobs:
- name: Run tests
run: tox -e py38-test-instrumentation-openai-v2-1 -- -ra
+ py38-test-instrumentation-vertexai-v2-0:
+ name: instrumentation-vertexai-v2-0
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout contrib repo @ SHA - ${{ env.CONTRIB_REPO_SHA }}
+ uses: actions/checkout@v4
+ with:
+ repository: open-telemetry/opentelemetry-python-contrib
+ ref: ${{ env.CONTRIB_REPO_SHA }}
+
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8"
+ architecture: "x64"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py38-test-instrumentation-vertexai-v2-0 -- -ra
+
+ py38-test-instrumentation-vertexai-v2-1:
+ name: instrumentation-vertexai-v2-1
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout contrib repo @ SHA - ${{ env.CONTRIB_REPO_SHA }}
+ uses: actions/checkout@v4
+ with:
+ repository: open-telemetry/opentelemetry-python-contrib
+ ref: ${{ env.CONTRIB_REPO_SHA }}
+
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8"
+ architecture: "x64"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py38-test-instrumentation-vertexai-v2-1 -- -ra
+
py38-test-resource-detector-container:
name: resource-detector-container
runs-on: ubuntu-latest
diff --git a/.github/workflows/lint_0.yml b/.github/workflows/lint_0.yml
index 9d77ef5e27..bd0f083dcc 100644
--- a/.github/workflows/lint_0.yml
+++ b/.github/workflows/lint_0.yml
@@ -34,6 +34,24 @@ jobs:
- name: Run tests
run: tox -e lint-instrumentation-openai-v2
+ lint-instrumentation-vertexai-v2:
+ name: instrumentation-vertexai-v2
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e lint-instrumentation-vertexai-v2
+
lint-resource-detector-container:
name: resource-detector-container
runs-on: ubuntu-latest
diff --git a/.github/workflows/test_0.yml b/.github/workflows/test_0.yml
index 47c9a19cf3..bdf4bd5a7c 100644
--- a/.github/workflows/test_0.yml
+++ b/.github/workflows/test_0.yml
@@ -232,6 +232,186 @@ jobs:
- name: Run tests
run: tox -e pypy3-test-instrumentation-openai-v2-1 -- -ra
+ py38-test-instrumentation-vertexai-v2-0_ubuntu-latest:
+ name: instrumentation-vertexai-v2-0 3.8 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py38-test-instrumentation-vertexai-v2-0 -- -ra
+
+ py38-test-instrumentation-vertexai-v2-1_ubuntu-latest:
+ name: instrumentation-vertexai-v2-1 3.8 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py38-test-instrumentation-vertexai-v2-1 -- -ra
+
+ py39-test-instrumentation-vertexai-v2-0_ubuntu-latest:
+ name: instrumentation-vertexai-v2-0 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py39-test-instrumentation-vertexai-v2-0 -- -ra
+
+ py39-test-instrumentation-vertexai-v2-1_ubuntu-latest:
+ name: instrumentation-vertexai-v2-1 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py39-test-instrumentation-vertexai-v2-1 -- -ra
+
+ py310-test-instrumentation-vertexai-v2-0_ubuntu-latest:
+ name: instrumentation-vertexai-v2-0 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py310-test-instrumentation-vertexai-v2-0 -- -ra
+
+ py310-test-instrumentation-vertexai-v2-1_ubuntu-latest:
+ name: instrumentation-vertexai-v2-1 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py310-test-instrumentation-vertexai-v2-1 -- -ra
+
+ py311-test-instrumentation-vertexai-v2-0_ubuntu-latest:
+ name: instrumentation-vertexai-v2-0 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py311-test-instrumentation-vertexai-v2-0 -- -ra
+
+ py311-test-instrumentation-vertexai-v2-1_ubuntu-latest:
+ name: instrumentation-vertexai-v2-1 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py311-test-instrumentation-vertexai-v2-1 -- -ra
+
+ py312-test-instrumentation-vertexai-v2-0_ubuntu-latest:
+ name: instrumentation-vertexai-v2-0 3.12 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py312-test-instrumentation-vertexai-v2-0 -- -ra
+
+ py312-test-instrumentation-vertexai-v2-1_ubuntu-latest:
+ name: instrumentation-vertexai-v2-1 3.12 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py312-test-instrumentation-vertexai-v2-1 -- -ra
+
py38-test-resource-detector-container_ubuntu-latest:
name: resource-detector-container 3.8 Ubuntu
runs-on: ubuntu-latest
@@ -4335,183 +4515,3 @@ jobs:
- name: Run tests
run: tox -e py38-test-instrumentation-mysqlclient -- -ra
-
- py39-test-instrumentation-mysqlclient_ubuntu-latest:
- name: instrumentation-mysqlclient 3.9 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.9
- uses: actions/setup-python@v5
- with:
- python-version: "3.9"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py39-test-instrumentation-mysqlclient -- -ra
-
- py310-test-instrumentation-mysqlclient_ubuntu-latest:
- name: instrumentation-mysqlclient 3.10 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py310-test-instrumentation-mysqlclient -- -ra
-
- py311-test-instrumentation-mysqlclient_ubuntu-latest:
- name: instrumentation-mysqlclient 3.11 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.11
- uses: actions/setup-python@v5
- with:
- python-version: "3.11"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py311-test-instrumentation-mysqlclient -- -ra
-
- py312-test-instrumentation-mysqlclient_ubuntu-latest:
- name: instrumentation-mysqlclient 3.12 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.12
- uses: actions/setup-python@v5
- with:
- python-version: "3.12"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py312-test-instrumentation-mysqlclient -- -ra
-
- pypy3-test-instrumentation-mysqlclient_ubuntu-latest:
- name: instrumentation-mysqlclient pypy-3.8 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python pypy-3.8
- uses: actions/setup-python@v5
- with:
- python-version: "pypy-3.8"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e pypy3-test-instrumentation-mysqlclient -- -ra
-
- py38-test-instrumentation-psycopg2_ubuntu-latest:
- name: instrumentation-psycopg2 3.8 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.8
- uses: actions/setup-python@v5
- with:
- python-version: "3.8"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py38-test-instrumentation-psycopg2 -- -ra
-
- py39-test-instrumentation-psycopg2_ubuntu-latest:
- name: instrumentation-psycopg2 3.9 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.9
- uses: actions/setup-python@v5
- with:
- python-version: "3.9"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py39-test-instrumentation-psycopg2 -- -ra
-
- py310-test-instrumentation-psycopg2_ubuntu-latest:
- name: instrumentation-psycopg2 3.10 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py310-test-instrumentation-psycopg2 -- -ra
-
- py311-test-instrumentation-psycopg2_ubuntu-latest:
- name: instrumentation-psycopg2 3.11 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.11
- uses: actions/setup-python@v5
- with:
- python-version: "3.11"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py311-test-instrumentation-psycopg2 -- -ra
-
- py312-test-instrumentation-psycopg2_ubuntu-latest:
- name: instrumentation-psycopg2 3.12 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.12
- uses: actions/setup-python@v5
- with:
- python-version: "3.12"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py312-test-instrumentation-psycopg2 -- -ra
diff --git a/.github/workflows/test_1.yml b/.github/workflows/test_1.yml
index 9c5d48aea3..e6aa293f9d 100644
--- a/.github/workflows/test_1.yml
+++ b/.github/workflows/test_1.yml
@@ -16,6 +16,186 @@ env:
jobs:
+ py39-test-instrumentation-mysqlclient_ubuntu-latest:
+ name: instrumentation-mysqlclient 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py39-test-instrumentation-mysqlclient -- -ra
+
+ py310-test-instrumentation-mysqlclient_ubuntu-latest:
+ name: instrumentation-mysqlclient 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py310-test-instrumentation-mysqlclient -- -ra
+
+ py311-test-instrumentation-mysqlclient_ubuntu-latest:
+ name: instrumentation-mysqlclient 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py311-test-instrumentation-mysqlclient -- -ra
+
+ py312-test-instrumentation-mysqlclient_ubuntu-latest:
+ name: instrumentation-mysqlclient 3.12 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py312-test-instrumentation-mysqlclient -- -ra
+
+ pypy3-test-instrumentation-mysqlclient_ubuntu-latest:
+ name: instrumentation-mysqlclient pypy-3.8 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python pypy-3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "pypy-3.8"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e pypy3-test-instrumentation-mysqlclient -- -ra
+
+ py38-test-instrumentation-psycopg2_ubuntu-latest:
+ name: instrumentation-psycopg2 3.8 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py38-test-instrumentation-psycopg2 -- -ra
+
+ py39-test-instrumentation-psycopg2_ubuntu-latest:
+ name: instrumentation-psycopg2 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py39-test-instrumentation-psycopg2 -- -ra
+
+ py310-test-instrumentation-psycopg2_ubuntu-latest:
+ name: instrumentation-psycopg2 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py310-test-instrumentation-psycopg2 -- -ra
+
+ py311-test-instrumentation-psycopg2_ubuntu-latest:
+ name: instrumentation-psycopg2 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py311-test-instrumentation-psycopg2 -- -ra
+
+ py312-test-instrumentation-psycopg2_ubuntu-latest:
+ name: instrumentation-psycopg2 3.12 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py312-test-instrumentation-psycopg2 -- -ra
+
py38-test-instrumentation-psycopg_ubuntu-latest:
name: instrumentation-psycopg 3.8 Ubuntu
runs-on: ubuntu-latest
@@ -4335,183 +4515,3 @@ jobs:
- name: Run tests
run: tox -e py312-test-instrumentation-asyncio -- -ra
-
- py38-test-instrumentation-cassandra_ubuntu-latest:
- name: instrumentation-cassandra 3.8 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.8
- uses: actions/setup-python@v5
- with:
- python-version: "3.8"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py38-test-instrumentation-cassandra -- -ra
-
- py39-test-instrumentation-cassandra_ubuntu-latest:
- name: instrumentation-cassandra 3.9 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.9
- uses: actions/setup-python@v5
- with:
- python-version: "3.9"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py39-test-instrumentation-cassandra -- -ra
-
- py310-test-instrumentation-cassandra_ubuntu-latest:
- name: instrumentation-cassandra 3.10 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py310-test-instrumentation-cassandra -- -ra
-
- py311-test-instrumentation-cassandra_ubuntu-latest:
- name: instrumentation-cassandra 3.11 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.11
- uses: actions/setup-python@v5
- with:
- python-version: "3.11"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py311-test-instrumentation-cassandra -- -ra
-
- py312-test-instrumentation-cassandra_ubuntu-latest:
- name: instrumentation-cassandra 3.12 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.12
- uses: actions/setup-python@v5
- with:
- python-version: "3.12"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py312-test-instrumentation-cassandra -- -ra
-
- pypy3-test-instrumentation-cassandra_ubuntu-latest:
- name: instrumentation-cassandra pypy-3.8 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python pypy-3.8
- uses: actions/setup-python@v5
- with:
- python-version: "pypy-3.8"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e pypy3-test-instrumentation-cassandra -- -ra
-
- py38-test-processor-baggage_ubuntu-latest:
- name: processor-baggage 3.8 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.8
- uses: actions/setup-python@v5
- with:
- python-version: "3.8"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py38-test-processor-baggage -- -ra
-
- py39-test-processor-baggage_ubuntu-latest:
- name: processor-baggage 3.9 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.9
- uses: actions/setup-python@v5
- with:
- python-version: "3.9"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py39-test-processor-baggage -- -ra
-
- py310-test-processor-baggage_ubuntu-latest:
- name: processor-baggage 3.10 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.10
- uses: actions/setup-python@v5
- with:
- python-version: "3.10"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py310-test-processor-baggage -- -ra
-
- py311-test-processor-baggage_ubuntu-latest:
- name: processor-baggage 3.11 Ubuntu
- runs-on: ubuntu-latest
- steps:
- - name: Checkout repo @ SHA - ${{ github.sha }}
- uses: actions/checkout@v4
-
- - name: Set up Python 3.11
- uses: actions/setup-python@v5
- with:
- python-version: "3.11"
-
- - name: Install tox
- run: pip install tox
-
- - name: Run tests
- run: tox -e py311-test-processor-baggage -- -ra
diff --git a/.github/workflows/test_2.yml b/.github/workflows/test_2.yml
index c23866ffa8..7614c8988f 100644
--- a/.github/workflows/test_2.yml
+++ b/.github/workflows/test_2.yml
@@ -16,6 +16,186 @@ env:
jobs:
+ py38-test-instrumentation-cassandra_ubuntu-latest:
+ name: instrumentation-cassandra 3.8 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py38-test-instrumentation-cassandra -- -ra
+
+ py39-test-instrumentation-cassandra_ubuntu-latest:
+ name: instrumentation-cassandra 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py39-test-instrumentation-cassandra -- -ra
+
+ py310-test-instrumentation-cassandra_ubuntu-latest:
+ name: instrumentation-cassandra 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py310-test-instrumentation-cassandra -- -ra
+
+ py311-test-instrumentation-cassandra_ubuntu-latest:
+ name: instrumentation-cassandra 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py311-test-instrumentation-cassandra -- -ra
+
+ py312-test-instrumentation-cassandra_ubuntu-latest:
+ name: instrumentation-cassandra 3.12 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.12
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.12"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py312-test-instrumentation-cassandra -- -ra
+
+ pypy3-test-instrumentation-cassandra_ubuntu-latest:
+ name: instrumentation-cassandra pypy-3.8 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python pypy-3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "pypy-3.8"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e pypy3-test-instrumentation-cassandra -- -ra
+
+ py38-test-processor-baggage_ubuntu-latest:
+ name: processor-baggage 3.8 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.8
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.8"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py38-test-processor-baggage -- -ra
+
+ py39-test-processor-baggage_ubuntu-latest:
+ name: processor-baggage 3.9 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.9
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.9"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py39-test-processor-baggage -- -ra
+
+ py310-test-processor-baggage_ubuntu-latest:
+ name: processor-baggage 3.10 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.10
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.10"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py310-test-processor-baggage -- -ra
+
+ py311-test-processor-baggage_ubuntu-latest:
+ name: processor-baggage 3.11 Ubuntu
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repo @ SHA - ${{ github.sha }}
+ uses: actions/checkout@v4
+
+ - name: Set up Python 3.11
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+
+ - name: Install tox
+ run: pip install tox
+
+ - name: Run tests
+ run: tox -e py311-test-processor-baggage -- -ra
+
py312-test-processor-baggage_ubuntu-latest:
name: processor-baggage 3.12 Ubuntu
runs-on: ubuntu-latest
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/CHANGELOG.md b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/CHANGELOG.md
new file mode 100644
index 0000000000..33e7cea173
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/CHANGELOG.md
@@ -0,0 +1,10 @@
+# Changelog
+
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## Unreleased
+
+- Add boilerplate for `opentelemetry-instrumentation-vertexai-v2`
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/LICENSE b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/LICENSE
new file mode 100644
index 0000000000..261eeb9e9f
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/README.rst b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/README.rst
new file mode 100644
index 0000000000..6a46a55c85
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/README.rst
@@ -0,0 +1,29 @@
+OpenTelemetry VertexAI Instrumentation
+======================================
+
+|pypi|
+
+.. |pypi| image:: https://badge.fury.io/py/opentelemetry-instrumentation-vertexai-v2.svg
+ :target: https://pypi.org/project/opentelemetry-instrumentation-vertexai-v2/
+
+This library allows tracing LLM requests and logging of messages made by the
+`VertexAI Python API library <https://cloud.google.com/python/docs/reference/aiplatform/latest>`_.
+
+
+Installation
+------------
+
+If your application is already instrumented with OpenTelemetry, add this
+package to your requirements.
+::
+
+ pip install opentelemetry-instrumentation-vertexai-v2
+
+If you don't have a VertexAI application yet, try our `example <example>`_.
+
+References
+----------
+* `OpenTelemetry VertexAI Instrumentation <https://opentelemetry-python-contrib.readthedocs.io/en/latest/>`_
+* `OpenTelemetry Project <https://opentelemetry.io/>`_
+* `OpenTelemetry Python Examples <https://github.com/open-telemetry/opentelemetry-python/tree/main/docs/examples>`_
+
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/.env b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/.env
new file mode 100644
index 0000000000..0a92e5539d
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/.env
@@ -0,0 +1,10 @@
+OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318
+OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
+OTEL_SERVICE_NAME=opentelemetry-python-vertexai
+
+# Change to 'false' to disable logging
+OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED=true
+# Change to 'console' if your OTLP endpoint doesn't support logs
+OTEL_LOGS_EXPORTER=otlp_proto_http
+# Change to 'false' to hide prompt and completion content
+OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/README.rst b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/README.rst
new file mode 100644
index 0000000000..6fe161f82f
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/README.rst
@@ -0,0 +1,38 @@
+OpenTelemetry VertexAI Instrumentation Example
+==============================================
+
+This is an example of how to instrument VertexAI calls with zero code changes,
+using `opentelemetry-instrument`.
+
+When `main.py <main.py>`_ is run, it exports traces and logs to an OTLP
+compatible endpoint. Traces include details such as the model used and the
+duration of the chat request. Logs capture the chat request and the generated
+response, providing a comprehensive view of the performance and behavior of
+your VertexAI requests.
+
+Setup
+-----
+
+An OTLP compatible endpoint should be listening for traces and logs on http://localhost:4318.
+If not, update "OTEL_EXPORTER_OTLP_ENDPOINT" as well.
+
+Next, set up a virtual environment like this:
+
+::
+
+ python3 -m venv .venv
+ source .venv/bin/activate
+ pip install "python-dotenv[cli]"
+ pip install -r requirements.txt
+
+Run
+---
+
+Run the example like this:
+
+::
+
+ dotenv run -- opentelemetry-instrument python main.py
+
+You should see a poem generated by VertexAI while traces and logs export to your
+configured observability tool.
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/main.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/main.py
new file mode 100644
index 0000000000..2ddfdb73bb
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/main.py
@@ -0,0 +1,16 @@
+import vertexai
+from vertexai.generative_models import GenerativeModel
+
+vertexai.init(location="us-central1")
+
+model = GenerativeModel("gemini-1.5-flash-002")
+
+response = model.generate_content("Write a short poem on OpenTelemetry.")
+
+print(response.text)
+# Example response:
+# **Emphasizing the Dried Aspect:**
+# * Everlasting Blooms
+# * Dried & Delightful
+# * The Petal Preserve
+# ...
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/requirements.txt b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/requirements.txt
new file mode 100644
index 0000000000..3aea8bcb24
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/example/requirements.txt
@@ -0,0 +1,6 @@
+google-cloud-aiplatform>=1.64
+
+opentelemetry-sdk~=1.28.2
+opentelemetry-exporter-otlp-proto-http~=1.28.2
+opentelemetry-distro~=0.49b2
+opentelemetry-instrumentation-vertexai-v2~=2.0b0
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/pyproject.toml b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/pyproject.toml
new file mode 100644
index 0000000000..45faacdc06
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/pyproject.toml
@@ -0,0 +1,49 @@
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[project]
+name = "opentelemetry-instrumentation-vertexai-v2"
+dynamic = ["version"]
+description = "OpenTelemetry Official VertexAI instrumentation"
+readme = "README.rst"
+license = "Apache-2.0"
+requires-python = ">=3.8"
+authors = [
+ { name = "OpenTelemetry Authors", email = "cncf-opentelemetry-contributors@lists.cncf.io" },
+]
+classifiers = [
+ "Development Status :: 4 - Beta",
+ "Intended Audience :: Developers",
+ "License :: OSI Approved :: Apache Software License",
+ "Programming Language :: Python",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.8",
+ "Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
+ "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
+]
+dependencies = [
+ "opentelemetry-api ~= 1.28",
+ "opentelemetry-instrumentation ~= 0.49b0",
+ "opentelemetry-semantic-conventions ~= 0.49b0",
+]
+
+[project.optional-dependencies]
+instruments = ["google-cloud-aiplatform >= 1.64"]
+
+[project.entry-points.opentelemetry_instrumentor]
+vertexai = "opentelemetry.instrumentation.vertexai_v2:VertexAIInstrumentor"
+
+[project.urls]
+Homepage = "https://github.com/open-telemetry/opentelemetry-python-contrib/tree/main/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2"
+
+[tool.hatch.version]
+path = "src/opentelemetry/instrumentation/vertexai_v2/version.py"
+
+[tool.hatch.build.targets.sdist]
+include = ["/src", "/tests"]
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/opentelemetry"]
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/__init__.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/__init__.py
new file mode 100644
index 0000000000..928486d4c3
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/__init__.py
@@ -0,0 +1,450 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""OpenTelemetry Vertex AI instrumentation"""
+
+import logging
+import types
+from functools import partial
+from typing import Collection, Optional
+
+from wrapt import wrap_function_wrapper
+
+from opentelemetry._events import (
+ EventLogger,
+ EventLoggerProvider,
+ get_event_logger,
+)
+from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
+from opentelemetry.instrumentation.utils import (
+ is_instrumentation_enabled,
+ unwrap,
+)
+from opentelemetry.instrumentation.vertexai_v2.events import (
+ assistant_event,
+ user_event,
+)
+from opentelemetry.instrumentation.vertexai_v2.utils import dont_throw
+from opentelemetry.instrumentation.vertexai_v2.version import __version__
+from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
+from opentelemetry.trace import SpanKind, TracerProvider, get_tracer
+from opentelemetry.trace.status import Status, StatusCode
+
+logger = logging.getLogger(__name__)
+
+_instruments = ("google-cloud-aiplatform >= 1.38.1",)
+
+# TODO: span_name should no longer be needed as it comes from `{gen_ai.operation.name} {gen_ai.request.model}`
+WRAPPED_METHODS = [
+ {
+ "package": "vertexai.generative_models",
+ "object": "GenerativeModel",
+ "method": "generate_content",
+ "span_name": "vertexai.generate_content",
+ "is_async": False,
+ },
+ {
+ "package": "vertexai.generative_models",
+ "object": "GenerativeModel",
+ "method": "generate_content_async",
+ "span_name": "vertexai.generate_content_async",
+ "is_async": True,
+ },
+ {
+ "package": "vertexai.preview.generative_models",
+ "object": "GenerativeModel",
+ "method": "generate_content",
+ "span_name": "vertexai.generate_content",
+ "is_async": False,
+ },
+ {
+ "package": "vertexai.preview.generative_models",
+ "object": "GenerativeModel",
+ "method": "generate_content_async",
+ "span_name": "vertexai.generate_content_async",
+ "is_async": True,
+ },
+ {
+ "package": "vertexai.language_models",
+ "object": "TextGenerationModel",
+ "method": "predict",
+ "span_name": "vertexai.predict",
+ "is_async": False,
+ },
+ {
+ "package": "vertexai.language_models",
+ "object": "TextGenerationModel",
+ "method": "predict_async",
+ "span_name": "vertexai.predict_async",
+ "is_async": True,
+ },
+ {
+ "package": "vertexai.language_models",
+ "object": "TextGenerationModel",
+ "method": "predict_streaming",
+ "span_name": "vertexai.predict_streaming",
+ "is_async": False,
+ },
+ {
+ "package": "vertexai.language_models",
+ "object": "TextGenerationModel",
+ "method": "predict_streaming_async",
+ "span_name": "vertexai.predict_streaming_async",
+ "is_async": True,
+ },
+ {
+ "package": "vertexai.language_models",
+ "object": "ChatSession",
+ "method": "send_message",
+ "span_name": "vertexai.send_message",
+ "is_async": False,
+ },
+ {
+ "package": "vertexai.language_models",
+ "object": "ChatSession",
+ "method": "send_message_streaming",
+ "span_name": "vertexai.send_message_streaming",
+ "is_async": False,
+ },
+]
+
+
+def should_send_prompts():
+ # Previously was opt-in by the following check for privacy reasons:
+ #
+ # return (
+ # os.getenv("TRACELOOP_TRACE_CONTENT") or "true"
+ # ).lower() == "true" or context_api.get_value(
+ # "override_enable_content_tracing"
+ # )
+ return True
+
+
+def is_streaming_response(response):
+ return isinstance(response, types.GeneratorType)
+
+
+def is_async_streaming_response(response):
+ return isinstance(response, types.AsyncGeneratorType)
+
+
+def _set_span_attribute(span, name, value):
+ if value is not None:
+ if value != "":
+ span.set_attribute(name, value)
+
+
+def _set_input_attributes(
+ span, event_logger: EventLogger, args, kwargs, llm_model
+):
+ if should_send_prompts() and args is not None and len(args) > 0:
+ prompt = ""
+ for arg in args:
+ if isinstance(arg, str):
+ prompt = f"{prompt}{arg}\n"
+ elif isinstance(arg, list):
+ for subarg in arg:
+ prompt = f"{prompt}{subarg}\n"
+
+ # _set_span_attribute(
+ # span,
+ # f"{SpanAttributes.LLM_PROMPTS}.0.user",
+ # prompt,
+ # )
+ if prompt:
+ event_logger.emit(
+ user_event(
+ gen_ai_system=gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+ content=prompt,
+ span_context=span.get_span_context(),
+ )
+ )
+
+ # Copied from openllmetry logic
+ # https://github.com/traceloop/openllmetry/blob/v0.33.12/packages/opentelemetry-instrumentation-vertexai/opentelemetry/instrumentation/vertexai/__init__.py#L141-L143
+ # I guess prompt may be in kwargs instead or in addition?
+ prompt = kwargs.get("prompt")
+ if prompt:
+ event_logger.emit(
+ user_event(
+ gen_ai_system=gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+ content=prompt,
+ span_context=span.get_span_context(),
+ )
+ )
+
+ _set_span_attribute(
+ span, gen_ai_attributes.GEN_AI_REQUEST_MODEL, llm_model
+ )
+ _set_span_attribute(
+ span,
+ gen_ai_attributes.GEN_AI_REQUEST_TEMPERATURE,
+ kwargs.get("temperature"),
+ )
+ _set_span_attribute(
+ span,
+ gen_ai_attributes.GEN_AI_REQUEST_MAX_TOKENS,
+ kwargs.get("max_output_tokens"),
+ )
+ _set_span_attribute(
+ span, gen_ai_attributes.GEN_AI_REQUEST_TOP_P, kwargs.get("top_p")
+ )
+ _set_span_attribute(
+ span, gen_ai_attributes.GEN_AI_REQUEST_TOP_K, kwargs.get("top_k")
+ )
+ _set_span_attribute(
+ span,
+ gen_ai_attributes.GEN_AI_REQUEST_PRESENCE_PENALTY,
+ kwargs.get("presence_penalty"),
+ )
+ _set_span_attribute(
+ span,
+ gen_ai_attributes.GEN_AI_REQUEST_FREQUENCY_PENALTY,
+ kwargs.get("frequency_penalty"),
+ )
+
+
+@dont_throw
+def _set_response_attributes(
+ span, event_logger: EventLogger, llm_model, generation_text, token_usage
+):
+ _set_span_attribute(
+ span, gen_ai_attributes.GEN_AI_RESPONSE_MODEL, llm_model
+ )
+
+ if token_usage:
+ _set_span_attribute(
+ span,
+ gen_ai_attributes.GEN_AI_USAGE_OUTPUT_TOKENS,
+ token_usage.candidates_token_count,
+ )
+ _set_span_attribute(
+ span,
+ gen_ai_attributes.GEN_AI_USAGE_INPUT_TOKENS,
+ token_usage.prompt_token_count,
+ )
+
+ if generation_text:
+ event_logger.emit(
+ assistant_event(
+ gen_ai_system=gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+ content=generation_text,
+ span_context=span.get_span_context(),
+ )
+ )
+
+
+def _build_from_streaming_response(
+ span, event_logger: EventLogger, response, llm_model
+):
+ complete_response = ""
+ token_usage = None
+ for item in response:
+ item_to_yield = item
+ complete_response += str(item.text)
+ if item.usage_metadata:
+ token_usage = item.usage_metadata
+
+ yield item_to_yield
+
+ _set_response_attributes(
+ span, event_logger, llm_model, complete_response, token_usage
+ )
+
+ span.set_status(Status(StatusCode.OK))
+ span.end()
+
+
+async def _abuild_from_streaming_response(
+ span, event_logger: EventLogger, response, llm_model
+):
+ complete_response = ""
+ token_usage = None
+ async for item in response:
+ item_to_yield = item
+ complete_response += str(item.text)
+ if item.usage_metadata:
+ token_usage = item.usage_metadata
+
+ yield item_to_yield
+
+ _set_response_attributes(
+ span, event_logger, llm_model, complete_response, token_usage
+ )
+
+ span.set_status(Status(StatusCode.OK))
+ span.end()
+
+
+@dont_throw
+def _handle_request(span, event_logger, args, kwargs, llm_model):
+ if span.is_recording():
+ _set_input_attributes(span, event_logger, args, kwargs, llm_model)
+
+
+@dont_throw
+def _handle_response(span, event_logger: EventLogger, response, llm_model):
+ if span.is_recording():
+ _set_response_attributes(
+ span,
+ event_logger,
+ llm_model,
+ response.candidates[0].text,
+ response.usage_metadata,
+ )
+
+ span.set_status(Status(StatusCode.OK))
+
+
+async def _awrap(
+ tracer, event_logger: EventLogger, to_wrap, wrapped, instance, args, kwargs
+):
+ """Instruments and calls every function defined in TO_WRAP."""
+ if not is_instrumentation_enabled():
+ return await wrapped(*args, **kwargs)
+
+ llm_model = "unknown"
+ if hasattr(instance, "_model_id"):
+ llm_model = instance._model_id
+ if hasattr(instance, "_model_name"):
+ llm_model = instance._model_name.replace(
+ "publishers/google/models/", ""
+ )
+
+ operation_name = (
+ gen_ai_attributes.GenAiOperationNameValues.TEXT_COMPLETION.value
+ )
+ name = f"{operation_name} {llm_model}"
+ span = tracer.start_span(
+ name,
+ kind=SpanKind.CLIENT,
+ attributes={
+ gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+ gen_ai_attributes.GEN_AI_OPERATION_NAME: operation_name,
+ },
+ )
+
+ _handle_request(span, event_logger, args, kwargs, llm_model)
+
+ response = await wrapped(*args, **kwargs)
+
+ if response:
+ if is_streaming_response(response):
+ return _build_from_streaming_response(
+ span, event_logger, response, llm_model
+ )
+ if is_async_streaming_response(response):
+ return _abuild_from_streaming_response(
+ span, event_logger, response, llm_model
+ )
+ _handle_response(span, event_logger, response, llm_model)
+
+ span.end()
+ return response
+
+
+def _wrap(
+ tracer, event_logger: EventLogger, to_wrap, wrapped, instance, args, kwargs
+):
+ """Instruments and calls every function defined in TO_WRAP."""
+ if not is_instrumentation_enabled():
+ return wrapped(*args, **kwargs)
+
+ llm_model = "unknown"
+ if hasattr(instance, "_model_id"):
+ llm_model = instance._model_id
+ if hasattr(instance, "_model_name"):
+ llm_model = instance._model_name.replace(
+ "publishers/google/models/", ""
+ )
+
+ operation_name = (
+ gen_ai_attributes.GenAiOperationNameValues.TEXT_COMPLETION.value
+ )
+ name = f"{operation_name} {llm_model}"
+ span = tracer.start_span(
+ name,
+ kind=SpanKind.CLIENT,
+ attributes={
+ gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_attributes.GenAiSystemValues.VERTEX_AI.value,
+ gen_ai_attributes.GEN_AI_OPERATION_NAME: operation_name,
+ },
+ )
+
+ _handle_request(span, event_logger, args, kwargs, llm_model)
+
+ response = wrapped(*args, **kwargs)
+
+ if response:
+ if is_streaming_response(response):
+ return _build_from_streaming_response(
+ span, event_logger, response, llm_model
+ )
+ if is_async_streaming_response(response):
+ return _abuild_from_streaming_response(
+ span, event_logger, response, llm_model
+ )
+ _handle_response(span, event_logger, response, llm_model)
+
+ span.end()
+ return response
+
+
+class VertexAIInstrumentor(BaseInstrumentor):
+ """An instrumentor for VertextAI's client library."""
+
+ def __init__(self, exception_logger=None):
+ super().__init__()
+
+ def instrumentation_dependencies(self) -> Collection[str]:
+ return _instruments
+
+ def _instrument(
+ self,
+ *,
+ tracer_provider: Optional[TracerProvider] = None,
+ event_logger_provider: Optional[EventLoggerProvider] = None,
+ **kwargs,
+ ):
+ tracer = get_tracer(
+ __name__, __version__, tracer_provider=tracer_provider
+ )
+ event_logger = get_event_logger(
+ __name__,
+ version=__version__,
+ event_logger_provider=event_logger_provider,
+ )
+ for wrapped_method in WRAPPED_METHODS:
+ wrap_package = wrapped_method.get("package")
+ wrap_object = wrapped_method.get("object")
+ wrap_method = wrapped_method.get("method")
+
+ wrap_function_wrapper(
+ wrap_package,
+ f"{wrap_object}.{wrap_method}",
+ (
+ partial(_awrap, tracer, event_logger, wrapped_method)
+ if wrapped_method.get("is_async")
+ else partial(_wrap, tracer, event_logger, wrapped_method)
+ ),
+ )
+
+ def _uninstrument(self, **kwargs):
+ for wrapped_method in WRAPPED_METHODS:
+ wrap_package = wrapped_method.get("package")
+ wrap_object = wrapped_method.get("object")
+ unwrap(
+ f"{wrap_package}.{wrap_object}",
+ wrapped_method.get("method", ""),
+ )
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/events.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/events.py
new file mode 100644
index 0000000000..8807c7bfaa
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/events.py
@@ -0,0 +1,87 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Factories for event types described in
+https://github.com/open-telemetry/semantic-conventions/blob/main/docs/gen-ai/gen-ai-events.md#system-event.
+
+Hopefully this code can be autogenerated by Weaver once Gen AI semantic conventions are
+schematized in YAML and the Weaver tool supports it.
+"""
+
+from typing import Optional
+
+from opentelemetry._events import Event
+from opentelemetry.semconv._incubating.attributes import gen_ai_attributes
+from opentelemetry.trace import SpanContext
+from opentelemetry.util.types import AnyValue
+
+
+def _set_span_context(event: Event, span_context: Optional[SpanContext]):
+ if not span_context:
+ return
+ event.span_id = span_context.span_id
+ event.trace_id = span_context.trace_id
+ event.trace_flags = span_context.trace_flags
+
+
+def user_event(
+ *,
+ gen_ai_system: str,
+ # TODO: should I just leave role out since it's not required if "user"
+ role: str = "user",
+ content: AnyValue,
+ span_context: Optional[SpanContext] = None,
+) -> Event:
+ """Creates a User event
+ https://github.com/open-telemetry/semantic-conventions/blob/v1.28.0/docs/gen-ai/gen-ai-events.md#user-event
+ """
+ event = Event(
+ name="gen_ai.user.message",
+ attributes={
+ gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_system,
+ },
+ body={
+ "role": role,
+ "content": content,
+ },
+ )
+ _set_span_context(event, span_context)
+ return event
+
+
+# TODO: add tool_calls once instrumentation supports it
+def assistant_event(
+ *,
+ gen_ai_system: str,
+ # TODO: should I just leave role out since it's not required if "assistant"
+ role: str = "assistant",
+ content: AnyValue,
+ span_context: Optional[SpanContext] = None,
+) -> Event:
+ """Creates an Assistant event
+ https://github.com/open-telemetry/semantic-conventions/blob/v1.28.0/docs/gen-ai/gen-ai-events.md#assistant-event
+ """
+ event = Event(
+ name="gen_ai.assistant.message",
+ attributes={
+ gen_ai_attributes.GEN_AI_SYSTEM: gen_ai_system,
+ },
+ body={
+ "role": role,
+ "content": content,
+ },
+ )
+ _set_span_context(event, span_context)
+ return event
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/utils.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/utils.py
new file mode 100644
index 0000000000..323f11f6f2
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/utils.py
@@ -0,0 +1,43 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import traceback
+from typing import Any, Callable, Optional, TypeVar
+
+R = TypeVar("R")
+
+
+def dont_throw(func: Callable[..., R]) -> Callable[..., Optional[R]]:
+ """
+ A decorator that wraps the passed in function and logs exceptions instead of throwing them.
+
+ @param func: The function to wrap
+ @return: The wrapper function
+ """
+ # Obtain a logger specific to the function's module
+ logger = logging.getLogger(func.__module__)
+
+ def wrapper(*args: Any, **kwargs: Any) -> Optional[R]:
+ try:
+ return func(*args, **kwargs)
+ except Exception: # pylint: disable=broad-except
+ logger.debug(
+ "failed to trace in %s, error: %s",
+ func.__name__,
+ traceback.format_exc(),
+ )
+ return None
+
+ return wrapper
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/version.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/version.py
new file mode 100644
index 0000000000..5b77207d9d
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/src/opentelemetry/instrumentation/vertexai_v2/version.py
@@ -0,0 +1,15 @@
+# Copyright The OpenTelemetry Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+__version__ = "2.1b0.dev"
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-0.txt b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-0.txt
new file mode 100644
index 0000000000..537073dee1
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-0.txt
@@ -0,0 +1,57 @@
+annotated-types==0.7.0
+cachetools==5.5.0
+certifi==2024.8.30
+charset-normalizer==3.4.0
+Deprecated==1.2.14
+docstring_parser==0.16
+exceptiongroup==1.2.2
+google-api-core==2.23.0
+google-auth==2.36.0
+google-cloud-aiplatform==1.74.0
+google-cloud-bigquery==3.27.0
+google-cloud-core==2.4.1
+google-cloud-resource-manager==1.13.1
+google-cloud-storage==2.19.0
+google-crc32c==1.5.0
+google-resumable-media==2.7.2
+googleapis-common-protos==1.66.0
+grpc-google-iam-v1==0.13.1
+grpcio==1.68.1
+grpcio-status==1.68.1
+idna==3.10
+importlib-metadata==6.11.0
+iniconfig==2.0.0
+multidict==6.1.0
+packaging==24.0
+pluggy==1.5.0
+propcache==0.2.0
+proto-plus==1.25.0
+protobuf==5.29.1
+pyasn1==0.6.1
+pyasn1_modules==0.4.1
+pydantic==2.8.2
+pydantic_core==2.20.1
+pytest==7.4.4
+pytest-asyncio==0.21.0
+pytest-vcr==1.0.2
+python-dateutil==2.9.0.post0
+PyYAML==6.0.2
+requests==2.32.3
+rsa==4.9
+shapely==2.0.6
+six==1.17.0
+tomli==2.2.1
+typing_extensions==4.12.2
+urllib3==1.26.20
+vcrpy==6.0.2
+wrapt==1.16.0
+yarl==1.15.2
+zipp==3.20.2
+
+# when updating, also update in pyproject.toml
+opentelemetry-api==1.28
+opentelemetry-sdk==1.28
+opentelemetry-semantic-conventions==0.49b0
+opentelemetry-instrumentation==0.49b0
+
+-e instrumentation-genai/opentelemetry-instrumentation-vertexai-v2[instruments]
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-1.txt b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-1.txt
new file mode 100644
index 0000000000..6959df4655
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-1.txt
@@ -0,0 +1,54 @@
+annotated-types==0.7.0
+asgiref==3.8.1
+cachetools==5.5.0
+certifi==2024.8.30
+charset-normalizer==3.4.0
+Deprecated==1.2.15
+docstring_parser==0.16
+exceptiongroup==1.2.2
+google-api-core==2.23.0
+google-auth==2.36.0
+google-cloud-aiplatform==1.74.0
+google-cloud-bigquery==3.27.0
+google-cloud-core==2.4.1
+google-cloud-resource-manager==1.13.1
+google-cloud-storage==2.19.0
+google-crc32c==1.5.0
+google-resumable-media==2.7.2
+googleapis-common-protos==1.66.0
+grpc-google-iam-v1==0.13.1
+grpcio==1.68.1
+grpcio-status==1.68.1
+idna==3.10
+importlib_metadata==8.5.0
+iniconfig==2.0.0
+multidict==6.1.0
+packaging==24.2
+pluggy==1.5.0
+propcache==0.2.0
+proto-plus==1.25.0
+protobuf==5.29.1
+pyasn1==0.6.1
+pyasn1_modules==0.4.1
+pydantic==2.10.3
+pydantic_core==2.27.1
+pytest==7.4.4
+pytest-asyncio==0.21.0
+pytest-vcr==1.0.2
+python-dateutil==2.9.0.post0
+PyYAML==6.0.2
+requests==2.32.3
+rsa==4.9
+shapely==2.0.6
+six==1.17.0
+tomli==2.2.1
+typing_extensions==4.12.2
+urllib3==1.26.20
+vcrpy==6.0.2
+wrapt==1.17.0
+yarl==1.15.2
+zipp==3.20.2
+# test with the latest version of opentelemetry-api, sdk, and semantic conventions
+
+-e opentelemetry-instrumentation
+-e instrumentation-genai/opentelemetry-instrumentation-vertexai-v2[instruments]
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/__init__.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/__init__.py
new file mode 100644
index 0000000000..d8e96c603f
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/__init__.py
@@ -0,0 +1 @@
+"""unit tests."""
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/cassettes/test_vertexai_generate_content.yaml b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/cassettes/test_vertexai_generate_content.yaml
new file mode 100644
index 0000000000..48cf3524e1
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/cassettes/test_vertexai_generate_content.yaml
@@ -0,0 +1,106 @@
+interactions:
+- request:
+ body: |-
+ {
+ "contents": [
+ {
+ "role": "user",
+ "parts": [
+ {
+ "fileData": {
+ "mimeType": "image/jpeg",
+ "fileUri": "gs://generativeai-downloads/images/scones.jpg"
+ }
+ },
+ {
+ "text": "what is shown in this image?"
+ }
+ ]
+ }
+ ]
+ }
+ headers:
+ Accept:
+ - '*/*'
+ Accept-Encoding:
+ - gzip, deflate
+ Connection:
+ - keep-alive
+ Content-Length:
+ - '317'
+ Content-Type:
+ - application/json
+ User-Agent:
+ - python-requests/2.32.3
+ method: POST
+ uri: https://us-central1-aiplatform.googleapis.com/v1beta1/projects/fake-project/locations/us-central1/publishers/google/models/gemini-pro-vision:generateContent?%24alt=json%3Benum-encoding%3Dint
+ response:
+ body:
+ string: |-
+ {
+ "candidates": [
+ {
+ "content": {
+ "role": "model",
+ "parts": [
+ {
+ "text": " The image shows a table with a cup of coffee, a bowl of blueberries, and a plate of scones with blueberries on top. There are also pink flowers on the table."
+ }
+ ]
+ },
+ "finishReason": 1,
+ "safetyRatings": [
+ {
+ "category": 1,
+ "probability": 1,
+ "probabilityScore": 0.025512695,
+ "severity": 1,
+ "severityScore": 0.06933594
+ },
+ {
+ "category": 2,
+ "probability": 1,
+ "probabilityScore": 0.026367188,
+ "severity": 1,
+ "severityScore": 0.07080078
+ },
+ {
+ "category": 3,
+ "probability": 1,
+ "probabilityScore": 0.041503906,
+ "severity": 1,
+ "severityScore": 0.03466797
+ },
+ {
+ "category": 4,
+ "probability": 1,
+ "probabilityScore": 0.091308594,
+ "severity": 1,
+ "severityScore": 0.09033203
+ }
+ ],
+ "avgLogprobs": -0.09557106835501535
+ }
+ ],
+ "usageMetadata": {
+ "promptTokenCount": 265,
+ "candidatesTokenCount": 35,
+ "totalTokenCount": 300
+ },
+ "modelVersion": "gemini-pro-vision"
+ }
+ headers:
+ Content-Type:
+ - application/json; charset=UTF-8
+ Transfer-Encoding:
+ - chunked
+ Vary:
+ - Origin
+ - X-Origin
+ - Referer
+ content-length:
+ - '1299'
+ status:
+ code: 200
+ message: OK
+version: 1
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/conftest.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/conftest.py
new file mode 100644
index 0000000000..32fc19a333
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/conftest.py
@@ -0,0 +1,168 @@
+"""Unit tests configuration module."""
+
+import json
+import re
+from typing import Any, Mapping, MutableMapping
+
+import pytest
+import vertexai
+import yaml
+from google.auth.credentials import AnonymousCredentials
+from vcr import VCR
+from vcr.record_mode import RecordMode
+from vcr.request import Request
+
+from opentelemetry import trace
+from opentelemetry.instrumentation.vertexai_v2 import VertexAIInstrumentor
+from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.export import SimpleSpanProcessor
+from opentelemetry.sdk.trace.export.in_memory_span_exporter import (
+ InMemorySpanExporter,
+)
+
+pytest_plugins = []
+
+FAKE_PROJECT = "fake-project"
+
+
+@pytest.fixture(scope="session")
+def exporter():
+ span_exporter = InMemorySpanExporter()
+ processor = SimpleSpanProcessor(span_exporter)
+
+ provider = TracerProvider()
+ provider.add_span_processor(processor)
+ trace.set_tracer_provider(provider)
+
+ VertexAIInstrumentor().instrument()
+
+ return span_exporter
+
+
+@pytest.fixture(autouse=True)
+def clear_exporter(exporter): # pylint: disable=redefined-outer-name
+ exporter.clear()
+
+
+@pytest.fixture(autouse=True)
+def vertexai_init(vcr: VCR) -> None:
+ # Unfortunately I couldn't find a nice way to globally reset the global_config for each
+ # test because different vertex submodules reference the global instance directly
+ # https://github.com/googleapis/python-aiplatform/blob/v1.74.0/google/cloud/aiplatform/initializer.py#L687
+ # so this config will leak if we don't call init() for each test.
+
+ # When not recording (in CI), don't do any auth. That prevents trying to read application
+ # default credentials from the filesystem or metadata server and oauth token exchange. This
+ # is not the interesting part of our instrumentation to test.
+ print(f"VCR Mode is {vcr.record_mode=}, {RecordMode.NONE}")
+ vertex_init_kwargs = {"api_transport": "rest"}
+ if vcr.record_mode == RecordMode.NONE:
+ vertex_init_kwargs["credentials"] = AnonymousCredentials()
+ vertex_init_kwargs["project"] = FAKE_PROJECT
+ vertexai.init(**vertex_init_kwargs)
+
+
+@pytest.fixture(scope="module")
+def vcr_config():
+ filter_header_regexes = [
+ r"X-.*",
+ "Server",
+ "Date",
+ "Expires",
+ "Authorization",
+ ]
+
+ def filter_headers(headers: Mapping[str, str]) -> Mapping[str, str]:
+ return {
+ key: val
+ for key, val in headers.items()
+ if not any(
+ re.match(filter_re, key, re.IGNORECASE)
+ for filter_re in filter_header_regexes
+ )
+ }
+
+ def before_record_cb(request: Request):
+ request.headers = filter_headers(request.headers)
+ request.uri = re.sub(
+ r"/projects/[^/]+/", f"/projects/{FAKE_PROJECT}/", request.uri
+ )
+ return request
+
+ def before_response_cb(response: MutableMapping[str, Any]):
+ response["headers"] = filter_headers(response["headers"])
+ return response
+
+ return {
+ "decode_compressed_response": True,
+ "before_record_request": before_record_cb,
+ "before_record_response": before_response_cb,
+ "ignore_hosts": ["oauth2.googleapis.com"],
+ }
+
+
+class LiteralBlockScalar(str):
+ """Formats the string as a literal block scalar, preserving whitespace and
+ without interpreting escape characters"""
+
+
+def literal_block_scalar_presenter(dumper, data):
+ """Represents a scalar string as a literal block, via '|' syntax"""
+ return dumper.represent_scalar("tag:yaml.org,2002:str", data, style="|")
+
+
+yaml.add_representer(LiteralBlockScalar, literal_block_scalar_presenter)
+
+
+def process_string_value(string_value):
+ """Pretty-prints JSON or returns long strings as a LiteralBlockScalar"""
+ try:
+ json_data = json.loads(string_value)
+ return LiteralBlockScalar(json.dumps(json_data, indent=2))
+ except (ValueError, TypeError):
+ if len(string_value) > 80:
+ return LiteralBlockScalar(string_value)
+ return string_value
+
+
+def convert_body_to_literal(data):
+ """Searches the data for body strings, attempting to pretty-print JSON"""
+ if isinstance(data, dict):
+ for key, value in data.items():
+ # Handle response body case (e.g., response.body.string)
+ if key == "body" and isinstance(value, dict) and "string" in value:
+ value["string"] = process_string_value(value["string"])
+
+ # Handle request body case (e.g., request.body)
+ elif key == "body" and isinstance(value, str):
+ data[key] = process_string_value(value)
+
+ else:
+ convert_body_to_literal(value)
+
+ elif isinstance(data, list):
+ for idx, choice in enumerate(data):
+ data[idx] = convert_body_to_literal(choice)
+
+ return data
+
+
+class PrettyPrintJSONBody:
+ """This makes request and response body recordings more readable."""
+
+ @staticmethod
+ def serialize(cassette_dict):
+ cassette_dict = convert_body_to_literal(cassette_dict)
+ return yaml.dump(
+ cassette_dict, default_flow_style=False, allow_unicode=True
+ )
+
+ @staticmethod
+ def deserialize(cassette_string):
+ return yaml.load(cassette_string, Loader=yaml.Loader)
+
+
+@pytest.fixture(scope="module", autouse=True)
+def fixture_vcr(vcr):
+ vcr.register_serializer("yaml", PrettyPrintJSONBody)
+ return vcr
diff --git a/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/test_gemini.py b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/test_gemini.py
new file mode 100644
index 0000000000..59ae91cccf
--- /dev/null
+++ b/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests/test_gemini.py
@@ -0,0 +1,37 @@
+import pytest
+from vertexai.preview.generative_models import GenerativeModel, Part
+
+# from opentelemetry.semconv_ai import SpanAttributes
+
+
+@pytest.mark.vcr
+def test_vertexai_generate_content(exporter):
+ multimodal_model = GenerativeModel("gemini-pro-vision")
+ multimodal_model.generate_content(
+ [
+ Part.from_uri(
+ "gs://generativeai-downloads/images/scones.jpg",
+ mime_type="image/jpeg",
+ ),
+ "what is shown in this image?",
+ ]
+ )
+
+ spans = exporter.get_finished_spans()
+ assert [span.name for span in spans] == [
+ "text_completion gemini-pro-vision"
+ ]
+
+ vertexai_span = spans[0]
+ assert len(spans) == 1
+
+ assert dict(vertexai_span.attributes) == {
+ "gen_ai.system": "vertex_ai",
+ "gen_ai.operation.name": "text_completion",
+ "gen_ai.request.model": "gemini-pro-vision",
+ "gen_ai.response.model": "gemini-pro-vision",
+ "gen_ai.usage.output_tokens": 35,
+ "gen_ai.usage.input_tokens": 265,
+ }
+
+ # TODO: verify Events
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap_gen.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap_gen.py
index a292299d70..0dffd10c24 100644
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap_gen.py
+++ b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap_gen.py
@@ -20,6 +20,10 @@
"library": "openai >= 1.26.0",
"instrumentation": "opentelemetry-instrumentation-openai-v2==2.1b0.dev",
},
+ {
+ "library": "google-cloud-aiplatform >= 1.64",
+ "instrumentation": "opentelemetry-instrumentation-vertexai-v2==2.1b0.dev",
+ },
{
"library": "aio_pika >= 7.2.0, < 10.0.0",
"instrumentation": "opentelemetry-instrumentation-aio-pika==0.50b0.dev",
diff --git a/tox.ini b/tox.ini
index 5fa58e5139..d911a1ff25 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,6 +11,13 @@ envlist =
pypy3-test-instrumentation-openai-v2-{0,1}
lint-instrumentation-openai-v2
+ ; instrumentation-vertexai
+ py3{8,9,10,11,12}-test-instrumentation-vertexai-v2-{0,1}
+ # Disable pypy which fails in CI because shapely does not have wheels for PyPy and requires
+ # some C libraries
+ ## pypy3-test-instrumentation-vertexai-v2-{0,1}
+ lint-instrumentation-vertexai-v2
+
; opentelemetry-resource-detector-container
py3{8,9,10,11,12}-test-resource-detector-container
pypy3-test-resource-detector-container
@@ -427,6 +434,17 @@ commands_pre =
openai-1: pip install -r {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-openai-v2/test-requirements-1.txt
lint-instrumentation-openai-v2: pip install -r {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-openai-v2/test-requirements-0.txt
+ # packages that are released individually should provide a test-requirements.txt with the lowest version of OTel API
+ # and SDK supported to test we are honoring it
+ vertexai-0: pip install -r {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-0.txt
+ # and the latest version of OTel API and SDK
+ vertexai-1: pip install opentelemetry-api@{env:CORE_REPO}\#egg=opentelemetry-api&subdirectory=opentelemetry-api
+ vertexai-1: pip install opentelemetry-semantic-conventions@{env:CORE_REPO}\#egg=opentelemetry-semantic-conventions&subdirectory=opentelemetry-semantic-conventions
+ vertexai-1: pip install opentelemetry-sdk@{env:CORE_REPO}\#egg=opentelemetry-sdk&subdirectory=opentelemetry-sdk
+ vertexai-1: pip install opentelemetry-test-utils@{env:CORE_REPO}\#egg=opentelemetry-test-utils&subdirectory=tests/opentelemetry-test-utils
+ vertexai-1: pip install -r {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-1.txt
+ lint-instrumentation-vertexai-v2: pip install -r {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/test-requirements-0.txt
+
distro: pip install opentelemetry-api@{env:CORE_REPO}\#egg=opentelemetry-api&subdirectory=opentelemetry-api
distro: pip install opentelemetry-semantic-conventions@{env:CORE_REPO}\#egg=opentelemetry-semantic-conventions&subdirectory=opentelemetry-semantic-conventions
distro: pip install opentelemetry-sdk@{env:CORE_REPO}\#egg=opentelemetry-sdk&subdirectory=opentelemetry-sdk
@@ -944,6 +962,9 @@ commands =
test-instrumentation-openai-v2: pytest {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-openai-v2/tests {posargs}
lint-instrumentation-openai-v2: sh -c "cd instrumentation-genai && pylint --rcfile ../.pylintrc opentelemetry-instrumentation-openai-v2"
+ test-instrumentation-vertexai-v2: pytest {toxinidir}/instrumentation-genai/opentelemetry-instrumentation-vertexai-v2/tests --vcr-record=none {posargs}
+ lint-instrumentation-vertexai-v2: sh -c "cd instrumentation-genai && pylint --rcfile ../.pylintrc opentelemetry-instrumentation-vertexai-v2"
+
test-instrumentation-sio-pika: pytest {toxinidir}/instrumentation/opentelemetry-instrumentation-pika/tests {posargs}
lint-instrumentation-sio-pika: sh -c "cd instrumentation && pylint --rcfile ../.pylintrc opentelemetry-instrumentation-pika"