diff --git a/.github/workflows/test-integrations-dbs.yml b/.github/workflows/test-integrations-dbs.yml
index 5fc0be029b..2d6af43bc3 100644
--- a/.github/workflows/test-integrations-dbs.yml
+++ b/.github/workflows/test-integrations-dbs.yml
@@ -29,7 +29,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7","3.8","3.11","3.12","3.13"]
+        python-version: ["3.7","3.12","3.13"]
         # python3.6 reached EOL and is no longer being supported on
         # new versions of hosted runners on Github Actions
         # ubuntu-20.04 is the last version that supported python3.6
diff --git a/scripts/populate_tox/config.py b/scripts/populate_tox/config.py
index 6795e36303..1dbc78ccf0 100644
--- a/scripts/populate_tox/config.py
+++ b/scripts/populate_tox/config.py
@@ -36,6 +36,13 @@
             "<=0.23": ["pydantic<2"],
         },
     },
+    "asyncpg": {
+        "package": "asyncpg",
+        "deps": {
+            "*": ["pytest-asyncio"],
+        },
+        "python": ">=3.7",
+    },
     "beam": {
         "package": "apache-beam",
         "python": ">=3.7",
diff --git a/scripts/populate_tox/populate_tox.py b/scripts/populate_tox/populate_tox.py
index 3d9ef23b66..076a8358f7 100644
--- a/scripts/populate_tox/populate_tox.py
+++ b/scripts/populate_tox/populate_tox.py
@@ -67,7 +67,6 @@
     "potel",
     # Integrations that can be migrated -- we should eventually remove all
     # of these from the IGNORE list
-    "asyncpg",
     "boto3",
     "chalice",
     "gcp",
diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja
index 241e0ca288..0ad9af8321 100755
--- a/scripts/populate_tox/tox.jinja
+++ b/scripts/populate_tox/tox.jinja
@@ -39,10 +39,6 @@ envlist =
     # Asgi
    {py3.7,py3.12,py3.13}-asgi

-    # asyncpg
-    {py3.7,py3.10}-asyncpg-v{0.23}
-    {py3.8,py3.11,py3.12}-asyncpg-latest
-
     # AWS Lambda
     {py3.8,py3.9,py3.11,py3.13}-aws_lambda

@@ -160,11 +156,6 @@ deps =
     asgi: pytest-asyncio
     asgi: async-asgi-testclient

-    # Asyncpg
-    asyncpg-v0.23: asyncpg~=0.23.0
-    asyncpg-latest: asyncpg
-    asyncpg: pytest-asyncio
-
     # AWS Lambda
     aws_lambda: aws-cdk-lib
     aws_lambda: aws-sam-cli
diff --git a/tests/integrations/asyncpg/test_asyncpg.py b/tests/integrations/asyncpg/test_asyncpg.py
index e36d15c5d2..e23612c055 100644
--- a/tests/integrations/asyncpg/test_asyncpg.py
+++ b/tests/integrations/asyncpg/test_asyncpg.py
@@ -3,21 +3,13 @@
 Tests need a local postgresql instance running, this can best be done using
 ```sh
-docker run --rm --name some-postgres -e POSTGRES_USER=foo -e POSTGRES_PASSWORD=bar -d -p 5432:5432 postgres
+docker run --rm --name some-postgres -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=sentry -d -p 5432:5432 postgres
 ```

 The tests use the following credentials to establish a database connection.
""" import os - - -PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") -PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) -PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") -PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") -PG_NAME = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") - import datetime from contextlib import contextmanager from unittest import mock @@ -33,6 +25,19 @@ from sentry_sdk.tracing_utils import record_sql_queries from tests.conftest import ApproxDict +PG_HOST = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_HOST", "localhost") +PG_PORT = int(os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PORT", "5432")) +PG_USER = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_USER", "postgres") +PG_PASSWORD = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_PASSWORD", "sentry") +PG_NAME_BASE = os.getenv("SENTRY_PYTHON_TEST_POSTGRES_NAME", "postgres") + + +def _get_db_name(): + pid = os.getpid() + return f"{PG_NAME_BASE}_{pid}" + + +PG_NAME = _get_db_name() PG_CONNECTION_URI = "postgresql://{}:{}@{}/{}".format( PG_USER, PG_PASSWORD, PG_HOST, PG_NAME @@ -55,6 +60,21 @@ @pytest_asyncio.fixture(autouse=True) async def _clean_pg(): + # Create the test database if it doesn't exist + default_conn = await connect( + "postgresql://{}:{}@{}".format(PG_USER, PG_PASSWORD, PG_HOST) + ) + try: + # Check if database exists, create if not + result = await default_conn.fetchval( + "SELECT 1 FROM pg_database WHERE datname = $1", PG_NAME + ) + if not result: + await default_conn.execute(f'CREATE DATABASE "{PG_NAME}"') + finally: + await default_conn.close() + + # Now connect to our test database and set up the table conn = await connect(PG_CONNECTION_URI) await conn.execute("DROP TABLE IF EXISTS users") await conn.execute( diff --git a/tox.ini b/tox.ini index 40afc2a6a7..1627cf2458 100644 --- a/tox.ini +++ b/tox.ini @@ -10,7 +10,7 @@ # The file (and all resulting CI YAMLs) then need to be regenerated via # "scripts/generate-test-files.sh". 
 #
-# Last generated: 2025-09-04T12:59:44.328902+00:00
+# Last generated: 2025-09-04T13:56:54.117272+00:00

 [tox]
 requires =
@@ -39,10 +39,6 @@ envlist =
     # Asgi
     {py3.7,py3.12,py3.13}-asgi

-    # asyncpg
-    {py3.7,py3.10}-asyncpg-v{0.23}
-    {py3.8,py3.11,py3.12}-asyncpg-latest
-
     # AWS Lambda
     {py3.8,py3.9,py3.11,py3.13}-aws_lambda

@@ -135,12 +131,12 @@ envlist =
     {py3.8,py3.11,py3.12}-openai-base-v1.0.1
     {py3.8,py3.11,py3.12}-openai-base-v1.36.1
     {py3.8,py3.11,py3.12}-openai-base-v1.71.0
-    {py3.8,py3.12,py3.13}-openai-base-v1.105.0
+    {py3.8,py3.12,py3.13}-openai-base-v1.106.0

     {py3.8,py3.11,py3.12}-openai-notiktoken-v1.0.1
     {py3.8,py3.11,py3.12}-openai-notiktoken-v1.36.1
     {py3.8,py3.11,py3.12}-openai-notiktoken-v1.71.0
-    {py3.8,py3.12,py3.13}-openai-notiktoken-v1.105.0
+    {py3.8,py3.12,py3.13}-openai-notiktoken-v1.106.0

     {py3.9,py3.12,py3.13}-langgraph-v0.6.6
     {py3.10,py3.12,py3.13}-langgraph-v1.0.0a2
@@ -157,6 +153,11 @@ envlist =

     # ~~~ DBs ~~~

+    {py3.7,py3.8,py3.9}-asyncpg-v0.23.0
+    {py3.7,py3.9,py3.10}-asyncpg-v0.25.0
+    {py3.7,py3.9,py3.10}-asyncpg-v0.27.0
+    {py3.8,py3.11,py3.12}-asyncpg-v0.30.0
+
     {py3.7,py3.11,py3.12}-clickhouse_driver-v0.2.9

     {py3.6}-pymongo-v3.5.1
@@ -362,11 +363,6 @@ deps =
     asgi: pytest-asyncio
     asgi: async-asgi-testclient

-    # Asyncpg
-    asyncpg-v0.23: asyncpg~=0.23.0
-    asyncpg-latest: asyncpg
-    asyncpg: pytest-asyncio
-
     # AWS Lambda
     aws_lambda: aws-cdk-lib
     aws_lambda: aws-sam-cli
@@ -514,7 +510,7 @@ deps =
     openai-base-v1.0.1: openai==1.0.1
     openai-base-v1.36.1: openai==1.36.1
     openai-base-v1.71.0: openai==1.71.0
-    openai-base-v1.105.0: openai==1.105.0
+    openai-base-v1.106.0: openai==1.106.0
     openai-base: pytest-asyncio
     openai-base: tiktoken
     openai-base-v1.0.1: httpx<0.28
@@ -523,7 +519,7 @@ deps =
     openai-notiktoken-v1.0.1: openai==1.0.1
     openai-notiktoken-v1.36.1: openai==1.36.1
     openai-notiktoken-v1.71.0: openai==1.71.0
-    openai-notiktoken-v1.105.0: openai==1.105.0
+    openai-notiktoken-v1.106.0: openai==1.106.0
     openai-notiktoken: pytest-asyncio
     openai-notiktoken-v1.0.1: httpx<0.28
     openai-notiktoken-v1.36.1: httpx<0.28
@@ -544,6 +540,12 @@ deps =

     # ~~~ DBs ~~~

+    asyncpg-v0.23.0: asyncpg==0.23.0
+    asyncpg-v0.25.0: asyncpg==0.25.0
+    asyncpg-v0.27.0: asyncpg==0.27.0
+    asyncpg-v0.30.0: asyncpg==0.30.0
+    asyncpg: pytest-asyncio
+
     clickhouse_driver-v0.2.9: clickhouse-driver==0.2.9

     pymongo-v3.5.1: pymongo==3.5.1