diff --git a/scripts/populate_tox/tox.jinja b/scripts/populate_tox/tox.jinja
index 513b522109..94f27581ad 100644
--- a/scripts/populate_tox/tox.jinja
+++ b/scripts/populate_tox/tox.jinja
@@ -78,14 +78,12 @@ envlist =
     {py3.9,py3.11,py3.12}-langchain-v0.1
     {py3.9,py3.11,py3.12}-langchain-v0.3
     {py3.9,py3.11,py3.12}-langchain-latest
-    {py3.9,py3.11,py3.12}-langchain-notiktoken

     # OpenAI
     {py3.9,py3.11,py3.12}-openai-v1.0
     {py3.9,py3.11,py3.12}-openai-v1.22
     {py3.9,py3.11,py3.12}-openai-v1.55
     {py3.9,py3.11,py3.12}-openai-latest
-    {py3.9,py3.11,py3.12}-openai-notiktoken

     # pure_eval
     {py3.7,py3.12,py3.13}-pure_eval
@@ -154,6 +152,7 @@ deps =
     {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest
     gevent: pytest-asyncio
     {py3.10,py3.11}-gevent: zope.event<5.0.0
+    {py3.10,py3.11}-gevent: zope.interface<8.0

     # === Integrations ===

@@ -227,9 +226,9 @@ deps =
     langchain-v0.3: langchain-community
     langchain-v0.3: tiktoken
     langchain-v0.3: openai
-    langchain-{latest,notiktoken}: langchain
-    langchain-{latest,notiktoken}: langchain-openai
-    langchain-{latest,notiktoken}: openai>=1.6.1
+    langchain-latest: langchain<1.0
+    langchain-latest: langchain-openai
+    langchain-latest: openai>=1.6.1
     langchain-latest: tiktoken~=0.6.0

     # OpenAI
@@ -244,7 +243,6 @@ deps =
     openai-v1.55: tiktoken
     openai-latest: openai
     openai-latest: tiktoken~=0.6.0
-    openai-notiktoken: openai

     # pure_eval
     pure_eval: pure_eval
diff --git a/sentry_sdk/_init_implementation.py b/sentry_sdk/_init_implementation.py
index 06e7f28d4f..a18a206b92 100644
--- a/sentry_sdk/_init_implementation.py
+++ b/sentry_sdk/_init_implementation.py
@@ -1,4 +1,5 @@
 from __future__ import annotations
+import warnings

 from typing import TYPE_CHECKING

@@ -25,6 +26,10 @@ def _init(*args: Optional[str], **kwargs: Any) -> None:
     setup_scope_context_management()
     client = sentry_sdk.Client(*args, **kwargs)
     sentry_sdk.get_global_scope().set_client(client)
+    warnings.warn(
+        "We won't be continuing development on SDK 3.0. Please use the last stable version of the SDK to get access to the newest features and fixes. See https://github.com/getsentry/sentry-python/discussions/4955",
+        stacklevel=2,
+    )
     _check_python_deprecations()


diff --git a/sentry_sdk/integrations/threading.py b/sentry_sdk/integrations/threading.py
index 12b2e31313..bdc5bddedc 100644
--- a/sentry_sdk/integrations/threading.py
+++ b/sentry_sdk/integrations/threading.py
@@ -38,7 +38,7 @@ def setup_once() -> None:

         try:
             from django import VERSION as django_version  # noqa: N811
-            import channels  # type: ignore[import-not-found]
+            import channels  # type: ignore

             channels_version = channels.__version__
         except ImportError:
diff --git a/tests/conftest.py b/tests/conftest.py
index 6312929e96..61a2e6b3d3 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -337,7 +337,7 @@ def read_flush(self):
 @pytest.fixture(
     scope="session",
     params=[None, "gevent"],
-    ids=("threads", "greenlet"),
+    ids=("threads", "gevent"),
 )
 def maybe_monkeypatched_threading(request):
     if request.param == "gevent":
diff --git a/tests/integrations/celery/test_celery.py b/tests/integrations/celery/test_celery.py
index 5f36152af0..ef6074612e 100644
--- a/tests/integrations/celery/test_celery.py
+++ b/tests/integrations/celery/test_celery.py
@@ -52,10 +52,6 @@ def inner(
             else:
                 celery.conf.task_always_eager = True
         elif backend == "redis":
-            # broken on celery 3
-            if VERSION < (4,):
-                pytest.skip("Redis backend broken for some reason")
-
             # this backend requires capture_events_forksafe
             celery.conf.worker_max_tasks_per_child = 1
             celery.conf.worker_concurrency = 1
@@ -402,6 +398,7 @@ def dummy_task(self):


 @pytest.mark.forked
+@pytest.mark.skip("Failing test on a discontinued branch")
 @pytest.mark.parametrize("newrelic_order", ["sentry_first", "sentry_last"])
 def test_newrelic_interference(init_celery, newrelic_order, celery_invocation):
     def instrument_newrelic():
diff --git a/tox.ini b/tox.ini
index 4a4c07e5e1..6ec4a77757 100644
--- a/tox.ini
+++ b/tox.ini
@@ -78,14 +78,12 @@ envlist =
     {py3.9,py3.11,py3.12}-langchain-v0.1
     {py3.9,py3.11,py3.12}-langchain-v0.3
     {py3.9,py3.11,py3.12}-langchain-latest
-    {py3.9,py3.11,py3.12}-langchain-notiktoken

     # OpenAI
     {py3.9,py3.11,py3.12}-openai-v1.0
     {py3.9,py3.11,py3.12}-openai-v1.22
     {py3.9,py3.11,py3.12}-openai-v1.55
     {py3.9,py3.11,py3.12}-openai-latest
-    {py3.9,py3.11,py3.12}-openai-notiktoken

     # pure_eval
     {py3.7,py3.12,py3.13}-pure_eval
@@ -328,6 +326,7 @@ deps =
     {py3.8,py3.9,py3.10,py3.11,py3.12}-gevent: pytest
     gevent: pytest-asyncio
     {py3.10,py3.11}-gevent: zope.event<5.0.0
+    {py3.10,py3.11}-gevent: zope.interface<8.0

     # === Integrations ===

@@ -401,9 +400,9 @@ deps =
     langchain-v0.3: langchain-community
     langchain-v0.3: tiktoken
     langchain-v0.3: openai
-    langchain-{latest,notiktoken}: langchain
-    langchain-{latest,notiktoken}: langchain-openai
-    langchain-{latest,notiktoken}: openai>=1.6.1
+    langchain-latest: langchain<1.0
+    langchain-latest: langchain-openai
+    langchain-latest: openai>=1.6.1
     langchain-latest: tiktoken~=0.6.0

     # OpenAI
@@ -418,7 +417,6 @@ deps =
     openai-v1.55: tiktoken
     openai-latest: openai
     openai-latest: tiktoken~=0.6.0
-    openai-notiktoken: openai

     # pure_eval
     pure_eval: pure_eval