Commit 6ac821b

Merge branch 'master' into fix-litestar-middleware-failed-request-status-codes
2 parents: 2b91e0d + 48ebd73

10 files changed (+182 -32 lines)

scripts/populate_tox/README.md

Lines changed: 35 additions & 0 deletions
@@ -45,9 +45,15 @@ integration_name: {
         rule2: [package3, package4, ...],
     },
     "python": python_version_specifier,
+    "include": package_version_specifier,
 }
 ```
 
+When talking about version specifiers, we mean
+[version specifiers as defined](https://packaging.python.org/en/latest/specifications/version-specifiers/#id5)
+by the Python Packaging Authority. See also the actual implementation
+in [packaging.specifiers](https://packaging.pypa.io/en/stable/specifiers.html).
+
 ### `package`
 
 The name of the third party package as it's listed on PyPI. The script will

@@ -118,6 +124,35 @@ metadata or the SDK is explicitly not supporting some packages on specific
 Python versions (because of, for example, broken context vars), the `python`
 key can be used.
 
+### `include`
+
+Sometimes we only want to consider testing some specific versions of packages.
+For example, the Starlite package has two alpha prereleases of version 2.0.0, but
+we do not want to test these, since Starlite 2.0 was renamed to Litestar.
+
+The value of the `include` key expects a version specifier defining which
+versions should be considered for testing. For example, since we only want to test
+versions below 2.x in Starlite, we can use
+
+```python
+"starlite": {
+    "include": "<2",
+    ...
+}
+```
+
+The `include` key can also be used to exclude a set of specific versions by using
+`!=` version specifiers. For example, the Starlite restriction above could equivalently
+be expressed like so:
+
+```python
+"starlite": {
+    "include": "!=2.0.0a1,!=2.0.0a2",
+    ...
+}
+```
+
 
 ## How-Tos
 
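As a quick reference for how these specifiers behave, here is a minimal sketch that applies them with `packaging` directly. The candidate versions are made up for illustration, and `prereleases=True` mirrors what `populate_tox.py` passes so that prereleases are still checked against the specifier:

```python
from packaging.specifiers import SpecifierSet
from packaging.version import Version

# Hypothetical candidate versions, for illustration only
candidates = [Version(v) for v in ["1.51.16", "2.0.0a1", "2.0.0a2"]]

# Same effect as `"include": "<2"` in the starlite entry
include = SpecifierSet("<2", prereleases=True)
print([str(v) for v in candidates if v in include])  # ['1.51.16']

# The exclusion-based form gives the same result for these candidates
include = SpecifierSet("!=2.0.0a1,!=2.0.0a2", prereleases=True)
print([str(v) for v in candidates if v in include])  # ['1.51.16']
```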

scripts/populate_tox/config.py

Lines changed: 1 addition & 0 deletions
@@ -129,6 +129,7 @@
             ],
         },
         "python": "<=3.11",
+        "include": "!=2.0.0a1,!=2.0.0a2",  # these are not relevant as there will never be a stable 2.0 release (starlite continues as litestar)
     },
     "statsig": {
         "package": "statsig",

scripts/populate_tox/populate_tox.py

Lines changed: 58 additions & 14 deletions
@@ -111,7 +111,7 @@ def fetch_release(package: str, version: Version) -> dict:
 
 def _prefilter_releases(
     integration: str, releases: dict[str, dict], older_than: Optional[datetime] = None
-) -> list[Version]:
+) -> tuple[list[Version], Optional[Version]]:
     """
     Filter `releases`, removing releases that are for sure unsupported.
 

@@ -120,6 +120,10 @@ def _prefilter_releases(
     they require additional API calls to be made. The purpose of this function is
     to slim down the list so that we don't have to make more API calls than
     necessary for releases that are for sure not supported.
+
+    The function returns a tuple with:
+    - the list of prefiltered releases
+    - an optional prerelease if there is one that should be tested
     """
     min_supported = _MIN_VERSIONS.get(integration)
     if min_supported is not None:

@@ -129,7 +133,14 @@
             f" {integration} doesn't have a minimum version defined in sentry_sdk/integrations/__init__.py. Consider defining one"
         )
 
+    include_versions = None
+    if TEST_SUITE_CONFIG[integration].get("include") is not None:
+        include_versions = SpecifierSet(
+            TEST_SUITE_CONFIG[integration]["include"], prereleases=True
+        )
+
     filtered_releases = []
+    last_prerelease = None
 
     for release, data in releases.items():
         if not data:

@@ -149,9 +160,15 @@
         if min_supported and version < min_supported:
             continue
 
-        if version.is_prerelease or version.is_postrelease:
-            # TODO: consider the newest prerelease unless obsolete
-            # https://github.com/getsentry/sentry-python/issues/4030
+        if version.is_postrelease or version.is_devrelease:
+            continue
+
+        if include_versions is not None and version not in include_versions:
+            continue
+
+        if version.is_prerelease:
+            if last_prerelease is None or version > last_prerelease:
+                last_prerelease = version
             continue
 
         for i, saved_version in enumerate(filtered_releases):

@@ -166,18 +183,30 @@
         else:
             filtered_releases.append(version)
 
-    return sorted(filtered_releases)
+    filtered_releases.sort()
+
+    # Check if the latest prerelease is relevant (i.e., it's for a version higher
+    # than the last released version); if not, don't consider it
+    if last_prerelease is not None:
+        if not filtered_releases or last_prerelease > filtered_releases[-1]:
+            return filtered_releases, last_prerelease
+
+    return filtered_releases, None
 
 
 def get_supported_releases(
     integration: str, pypi_data: dict, older_than: Optional[datetime] = None
-) -> list[Version]:
+) -> tuple[list[Version], Optional[Version]]:
     """
     Get a list of releases that are currently supported by the SDK.
 
     This takes into account a handful of parameters (Python support, the lowest
     version we've defined for the framework, the date of the release).
 
+    We return the list of supported releases and optionally also the newest
+    prerelease, if it should be tested (meaning it's for a version higher than
+    the current stable version).
+
     If an `older_than` timestamp is provided, no release newer than that will be
     considered.
     """

@@ -186,7 +215,9 @@
     # Get a consolidated list without taking into account Python support yet
     # (because that might require an additional API call for some
     # of the releases)
-    releases = _prefilter_releases(integration, pypi_data["releases"], older_than)
+    releases, latest_prerelease = _prefilter_releases(
+        integration, pypi_data["releases"], older_than
+    )
 
     # Determine Python support
     expected_python_versions = TEST_SUITE_CONFIG[integration].get("python")

@@ -210,14 +241,18 @@ def _supports_lowest(release: Version) -> bool:
     # version(s) that we do, cut off the rest
     releases = releases[i:]
 
-    return releases
+    return releases, latest_prerelease
 
 
-def pick_releases_to_test(releases: list[Version]) -> list[Version]:
+def pick_releases_to_test(
+    releases: list[Version], last_prerelease: Optional[Version]
+) -> list[Version]:
     """Pick a handful of releases to test from a sorted list of supported releases."""
     # If the package has majors (or major-like releases, even if they don't do
     # semver), we want to make sure we're testing them all. If not, we just pick
     # the oldest, the newest, and a couple in between.
+    #
+    # If there is a relevant prerelease, also test that in addition to the above.
     has_majors = len(set([v.major for v in releases])) > 1
     filtered_releases = set()
 

@@ -252,7 +287,11 @@ def pick_releases_to_test(releases: list[Version]) -> list[Version]:
             releases[-1],  # latest
         }
 
-    return sorted(filtered_releases)
+    filtered_releases = sorted(filtered_releases)
+    if last_prerelease is not None:
+        filtered_releases.append(last_prerelease)
+
+    return filtered_releases
 
 
 def supported_python_versions(

@@ -553,19 +592,24 @@ def main(fail_on_changes: bool = False) -> None:
         pypi_data = fetch_package(package)
 
         # Get the list of all supported releases
-        # If in check mode, ignore releases newer than `last_updated`
+
+        # If in fail-on-changes mode, ignore releases newer than `last_updated`
         older_than = last_updated if fail_on_changes else None
-        releases = get_supported_releases(integration, pypi_data, older_than)
+
+        releases, latest_prerelease = get_supported_releases(
+            integration, pypi_data, older_than
+        )
+
         if not releases:
             print(" Found no supported releases.")
             continue
 
         _compare_min_version_with_defined(integration, releases)
 
         # Pick a handful of the supported releases to actually test against
-        # and fetch the PYPI data for each to determine which Python versions
+        # and fetch the PyPI data for each to determine which Python versions
         # to test it on
-        test_releases = pick_releases_to_test(releases)
+        test_releases = pick_releases_to_test(releases, latest_prerelease)
 
         for release in test_releases:
             _add_python_versions_to_release(integration, package, release)
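
The core behavioural change in `_prefilter_releases` is easier to see in isolation: stable releases are kept as before, while the newest prerelease is now returned separately, and only when it is newer than the newest stable release. Below is a minimal, self-contained sketch of that idea; the helper name and the sample versions are made up, and the real function additionally applies `include`, `older_than`, and minimum-version cutoffs and skips post/dev releases:

```python
from typing import Optional

from packaging.version import Version


def split_releases(
    versions: list[Version],
) -> tuple[list[Version], Optional[Version]]:
    """Return sorted stable releases plus the newest prerelease,
    but only if that prerelease is newer than the newest stable release."""
    stable = sorted(v for v in versions if not v.is_prerelease)
    newest_pre = max((v for v in versions if v.is_prerelease), default=None)

    if newest_pre is not None and (not stable or newest_pre > stable[-1]):
        return stable, newest_pre
    return stable, None


# A prerelease ahead of the newest stable release is kept...
print(split_releases([Version(v) for v in ["1.0", "1.1", "2.0.0rc1"]]))
# -> ([<Version('1.0')>, <Version('1.1')>], <Version('2.0.0rc1')>)

# ...while an obsolete prerelease is dropped.
print(split_releases([Version(v) for v in ["1.0", "1.1rc1", "1.1"]]))
# -> ([<Version('1.0')>, <Version('1.1')>], None)
```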

sentry_sdk/integrations/anthropic.py

Lines changed: 2 additions & 0 deletions
@@ -101,6 +101,8 @@ def _collect_ai_data(event, input_tokens, output_tokens, content_blocks):
     elif event.type == "content_block_delta":
         if hasattr(event.delta, "text"):
             content_blocks.append(event.delta.text)
+        elif hasattr(event.delta, "partial_json"):
+            content_blocks.append(event.delta.partial_json)
     elif event.type == "content_block_stop":
         pass
     elif event.type == "message_delta":
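
For context: when Anthropic streams a tool-use response, the `content_block_delta` events carry an `InputJSONDelta` whose payload lives in a `partial_json` attribute rather than the `text` attribute of a `TextDelta`. A rough sketch of the dispatch this branch adds, using simplified stand-in classes instead of the real `anthropic.types` models:

```python
from dataclasses import dataclass


@dataclass
class FakeTextDelta:
    text: str  # plain streamed text, e.g. "Hello"


@dataclass
class FakeInputJSONDelta:
    partial_json: str  # streamed tool arguments, e.g. '{"location": "San'


def collect(delta, content_blocks):
    # Mirrors the new branch: prefer `text`, fall back to `partial_json`
    if hasattr(delta, "text"):
        content_blocks.append(delta.text)
    elif hasattr(delta, "partial_json"):
        content_blocks.append(delta.partial_json)


blocks = []
collect(FakeTextDelta("Hello"), blocks)
collect(FakeInputJSONDelta('{"location": "San'), blocks)
print(blocks)  # ['Hello', '{"location": "San']
```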

sentry_sdk/integrations/aws_lambda.py

Lines changed: 4 additions & 1 deletion
@@ -61,7 +61,10 @@ def sentry_init_error(*args, **kwargs):
 
             else:
                 # Fall back to AWS lambdas JSON representation of the error
-                sentry_event = _event_from_error_json(json.loads(args[1]))
+                error_info = args[1]
+                if isinstance(error_info, str):
+                    error_info = json.loads(error_info)
+                sentry_event = _event_from_error_json(error_info)
                 sentry_sdk.capture_event(sentry_event)
 
         return init_error(*args, **kwargs)
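
A small sketch of the defensive handling introduced here: the error payload passed to `init_error` can evidently arrive either as a JSON-encoded string or as an already-decoded object, and both now end up as a dict before being turned into an event. The sample payload is made up for illustration:

```python
import json


def normalize_error_info(error_info):
    # Accept either a JSON string or an already-parsed dict
    if isinstance(error_info, str):
        error_info = json.loads(error_info)
    return error_info


payload = {"errorType": "Exception", "errorMessage": "boom", "stackTrace": []}
assert normalize_error_info(json.dumps(payload)) == payload
assert normalize_error_info(payload) == payload
```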

sentry_sdk/profiler/__init__.py

Lines changed: 10 additions & 3 deletions
@@ -1,4 +1,9 @@
-from sentry_sdk.profiler.continuous_profiler import start_profiler, stop_profiler
+from sentry_sdk.profiler.continuous_profiler import (
+    start_profile_session,
+    start_profiler,
+    stop_profile_session,
+    stop_profiler,
+)
 from sentry_sdk.profiler.transaction_profiler import (
     MAX_PROFILE_DURATION_NS,
     PROFILE_MINIMUM_SAMPLES,

@@ -20,8 +25,10 @@
 )
 
 __all__ = [
-    "start_profiler",
-    "stop_profiler",
+    "start_profile_session",
+    "start_profiler",  # TODO: Deprecate this in favor of `start_profile_session`
+    "stop_profile_session",
+    "stop_profiler",  # TODO: Deprecate this in favor of `stop_profile_session`
     # DEPRECATED: The following was re-exported for backwards compatibility. It
     # will be removed from sentry_sdk.profiler in a future release.
     "MAX_PROFILE_DURATION_NS",

sentry_sdk/scope.py

Lines changed: 1 addition & 1 deletion
@@ -1568,7 +1568,7 @@ def update_from_kwargs(
         user=None,  # type: Optional[Any]
         level=None,  # type: Optional[LogLevelStr]
         extras=None,  # type: Optional[Dict[str, Any]]
-        contexts=None,  # type: Optional[Dict[str, Any]]
+        contexts=None,  # type: Optional[Dict[str, Dict[str, Any]]]
         tags=None,  # type: Optional[Dict[str, str]]
         fingerprint=None,  # type: Optional[List[str]]
     ):
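
The corrected type comment documents that `contexts` maps a context name to a dict of values, i.e. the same nested shape the public context API expects. A brief illustrative sketch (the context name and payload are made up):

```python
import sentry_sdk

# Each context is itself a dict, i.e. Dict[str, Dict[str, Any]]
sentry_sdk.set_context("character", {"name": "Mighty Fighter", "age": 19})
```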

tests/integrations/anthropic/test_anthropic.py

Lines changed: 64 additions & 7 deletions
@@ -1,5 +1,6 @@
 from unittest import mock
 
+
 try:
     from unittest.mock import AsyncMock
 except ImportError:

@@ -10,7 +11,7 @@ async def __call__(self, *args, **kwargs):
 
 
 import pytest
-from anthropic import AsyncAnthropic, Anthropic, AnthropicError, AsyncStream, Stream
+from anthropic import Anthropic, AnthropicError, AsyncAnthropic, AsyncStream, Stream
 from anthropic.types import MessageDeltaUsage, TextDelta, Usage
 from anthropic.types.content_block_delta_event import ContentBlockDeltaEvent
 from anthropic.types.content_block_start_event import ContentBlockStartEvent

@@ -19,6 +20,7 @@ async def __call__(self, *args, **kwargs):
 from anthropic.types.message_delta_event import MessageDeltaEvent
 from anthropic.types.message_start_event import MessageStartEvent
 
+from sentry_sdk.integrations.anthropic import _add_ai_data_to_span, _collect_ai_data
 from sentry_sdk.utils import package_version
 
 try:

@@ -42,7 +44,7 @@ async def __call__(self, *args, **kwargs):
 except ImportError:
     from anthropic.types.content_block import ContentBlock as TextBlock
 
-from sentry_sdk import start_transaction
+from sentry_sdk import start_transaction, start_span
 from sentry_sdk.consts import OP, SPANDATA
 from sentry_sdk.integrations.anthropic import AnthropicIntegration
 

@@ -517,9 +519,8 @@ def test_streaming_create_message_with_input_json_delta(
     if send_default_pii and include_prompts:
         assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
         assert span["data"][SPANDATA.AI_RESPONSES] == [
-            {"text": "", "type": "text"}
-        ]  # we do not record InputJSONDelta because it could contain PII
-
+            {"text": "{'location': 'San Francisco, CA'}", "type": "text"}
+        ]
     else:
         assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]
         assert SPANDATA.AI_RESPONSES not in span["data"]

@@ -654,8 +655,8 @@ async def test_streaming_create_message_with_input_json_delta_async(
     if send_default_pii and include_prompts:
         assert span["data"][SPANDATA.AI_INPUT_MESSAGES] == messages
         assert span["data"][SPANDATA.AI_RESPONSES] == [
-            {"text": "", "type": "text"}
-        ]  # we do not record InputJSONDelta because it could contain PII
+            {"text": "{'location': 'San Francisco, CA'}", "type": "text"}
+        ]
 
     else:
         assert SPANDATA.AI_INPUT_MESSAGES not in span["data"]

@@ -757,3 +758,59 @@ async def test_span_origin_async(sentry_init, capture_events):
 
     assert event["contexts"]["trace"]["origin"] == "manual"
     assert event["spans"][0]["origin"] == "auto.ai.anthropic"
+
+
+@pytest.mark.skipif(
+    ANTHROPIC_VERSION < (0, 27),
+    reason="Versions <0.27.0 do not include InputJSONDelta.",
+)
+def test_collect_ai_data_with_input_json_delta():
+    event = ContentBlockDeltaEvent(
+        delta=InputJSONDelta(partial_json="test", type="input_json_delta"),
+        index=0,
+        type="content_block_delta",
+    )
+
+    input_tokens = 10
+    output_tokens = 20
+    content_blocks = []
+
+    new_input_tokens, new_output_tokens, new_content_blocks = _collect_ai_data(
+        event, input_tokens, output_tokens, content_blocks
+    )
+
+    assert new_input_tokens == input_tokens
+    assert new_output_tokens == output_tokens
+    assert new_content_blocks == ["test"]
+
+
+@pytest.mark.skipif(
+    ANTHROPIC_VERSION < (0, 27),
+    reason="Versions <0.27.0 do not include InputJSONDelta.",
+)
+def test_add_ai_data_to_span_with_input_json_delta(sentry_init):
+    sentry_init(
+        integrations=[AnthropicIntegration(include_prompts=True)],
+        traces_sample_rate=1.0,
+        send_default_pii=True,
+    )
+
+    with start_transaction(name="test"):
+        span = start_span()
+        integration = AnthropicIntegration()
+
+        _add_ai_data_to_span(
+            span,
+            integration,
+            input_tokens=10,
+            output_tokens=20,
+            content_blocks=["{'test': 'data',", "'more': 'json'}"],
+        )
+
+        assert span._data.get(SPANDATA.AI_RESPONSES) == [
+            {"type": "text", "text": "{'test': 'data','more': 'json'}"}
+        ]
+        assert span._data.get("ai.streaming") is True
+        assert span._measurements.get("ai_prompt_tokens_used")["value"] == 10
+        assert span._measurements.get("ai_completion_tokens_used")["value"] == 20
+        assert span._measurements.get("ai_total_tokens_used")["value"] == 30
