
Commit 2c8dafb

Merge branch 'master' into shellmayr/feat/add-breadcrumb-annotationvalue

2 parents: a05707f + 0e7e2e6

17 files changed: +507 −203 lines

.github/CODEOWNERS

Lines changed: 1 addition & 1 deletion

@@ -1 +1 @@
-* @getsentry/owners-python-sdk
+* @getsentry/team-web-sdk-backend

.github/workflows/release.yml

Lines changed: 1 addition & 1 deletion

@@ -20,7 +20,7 @@ jobs:
     steps:
       - name: Get auth token
         id: token
-        uses: actions/create-github-app-token@af35edadc00be37caa72ed9f3e6d5f7801bfdf09 # v1.11.7
+        uses: actions/create-github-app-token@d72941d797fd3113feb6b93fd0dec494b13a2547 # v1.12.0
         with:
           app-id: ${{ vars.SENTRY_RELEASE_BOT_CLIENT_ID }}
           private-key: ${{ secrets.SENTRY_RELEASE_BOT_PRIVATE_KEY }}

CHANGELOG.md

Lines changed: 59 additions & 0 deletions

@@ -1,5 +1,64 @@
 # Changelog
 
+## 2.25.1
+
+### Various fixes & improvements
+
+- fix(logs): Add a class which batches groups of logs together. (#4229) by @colin-sentry
+- fix(logs): Use repr instead of json for message and arguments (#4227) by @colin-sentry
+- fix(logs): Debug output from Sentry logs should always be `debug` level. (#4224) by @antonpirker
+- fix(ai): Do not consume anthropic streaming stop (#4232) by @colin-sentry
+- fix(spotlight): Do not spam sentry_sdk.warnings logger w/ Spotlight (#4219) by @BYK
+- fix(docs): fixed code snippet (#4218) by @antonpirker
+- build(deps): bump actions/create-github-app-token from 1.11.7 to 1.12.0 (#4214) by @dependabot
+
+## 2.25.0
+
+### Various fixes & improvements
+
+- **New Beta Feature** Enable Sentry logs in `logging` Integration (#4143) by @colin-sentry
+
+You can now send existing log messages to the new Sentry Logs feature.
+
+For more information see: https://github.com/getsentry/sentry/discussions/86804
+
+This is how you can use it (Sentry Logs is in beta right now so the API can still change):
+
+```python
+import logging
+
+import sentry_sdk
+from sentry_sdk.integrations.logging import LoggingIntegration
+
+# Setup Sentry SDK to send log messages with a level of "error" or higher to Sentry.
+sentry_sdk.init(
+    dsn="...",
+    _experiments={
+        "enable_sentry_logs": True
+    },
+    integrations=[
+        LoggingIntegration(sentry_logs_level=logging.ERROR),
+    ]
+)
+
+# Your existing logging setup
+some_logger = logging.Logger("some-logger")
+
+some_logger.info('In this example info events will not be sent to Sentry logs. my_value=%s', my_value)
+some_logger.error('But error events will be sent to Sentry logs. my_value=%s', my_value)
+```
+
+- Spotlight: Sample everything 100% w/ Spotlight & no DSN set (#4207) by @BYK
+- Dramatiq: use set_transaction_name (#4175) by @timdrijvers
+- toxgen: Make it clearer which suites can be migrated (#4196) by @sentrivana
+- Move Litestar under toxgen (#4197) by @sentrivana
+- Added flake8 plugings to pre-commit call of flake8 (#4190) by @antonpirker
+- Deprecate Scope.user (#4194) by @sentrivana
+- Fix hanging when capturing long stacktrace (#4191) by @szokeasaurusrex
+- Fix GraphQL failures (#4208) by @sentrivana
+- Fix flaky test (#4198) by @sentrivana
+- Update Ubuntu in Github test runners (#4204) by @antonpirker
+
 ## 2.24.1
 
 ### Various fixes & improvements

docs/conf.py

Lines changed: 1 addition & 1 deletion

@@ -31,7 +31,7 @@
 copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year)
 author = "Sentry Team and Contributors"
 
-release = "2.24.1"
+release = "2.25.1"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
scripts/populate_tox/tox.jinja

Lines changed: 2 additions & 2 deletions

@@ -400,9 +400,9 @@ deps =
     rq-v{0.6}: fakeredis<1.0
     rq-v{0.6}: redis<3.2.2
     rq-v{0.13,1.0,1.5,1.10}: fakeredis>=1.0,<1.7.4
-    rq-v{1.15,1.16}: fakeredis
+    rq-v{1.15,1.16}: fakeredis<2.28.0
     {py3.6,py3.7}-rq-v{1.15,1.16}: fakeredis!=2.26.0  # https://github.com/cunla/fakeredis-py/issues/341
-    rq-latest: fakeredis
+    rq-latest: fakeredis<2.28.0
     {py3.6,py3.7}-rq-latest: fakeredis!=2.26.0  # https://github.com/cunla/fakeredis-py/issues/341
     rq-v0.6: rq~=0.6.0
     rq-v0.13: rq~=0.13.0

sentry_sdk/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -45,7 +45,7 @@
     "start_transaction",
     "trace",
     "monitor",
-    "_experimental_logger",
+    "logger",
 ]
 
 # Initialize the debug support after everything is loaded
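
The export rename above means the logs entry point is now importable as `sentry_sdk.logger` rather than `sentry_sdk._experimental_logger`. A minimal sketch of calling it, assuming the renamed module keeps level-named helpers such as `info` and `error` (those method names are not shown in this diff; `enable_logs` is the experimental flag read by `client.py` further down):

```python
import sentry_sdk

# "enable_logs" is the experimental flag checked in client.py below.
sentry_sdk.init(dsn="...", _experiments={"enable_logs": True})

# The helper names are assumptions; only the export rename is shown in this diff.
sentry_sdk.logger.info("cache warmed up")
sentry_sdk.logger.error("cache warm-up failed")
```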

sentry_sdk/_log_batcher.py

Lines changed: 142 additions & 0 deletions

@@ -0,0 +1,142 @@
+import os
+import random
+import threading
+from datetime import datetime, timezone
+from typing import Optional, List, Callable, TYPE_CHECKING, Any
+
+from sentry_sdk.utils import format_timestamp, safe_repr
+from sentry_sdk.envelope import Envelope
+
+if TYPE_CHECKING:
+    from sentry_sdk._types import Log
+
+
+class LogBatcher:
+    MAX_LOGS_BEFORE_FLUSH = 100
+    FLUSH_WAIT_TIME = 5.0
+
+    def __init__(
+        self,
+        capture_func,  # type: Callable[[Envelope], None]
+    ):
+        # type: (...) -> None
+        self._log_buffer = []  # type: List[Log]
+        self._capture_func = capture_func
+        self._running = True
+        self._lock = threading.Lock()
+
+        self._flush_event = threading.Event()  # type: threading.Event
+
+        self._flusher = None  # type: Optional[threading.Thread]
+        self._flusher_pid = None  # type: Optional[int]
+
+    def _ensure_thread(self):
+        # type: (...) -> bool
+        """For forking processes we might need to restart this thread.
+        This ensures that our process actually has that thread running.
+        """
+        if not self._running:
+            return False
+
+        pid = os.getpid()
+        if self._flusher_pid == pid:
+            return True
+
+        with self._lock:
+            # Recheck to make sure another thread didn't get here and start
+            # the flusher in the meantime
+            if self._flusher_pid == pid:
+                return True
+
+            self._flusher_pid = pid
+
+            self._flusher = threading.Thread(target=self._flush_loop)
+            self._flusher.daemon = True
+
+            try:
+                self._flusher.start()
+            except RuntimeError:
+                # Unfortunately at this point the interpreter is in a state that no
+                # longer allows us to spawn a thread and we have to bail.
+                self._running = False
+                return False
+
+        return True
+
+    def _flush_loop(self):
+        # type: (...) -> None
+        while self._running:
+            self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random())
+            self._flush_event.clear()
+            self._flush()
+
+    def add(
+        self,
+        log,  # type: Log
+    ):
+        # type: (...) -> None
+        if not self._ensure_thread() or self._flusher is None:
+            return None
+
+        with self._lock:
+            self._log_buffer.append(log)
+            if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_FLUSH:
+                self._flush_event.set()
+
+    def kill(self):
+        # type: (...) -> None
+        if self._flusher is None:
+            return
+
+        self._running = False
+        self._flush_event.set()
+        self._flusher = None
+
+    def flush(self):
+        # type: (...) -> None
+        self._flush()
+
+    @staticmethod
+    def _log_to_otel(log):
+        # type: (Log) -> Any
+        def format_attribute(key, val):
+            # type: (str, int | float | str | bool) -> Any
+            if isinstance(val, bool):
+                return {"key": key, "value": {"boolValue": val}}
+            if isinstance(val, int):
+                return {"key": key, "value": {"intValue": str(val)}}
+            if isinstance(val, float):
+                return {"key": key, "value": {"doubleValue": val}}
+            if isinstance(val, str):
+                return {"key": key, "value": {"stringValue": val}}
+            return {"key": key, "value": {"stringValue": safe_repr(val)}}
+
+        otel_log = {
+            "severityText": log["severity_text"],
+            "severityNumber": log["severity_number"],
+            "body": {"stringValue": log["body"]},
+            "timeUnixNano": str(log["time_unix_nano"]),
+            "attributes": [
+                format_attribute(k, v) for (k, v) in log["attributes"].items()
+            ],
+        }
+
+        if "trace_id" in log:
+            otel_log["traceId"] = log["trace_id"]
+
+        return otel_log
+
+    def _flush(self):
+        # type: (...) -> Optional[Envelope]
+
+        envelope = Envelope(
+            headers={"sent_at": format_timestamp(datetime.now(timezone.utc))}
+        )
+        with self._lock:
+            for log in self._log_buffer:
+                envelope.add_log(self._log_to_otel(log))
+            self._log_buffer.clear()
+        if envelope.items:
+            self._capture_func(envelope)
+            return envelope
+        return None
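
For orientation, here is a minimal, self-contained sketch (not part of the commit) of how the batcher is driven. The `log` dict keys mirror what `_log_to_otel` reads; the values and the `print_envelope` callback are illustrative stand-ins for what the client normally supplies:

```python
import time

from sentry_sdk._log_batcher import LogBatcher
from sentry_sdk.envelope import Envelope


def print_envelope(envelope):
    # type: (Envelope) -> None
    # Stand-in for the client's _capture_envelope callback.
    print("captured envelope with %d item(s)" % len(envelope.items))


batcher = LogBatcher(capture_func=print_envelope)

# The keys mirror the ones _log_to_otel reads; the values are made up.
log = {
    "severity_text": "info",
    "severity_number": 9,
    "body": "user signed in",
    "time_unix_nano": time.time_ns(),
    "attributes": {"user_id": 42, "plan": "free"},
}

batcher.add(log)   # buffered; flushed at 100 logs or roughly every 5 seconds
batcher.flush()    # force the buffered logs out immediately
batcher.kill()     # stop the background flusher thread
```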

sentry_sdk/client.py

Lines changed: 21 additions & 55 deletions

@@ -1,9 +1,7 @@
-import json
 import os
 import uuid
 import random
 import socket
-import logging
 from collections.abc import Mapping
 from datetime import datetime, timezone
 from importlib import import_module
@@ -65,6 +63,7 @@
     from sentry_sdk.session import Session
     from sentry_sdk.spotlight import SpotlightClient
     from sentry_sdk.transport import Transport
+    from sentry_sdk._log_batcher import LogBatcher
 
 I = TypeVar("I", bound=Integration)  # noqa: E741
 
@@ -178,6 +177,7 @@ def __init__(self, options=None):
         self.transport = None  # type: Optional[Transport]
         self.monitor = None  # type: Optional[Monitor]
         self.metrics_aggregator = None  # type: Optional[MetricsAggregator]
+        self.log_batcher = None  # type: Optional[LogBatcher]
 
     def __getstate__(self, *args, **kwargs):
         # type: (*Any, **Any) -> Any
@@ -375,6 +375,12 @@ def _capture_envelope(envelope):
                         "Metrics not supported on Python 3.6 and lower with gevent."
                     )
 
+            self.log_batcher = None
+            if experiments.get("enable_logs", False):
+                from sentry_sdk._log_batcher import LogBatcher
+
+                self.log_batcher = LogBatcher(capture_func=_capture_envelope)
+
             max_request_body_size = ("always", "never", "small", "medium")
             if self.options["max_request_body_size"] not in max_request_body_size:
                 raise ValueError(
@@ -451,6 +457,7 @@ def _capture_envelope(envelope):
             if (
                 self.monitor
                 or self.metrics_aggregator
+                or self.log_batcher
                 or has_profiling_enabled(self.options)
                 or isinstance(self.transport, BaseHttpTransport)
             ):
@@ -877,15 +884,11 @@ def capture_event(
 
     def _capture_experimental_log(self, current_scope, log):
         # type: (Scope, Log) -> None
-        logs_enabled = self.options["_experiments"].get("enable_sentry_logs", False)
+        logs_enabled = self.options["_experiments"].get("enable_logs", False)
         if not logs_enabled:
             return
         isolation_scope = current_scope.get_isolation_scope()
 
-        headers = {
-            "sent_at": format_timestamp(datetime.now(timezone.utc)),
-        }  # type: dict[str, object]
-
         environment = self.options.get("environment")
         if environment is not None and "sentry.environment" not in log["attributes"]:
             log["attributes"]["sentry.environment"] = environment
@@ -909,59 +912,18 @@ def _capture_experimental_log(self, current_scope, log):
         # If debug is enabled, log the log to the console
         debug = self.options.get("debug", False)
         if debug:
-            severity_text_to_logging_level = {
-                "trace": logging.DEBUG,
-                "debug": logging.DEBUG,
-                "info": logging.INFO,
-                "warn": logging.WARNING,
-                "error": logging.ERROR,
-                "fatal": logging.CRITICAL,
-            }
-            logger.log(
-                severity_text_to_logging_level.get(log["severity_text"], logging.DEBUG),
-                f'[Sentry Logs] {log["body"]}',
+            logger.debug(
+                f'[Sentry Logs] [{log.get("severity_text")}] {log.get("body")}'
             )
 
-        envelope = Envelope(headers=headers)
-
-        before_emit_log = self.options["_experiments"].get("before_emit_log")
-        if before_emit_log is not None:
-            log = before_emit_log(log, {})
+        before_send_log = self.options["_experiments"].get("before_send_log")
+        if before_send_log is not None:
+            log = before_send_log(log, {})
         if log is None:
             return
 
-        def format_attribute(key, val):
-            # type: (str, int | float | str | bool) -> Any
-            if isinstance(val, bool):
-                return {"key": key, "value": {"boolValue": val}}
-            if isinstance(val, int):
-                return {"key": key, "value": {"intValue": str(val)}}
-            if isinstance(val, float):
-                return {"key": key, "value": {"doubleValue": val}}
-            if isinstance(val, str):
-                return {"key": key, "value": {"stringValue": val}}
-            return {"key": key, "value": {"stringValue": json.dumps(val)}}
-
-        otel_log = {
-            "severityText": log["severity_text"],
-            "severityNumber": log["severity_number"],
-            "body": {"stringValue": log["body"]},
-            "timeUnixNano": str(log["time_unix_nano"]),
-            "attributes": [
-                format_attribute(k, v) for (k, v) in log["attributes"].items()
-            ],
-        }
-
-        if "trace_id" in log:
-            otel_log["traceId"] = log["trace_id"]
-
-        envelope.add_log(otel_log)  # TODO: batch these
-
-        if self.spotlight:
-            self.spotlight.capture_envelope(envelope)
-
-        if self.transport is not None:
-            self.transport.capture_envelope(envelope)
+        if self.log_batcher:
+            self.log_batcher.add(log)
 
     def capture_session(
         self, session  # type: Session
@@ -1015,6 +977,8 @@ def close(
             self.session_flusher.kill()
             if self.metrics_aggregator is not None:
                 self.metrics_aggregator.kill()
+            if self.log_batcher is not None:
+                self.log_batcher.kill()
             if self.monitor:
                 self.monitor.kill()
             self.transport.kill()
@@ -1039,6 +1003,8 @@ def flush(
             self.session_flusher.flush()
             if self.metrics_aggregator is not None:
                 self.metrics_aggregator.flush()
+            if self.log_batcher is not None:
+                self.log_batcher.flush()
             self.transport.flush(timeout=timeout, callback=callback)
 
     def __enter__(self):
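
Taken together, the client changes rename two experimental options (`enable_sentry_logs` becomes `enable_logs`, `before_emit_log` becomes `before_send_log`) and hand captured logs to the `LogBatcher` instead of building one envelope per log. A minimal configuration sketch against the renamed options; the hook body is illustrative only:

```python
import sentry_sdk


def strip_user_id(log, hint):
    # Mirrors how client.py calls the hook: before_send_log(log, {}).
    # Returning None drops the log; otherwise the returned log is batched.
    log["attributes"].pop("user_id", None)  # "user_id" is an illustrative attribute
    return log


sentry_sdk.init(
    dsn="...",
    _experiments={
        "enable_logs": True,               # renamed from "enable_sentry_logs"
        "before_send_log": strip_user_id,  # renamed from "before_emit_log"
    },
)
```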
