Commit 72c7d3d

Merge branch 'master' into antonpirker/fix/modified-decimal-context
2 parents d19053e + 0e7e2e6 commit 72c7d3d

12 files changed: +439 -190 lines


CHANGELOG.md

Lines changed: 12 additions & 0 deletions
@@ -1,5 +1,17 @@
 # Changelog
 
+## 2.25.1
+
+### Various fixes & improvements
+
+- fix(logs): Add a class which batches groups of logs together. (#4229) by @colin-sentry
+- fix(logs): Use repr instead of json for message and arguments (#4227) by @colin-sentry
+- fix(logs): Debug output from Sentry logs should always be `debug` level. (#4224) by @antonpirker
+- fix(ai): Do not consume anthropic streaming stop (#4232) by @colin-sentry
+- fix(spotlight): Do not spam sentry_sdk.warnings logger w/ Spotlight (#4219) by @BYK
+- fix(docs): fixed code snippet (#4218) by @antonpirker
+- build(deps): bump actions/create-github-app-token from 1.11.7 to 1.12.0 (#4214) by @dependabot
+
 ## 2.25.0
 
 ### Various fixes & improvements

docs/conf.py

Lines changed: 1 addition & 1 deletion
@@ -31,7 +31,7 @@
 copyright = "2019-{}, Sentry Team and Contributors".format(datetime.now().year)
 author = "Sentry Team and Contributors"
 
-release = "2.25.0"
+release = "2.25.1"
 version = ".".join(release.split(".")[:2])  # The short X.Y version.
 
 
sentry_sdk/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -45,7 +45,7 @@
     "start_transaction",
     "trace",
     "monitor",
-    "_experimental_logger",
+    "logger",
 ]
 
 # Initialize the debug support after everything is loaded

sentry_sdk/_log_batcher.py

Lines changed: 142 additions & 0 deletions
@@ -0,0 +1,142 @@
+import os
+import random
+import threading
+from datetime import datetime, timezone
+from typing import Optional, List, Callable, TYPE_CHECKING, Any
+
+from sentry_sdk.utils import format_timestamp, safe_repr
+from sentry_sdk.envelope import Envelope
+
+if TYPE_CHECKING:
+    from sentry_sdk._types import Log
+
+
+class LogBatcher:
+    MAX_LOGS_BEFORE_FLUSH = 100
+    FLUSH_WAIT_TIME = 5.0
+
+    def __init__(
+        self,
+        capture_func,  # type: Callable[[Envelope], None]
+    ):
+        # type: (...) -> None
+        self._log_buffer = []  # type: List[Log]
+        self._capture_func = capture_func
+        self._running = True
+        self._lock = threading.Lock()
+
+        self._flush_event = threading.Event()  # type: threading.Event
+
+        self._flusher = None  # type: Optional[threading.Thread]
+        self._flusher_pid = None  # type: Optional[int]
+
+    def _ensure_thread(self):
+        # type: (...) -> bool
+        """For forking processes we might need to restart this thread.
+        This ensures that our process actually has that thread running.
+        """
+        if not self._running:
+            return False
+
+        pid = os.getpid()
+        if self._flusher_pid == pid:
+            return True
+
+        with self._lock:
+            # Recheck to make sure another thread didn't get here and start
+            # the flusher in the meantime
+            if self._flusher_pid == pid:
+                return True
+
+            self._flusher_pid = pid
+
+            self._flusher = threading.Thread(target=self._flush_loop)
+            self._flusher.daemon = True
+
+            try:
+                self._flusher.start()
+            except RuntimeError:
+                # Unfortunately at this point the interpreter is in a state that no
+                # longer allows us to spawn a thread and we have to bail.
+                self._running = False
+                return False
+
+        return True
+
+    def _flush_loop(self):
+        # type: (...) -> None
+        while self._running:
+            self._flush_event.wait(self.FLUSH_WAIT_TIME + random.random())
+            self._flush_event.clear()
+            self._flush()
+
+    def add(
+        self,
+        log,  # type: Log
+    ):
+        # type: (...) -> None
+        if not self._ensure_thread() or self._flusher is None:
+            return None
+
+        with self._lock:
+            self._log_buffer.append(log)
+            if len(self._log_buffer) >= self.MAX_LOGS_BEFORE_FLUSH:
+                self._flush_event.set()
+
+    def kill(self):
+        # type: (...) -> None
+        if self._flusher is None:
+            return
+
+        self._running = False
+        self._flush_event.set()
+        self._flusher = None
+
+    def flush(self):
+        # type: (...) -> None
+        self._flush()
+
+    @staticmethod
+    def _log_to_otel(log):
+        # type: (Log) -> Any
+        def format_attribute(key, val):
+            # type: (str, int | float | str | bool) -> Any
+            if isinstance(val, bool):
+                return {"key": key, "value": {"boolValue": val}}
+            if isinstance(val, int):
+                return {"key": key, "value": {"intValue": str(val)}}
+            if isinstance(val, float):
+                return {"key": key, "value": {"doubleValue": val}}
+            if isinstance(val, str):
+                return {"key": key, "value": {"stringValue": val}}
+            return {"key": key, "value": {"stringValue": safe_repr(val)}}
+
+        otel_log = {
+            "severityText": log["severity_text"],
+            "severityNumber": log["severity_number"],
+            "body": {"stringValue": log["body"]},
+            "timeUnixNano": str(log["time_unix_nano"]),
+            "attributes": [
+                format_attribute(k, v) for (k, v) in log["attributes"].items()
+            ],
+        }
+
+        if "trace_id" in log:
+            otel_log["traceId"] = log["trace_id"]
+
+        return otel_log
+
+    def _flush(self):
+        # type: (...) -> Optional[Envelope]
+
+        envelope = Envelope(
+            headers={"sent_at": format_timestamp(datetime.now(timezone.utc))}
+        )
+        with self._lock:
+            for log in self._log_buffer:
+                envelope.add_log(self._log_to_otel(log))
+            self._log_buffer.clear()
+        if envelope.items:
+            self._capture_func(envelope)
+            return envelope
+        return None
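Taken together, the new module buffers log records and flushes them into an envelope from a daemon thread, either every ~5 seconds or as soon as 100 records have accumulated, restarting the thread after a fork. Below is a minimal, hypothetical sketch of driving the batcher directly; in the SDK it is constructed by the client with its internal `_capture_envelope` function (see the `sentry_sdk/client.py` diff further down), and the log dict shape is inferred from `_log_to_otel` above.

```python
import time

from sentry_sdk._log_batcher import LogBatcher


def capture(envelope):
    # Stand-in for the client's internal capture function: just report
    # how many log items ended up in the envelope.
    print("captured envelope with", len(envelope.items), "item(s)")


batcher = LogBatcher(capture_func=capture)

# Log dict keys mirror what _log_to_otel reads; the values here are made up.
batcher.add(
    {
        "severity_text": "info",
        "severity_number": 9,
        "body": "user signed in",
        "attributes": {"user_id": 42, "plan": "free"},
        "time_unix_nano": time.time_ns(),
    }
)

batcher.flush()  # force an immediate flush instead of waiting ~5 seconds
batcher.kill()   # stop the background flusher thread
```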

sentry_sdk/client.py

Lines changed: 21 additions & 55 deletions
@@ -1,9 +1,7 @@
-import json
 import os
 import uuid
 import random
 import socket
-import logging
 from collections.abc import Mapping
 from datetime import datetime, timezone
 from importlib import import_module
@@ -65,6 +63,7 @@
     from sentry_sdk.session import Session
     from sentry_sdk.spotlight import SpotlightClient
     from sentry_sdk.transport import Transport
+    from sentry_sdk._log_batcher import LogBatcher
 
 I = TypeVar("I", bound=Integration)  # noqa: E741
 
@@ -178,6 +177,7 @@ def __init__(self, options=None):
         self.transport = None  # type: Optional[Transport]
         self.monitor = None  # type: Optional[Monitor]
         self.metrics_aggregator = None  # type: Optional[MetricsAggregator]
+        self.log_batcher = None  # type: Optional[LogBatcher]
 
     def __getstate__(self, *args, **kwargs):
         # type: (*Any, **Any) -> Any
@@ -375,6 +375,12 @@ def _capture_envelope(envelope):
                     "Metrics not supported on Python 3.6 and lower with gevent."
                 )
 
+        self.log_batcher = None
+        if experiments.get("enable_logs", False):
+            from sentry_sdk._log_batcher import LogBatcher
+
+            self.log_batcher = LogBatcher(capture_func=_capture_envelope)
+
         max_request_body_size = ("always", "never", "small", "medium")
         if self.options["max_request_body_size"] not in max_request_body_size:
             raise ValueError(
@@ -451,6 +457,7 @@ def _capture_envelope(envelope):
         if (
             self.monitor
            or self.metrics_aggregator
+            or self.log_batcher
            or has_profiling_enabled(self.options)
            or isinstance(self.transport, BaseHttpTransport)
         ):
@@ -868,15 +875,11 @@ def capture_event(
 
     def _capture_experimental_log(self, current_scope, log):
         # type: (Scope, Log) -> None
-        logs_enabled = self.options["_experiments"].get("enable_sentry_logs", False)
+        logs_enabled = self.options["_experiments"].get("enable_logs", False)
         if not logs_enabled:
             return
         isolation_scope = current_scope.get_isolation_scope()
 
-        headers = {
-            "sent_at": format_timestamp(datetime.now(timezone.utc)),
-        }  # type: dict[str, object]
-
         environment = self.options.get("environment")
         if environment is not None and "sentry.environment" not in log["attributes"]:
             log["attributes"]["sentry.environment"] = environment
@@ -900,59 +903,18 @@ def _capture_experimental_log(self, current_scope, log):
         # If debug is enabled, log the log to the console
         debug = self.options.get("debug", False)
         if debug:
-            severity_text_to_logging_level = {
-                "trace": logging.DEBUG,
-                "debug": logging.DEBUG,
-                "info": logging.INFO,
-                "warn": logging.WARNING,
-                "error": logging.ERROR,
-                "fatal": logging.CRITICAL,
-            }
-            logger.log(
-                severity_text_to_logging_level.get(log["severity_text"], logging.DEBUG),
-                f'[Sentry Logs] {log["body"]}',
+            logger.debug(
+                f'[Sentry Logs] [{log.get("severity_text")}] {log.get("body")}'
             )
 
-        envelope = Envelope(headers=headers)
-
-        before_emit_log = self.options["_experiments"].get("before_emit_log")
-        if before_emit_log is not None:
-            log = before_emit_log(log, {})
+        before_send_log = self.options["_experiments"].get("before_send_log")
+        if before_send_log is not None:
+            log = before_send_log(log, {})
         if log is None:
             return
 
-        def format_attribute(key, val):
-            # type: (str, int | float | str | bool) -> Any
-            if isinstance(val, bool):
-                return {"key": key, "value": {"boolValue": val}}
-            if isinstance(val, int):
-                return {"key": key, "value": {"intValue": str(val)}}
-            if isinstance(val, float):
-                return {"key": key, "value": {"doubleValue": val}}
-            if isinstance(val, str):
-                return {"key": key, "value": {"stringValue": val}}
-            return {"key": key, "value": {"stringValue": json.dumps(val)}}
-
-        otel_log = {
-            "severityText": log["severity_text"],
-            "severityNumber": log["severity_number"],
-            "body": {"stringValue": log["body"]},
-            "timeUnixNano": str(log["time_unix_nano"]),
-            "attributes": [
-                format_attribute(k, v) for (k, v) in log["attributes"].items()
-            ],
-        }
-
-        if "trace_id" in log:
-            otel_log["traceId"] = log["trace_id"]
-
-        envelope.add_log(otel_log)  # TODO: batch these
-
-        if self.spotlight:
-            self.spotlight.capture_envelope(envelope)
-
-        if self.transport is not None:
-            self.transport.capture_envelope(envelope)
+        if self.log_batcher:
+            self.log_batcher.add(log)
 
     def capture_session(
         self, session  # type: Session
@@ -1006,6 +968,8 @@ def close(
             self.session_flusher.kill()
             if self.metrics_aggregator is not None:
                 self.metrics_aggregator.kill()
+            if self.log_batcher is not None:
+                self.log_batcher.kill()
             if self.monitor:
                 self.monitor.kill()
             self.transport.kill()
@@ -1030,6 +994,8 @@ def flush(
             self.session_flusher.flush()
             if self.metrics_aggregator is not None:
                 self.metrics_aggregator.flush()
+            if self.log_batcher is not None:
+                self.log_batcher.flush()
             self.transport.flush(timeout=timeout, callback=callback)
 
     def __enter__(self):
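For reference, the renamed experimental options above would be consumed from application code roughly like this. The DSN is a placeholder, and the level helper on `sentry_sdk.logger` is an assumption based on the export rename in `sentry_sdk/__init__.py` above; only the `_experiments` keys come directly from this diff.

```python
import sentry_sdk


def before_send_log(log, hint):
    # Drop debug-level logs; anything returned unchanged gets batched and sent.
    if log["severity_text"] == "debug":
        return None
    return log


sentry_sdk.init(
    dsn="https://examplePublicKey@o0.ingest.sentry.io/0",  # placeholder DSN
    _experiments={
        "enable_logs": True,  # renamed from "enable_sentry_logs"
        "before_send_log": before_send_log,  # renamed from "before_emit_log"
    },
)

sentry_sdk.logger.info("Hello, Sentry logs!")  # assumed level helper on the logger module
```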

sentry_sdk/consts.py

Lines changed: 2 additions & 2 deletions
@@ -78,7 +78,7 @@ class CompressionAlgo(Enum):
                 Callable[[str, MetricValue, MeasurementUnit, MetricTags], bool]
             ],
             "metric_code_locations": Optional[bool],
-            "enable_sentry_logs": Optional[bool],
+            "enable_logs": Optional[bool],
         },
         total=False,
     )
@@ -966,4 +966,4 @@ def _get_default_options():
 del _get_default_options
 
 
-VERSION = "2.25.0"
+VERSION = "2.25.1"

sentry_sdk/integrations/anthropic.py

Lines changed: 2 additions & 4 deletions
@@ -184,8 +184,7 @@ def new_iterator():
                 input_tokens, output_tokens, content_blocks = _collect_ai_data(
                     event, input_tokens, output_tokens, content_blocks
                 )
-                if event.type != "message_stop":
-                    yield event
+                yield event
 
             _add_ai_data_to_span(
                 span, integration, input_tokens, output_tokens, content_blocks
@@ -202,8 +201,7 @@ async def new_iterator_async():
                 input_tokens, output_tokens, content_blocks = _collect_ai_data(
                     event, input_tokens, output_tokens, content_blocks
                 )
-                if event.type != "message_stop":
-                    yield event
+                yield event
 
             _add_ai_data_to_span(
                 span, integration, input_tokens, output_tokens, content_blocks
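Dropping the `message_stop` filter means the integration no longer swallows the final streaming event. A rough sketch of the user-visible behavior, assuming the anthropic package, an API key in the environment, and Sentry's AnthropicIntegration enabled via `sentry_sdk.init()`; the model name is a placeholder.

```python
import anthropic

client = anthropic.Anthropic()  # reads ANTHROPIC_API_KEY from the environment

stream = client.messages.create(
    model="claude-3-5-sonnet-latest",  # placeholder model id
    max_tokens=64,
    messages=[{"role": "user", "content": "Say hi"}],
    stream=True,
)

for event in stream:
    if event.type == "message_stop":
        # Previously the integration filtered this event out; with this fix
        # it is yielded through to application code again.
        print("stream finished")
```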
