Skip to content

Commit 73e7b39

Browse files
Remove Context inclusion from to_json of LogRecord
1 parent 9ff670e commit 73e7b39

File tree

2 files changed

+1
-178
lines changed

2 files changed

+1
-178
lines changed

opentelemetry-sdk/src/opentelemetry/sdk/_logs/_internal/__init__.py

Lines changed: 0 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -235,19 +235,6 @@ def __eq__(self, other: object) -> bool:
235235
return NotImplemented
236236
return self.__dict__ == other.__dict__
237237

238-
def serialized_context(self) -> dict:
239-
"""Returns JSON-serializable copy of stored Context"""
240-
context_dict = {}
241-
if self.context is not None:
242-
for key, value in self.context.items():
243-
try:
244-
json.dumps(value)
245-
context_dict[key] = value
246-
except TypeError:
247-
# If not JSON-serializable, use string representation
248-
context_dict[key] = str(value)
249-
return context_dict
250-
251238
def to_json(self, indent: int | None = 4) -> str:
252239
return json.dumps(
253240
{
@@ -262,7 +249,6 @@ def to_json(self, indent: int | None = 4) -> str:
262249
"dropped_attributes": self.dropped_attributes,
263250
"timestamp": ns_to_iso_str(self.timestamp),
264251
"observed_timestamp": ns_to_iso_str(self.observed_timestamp),
265-
"context": self.serialized_context(),
266252
"trace_id": (
267253
f"0x{format_trace_id(self.trace_id)}"
268254
if self.trace_id is not None

opentelemetry-sdk/tests/logs/test_log_record.py

Lines changed: 1 addition & 164 deletions
Original file line numberDiff line numberDiff line change
@@ -13,59 +13,20 @@
1313
# limitations under the License.
1414

1515
import json
16-
import logging
1716
import unittest
1817
import warnings
19-
from unittest.mock import patch
2018

2119
from opentelemetry._logs.severity import SeverityNumber
2220
from opentelemetry.attributes import BoundedAttributes
23-
from opentelemetry.sdk import trace
2421
from opentelemetry.sdk._logs import (
25-
LogData,
2622
LogDroppedAttributesWarning,
27-
LoggerProvider,
28-
LoggingHandler,
2923
LogLimits,
3024
LogRecord,
31-
LogRecordProcessor,
3225
)
3326
from opentelemetry.sdk.resources import Resource
34-
from opentelemetry.trace import (
35-
INVALID_SPAN,
36-
format_span_id,
37-
format_trace_id,
38-
set_span_in_context,
39-
)
4027

4128

4229
class TestLogRecord(unittest.TestCase):
43-
def test_serialized_context_none(self):
44-
record = LogRecord(context=None)
45-
self.assertEqual({}, record.serialized_context())
46-
47-
def test_serialized_context_serializable(self):
48-
context = {
49-
"test-string": "value",
50-
"test-number": 42,
51-
"test-list": [1, 2, 3],
52-
"test-dict": {"key": "value"},
53-
"test-null": None,
54-
"test-bool": True,
55-
}
56-
record = LogRecord(context=context)
57-
self.assertEqual(context, record.serialized_context())
58-
59-
def test_serialized_context_non_serializable(self):
60-
class MyTestObject:
61-
def __str__(self):
62-
return "foo-bar"
63-
64-
context = {"test-string": "value", "test-object": MyTestObject()}
65-
record = LogRecord(context=context)
66-
expected = {"test-string": "value", "test-object": "foo-bar"}
67-
self.assertEqual(expected, record.serialized_context())
68-
6930
def test_log_record_to_json(self):
7031
expected = json.dumps(
7132
{
@@ -81,7 +42,6 @@ def test_log_record_to_json(self):
8142
"dropped_attributes": 0,
8243
"timestamp": "1970-01-01T00:00:00.000000Z",
8344
"observed_timestamp": "1970-01-01T00:00:00.000000Z",
84-
"context": {},
8545
"trace_id": "",
8646
"span_id": "",
8747
"trace_flags": None,
@@ -108,100 +68,9 @@ def test_log_record_to_json(self):
10868
self.assertEqual(expected, actual.to_json(indent=4))
10969
self.assertEqual(
11070
actual.to_json(indent=None),
111-
'{"body": "a log line", "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "context": {}, "trace_id": "", "span_id": "", "trace_flags": null, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
71+
'{"body": "a log line", "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "trace_id": "", "span_id": "", "trace_flags": null, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
11272
)
11373

114-
# pylint: disable=too-many-locals
115-
@patch("opentelemetry.sdk._logs._internal.get_current_span")
116-
@patch("opentelemetry.trace.propagation.set_value")
117-
@patch("opentelemetry.sdk.trace.RandomIdGenerator.generate_span_id")
118-
@patch("opentelemetry.sdk.trace.RandomIdGenerator.generate_trace_id")
119-
def test_log_record_to_json_with_span_correlation(
120-
self,
121-
mock_generate_trace_id,
122-
mock_generate_span_id,
123-
mock_set_value,
124-
mock_get_current_span,
125-
):
126-
trace_id = 0x000000000000000000000000DEADBEEF
127-
span_id = 0x00000000DEADBEF0
128-
fixed_key = "current-span-test"
129-
130-
mock_generate_trace_id.return_value = trace_id
131-
mock_generate_span_id.return_value = span_id
132-
133-
def mock_set_value_impl(key, value, context=None):
134-
if context is None:
135-
context = {}
136-
context[fixed_key] = value
137-
return context
138-
139-
mock_set_value.side_effect = mock_set_value_impl
140-
141-
def mock_get_span_impl(context=None):
142-
if context is None or fixed_key not in context:
143-
return INVALID_SPAN
144-
return context[fixed_key]
145-
146-
mock_get_current_span.side_effect = mock_get_span_impl
147-
148-
_, _ = set_up_test_logging(logging.WARNING)
149-
tracer = trace.TracerProvider().get_tracer(__name__)
150-
151-
with tracer.start_as_current_span("test") as span:
152-
context = set_span_in_context(span)
153-
span_context = span.get_span_context()
154-
155-
expected = json.dumps(
156-
{
157-
"body": "a log line",
158-
"severity_number": None,
159-
"severity_text": None,
160-
"attributes": {
161-
"mapping": {"key": "value"},
162-
"none": None,
163-
"sequence": [1, 2],
164-
"str": "string",
165-
},
166-
"dropped_attributes": 0,
167-
"timestamp": "1970-01-01T00:00:00.000000Z",
168-
"observed_timestamp": "1970-01-01T00:00:00.000000Z",
169-
"context": {
170-
fixed_key: f'_Span(name="test", context=SpanContext(trace_id=0x{format_trace_id(trace_id)}, '
171-
f"span_id=0x{format_span_id(span_id)}, "
172-
f"trace_flags=0x01, trace_state=[], is_remote=False))"
173-
},
174-
"trace_id": f"0x{format_trace_id(span_context.trace_id)}",
175-
"span_id": f"0x{format_span_id(span_context.span_id)}",
176-
"trace_flags": span_context.trace_flags,
177-
"resource": {
178-
"attributes": {"service.name": "foo"},
179-
"schema_url": "",
180-
},
181-
},
182-
indent=4,
183-
)
184-
185-
actual = LogRecord(
186-
timestamp=0,
187-
observed_timestamp=0,
188-
context=context,
189-
body="a log line",
190-
resource=Resource({"service.name": "foo"}),
191-
attributes={
192-
"mapping": {"key": "value"},
193-
"none": None,
194-
"sequence": [1, 2],
195-
"str": "string",
196-
},
197-
)
198-
199-
self.assertEqual(expected, actual.to_json(indent=4))
200-
self.assertEqual(
201-
'{"body": "a log line", "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "context": {"current-span-test": "_Span(name=\\"test\\", context=SpanContext(trace_id=0x000000000000000000000000deadbeef, span_id=0x00000000deadbef0, trace_flags=0x01, trace_state=[], is_remote=False))"}, "trace_id": "0x000000000000000000000000deadbeef", "span_id": "0x00000000deadbef0", "trace_flags": 1, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
202-
actual.to_json(indent=None),
203-
)
204-
20574
def test_log_record_to_json_serializes_severity_number_as_int(self):
20675
actual = LogRecord(
20776
timestamp=0,
@@ -299,35 +168,3 @@ def test_log_record_dropped_attributes_unset_limits(self):
299168
)
300169
self.assertTrue(result.dropped_attributes == 0)
301170
self.assertEqual(attr, result.attributes)
302-
303-
304-
def set_up_test_logging(level, formatter=None, root_logger=False):
305-
logger_provider = LoggerProvider()
306-
processor = FakeProcessor()
307-
logger_provider.add_log_record_processor(processor)
308-
logger = logging.getLogger(None if root_logger else "foo")
309-
handler = LoggingHandler(level=level, logger_provider=logger_provider)
310-
if formatter:
311-
handler.setFormatter(formatter)
312-
logger.addHandler(handler)
313-
return processor, logger
314-
315-
316-
class FakeProcessor(LogRecordProcessor):
317-
def __init__(self):
318-
self.log_data_emitted = []
319-
320-
def emit(self, log_data: LogData):
321-
self.log_data_emitted.append(log_data)
322-
323-
def shutdown(self):
324-
pass
325-
326-
def force_flush(self, timeout_millis: int = 30000):
327-
pass
328-
329-
def emit_count(self):
330-
return len(self.log_data_emitted)
331-
332-
def get_log_record(self, i):
333-
return self.log_data_emitted[i].log_record

0 commit comments

Comments (0)