 # limitations under the License.

 import json
+import logging
 import unittest
 import warnings
+from unittest.mock import patch

 from opentelemetry._logs.severity import SeverityNumber
 from opentelemetry.attributes import BoundedAttributes
+from opentelemetry.sdk import trace
 from opentelemetry.sdk._logs import (
+    LogData,
     LogDroppedAttributesWarning,
+    LoggerProvider,
+    LoggingHandler,
     LogLimits,
     LogRecord,
+    LogRecordProcessor,
 )
 from opentelemetry.sdk.resources import Resource
+from opentelemetry.trace import (
+    INVALID_SPAN,
+    format_span_id,
+    format_trace_id,
+    set_span_in_context,
+)


 class TestLogRecord(unittest.TestCase):
@@ -42,7 +55,7 @@ def test_log_record_to_json(self):
4255 "dropped_attributes" : 0 ,
4356 "timestamp" : "1970-01-01T00:00:00.000000Z" ,
4457 "observed_timestamp" : "1970-01-01T00:00:00.000000Z" ,
45- "context" : "" ,
58+ "context" : None ,
4659 "trace_id" : "" ,
4760 "span_id" : "" ,
4861 "trace_flags" : None ,
@@ -69,9 +82,99 @@ def test_log_record_to_json(self):
         self.assertEqual(expected, actual.to_json(indent=4))
         self.assertEqual(
             actual.to_json(indent=None),
-            '{"body": "a log line", "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "context": "", "trace_id": "", "span_id": "", "trace_flags": null, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
+            '{"body": "a log line", "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "context": null, "trace_id": "", "span_id": "", "trace_flags": null, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
         )

+    @patch("opentelemetry.sdk._logs._internal.get_current_span")
+    @patch("opentelemetry.trace.propagation.set_value")
+    @patch("opentelemetry.sdk.trace.RandomIdGenerator.generate_span_id")
+    @patch("opentelemetry.sdk.trace.RandomIdGenerator.generate_trace_id")
+    def test_log_record_to_json_with_span_correlation(
+        self,
+        mock_generate_trace_id,
+        mock_generate_span_id,
+        mock_set_value,
+        mock_get_current_span,
+    ):
+        trace_id = 0x000000000000000000000000DEADBEEF
+        span_id = 0x00000000DEADBEF0
+        fixed_key = "current-span-test"
+
+        mock_generate_trace_id.return_value = trace_id
+        mock_generate_span_id.return_value = span_id
+
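+        # Patched set_value: store the value in a plain dict under fixed_key so
+        # the context serializes to a predictable single-entry mapping.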
+        def mock_set_value_impl(key, value, context=None):
+            if context is None:
+                context = {}
+            context[fixed_key] = value
+            return context
+
+        mock_set_value.side_effect = mock_set_value_impl
+
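+        # Patched get_current_span: read the span back out of the dict-based
+        # context, falling back to INVALID_SPAN when it is missing.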
+        def mock_get_span_impl(context=None):
+            if context is None or fixed_key not in context:
+                return INVALID_SPAN
+            return context[fixed_key]
+
+        mock_get_current_span.side_effect = mock_get_span_impl
+
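+        # Attach an SDK LoggingHandler and create a tracer whose ID generator is
+        # mocked, so the started span gets the fixed trace_id/span_id above.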
+        _, _ = set_up_test_logging(logging.WARNING)
+        tracer = trace.TracerProvider().get_tracer(__name__)
+
+        with tracer.start_as_current_span("test") as span:
+            context = set_span_in_context(span)
+            span_context = span.get_span_context()
+
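+            # The serialized record should carry the dict-based context (holding
+            # the span repr) plus the trace/span IDs of the active span.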
+            expected = json.dumps(
+                {
+                    "body": "a log line",
+                    "severity_number": None,
+                    "severity_text": None,
+                    "attributes": {
+                        "mapping": {"key": "value"},
+                        "none": None,
+                        "sequence": [1, 2],
+                        "str": "string",
+                    },
+                    "dropped_attributes": 0,
+                    "timestamp": "1970-01-01T00:00:00.000000Z",
+                    "observed_timestamp": "1970-01-01T00:00:00.000000Z",
+                    "context": {
+                        fixed_key: f'_Span(name="test", context=SpanContext(trace_id=0x{format_trace_id(trace_id)}, '
+                        f"span_id=0x{format_span_id(span_id)}, "
+                        f"trace_flags=0x01, trace_state=[], is_remote=False))"
+                    },
+                    "trace_id": f"0x{format_trace_id(span_context.trace_id)}",
+                    "span_id": f"0x{format_span_id(span_context.span_id)}",
+                    "trace_flags": span_context.trace_flags,
+                    "resource": {
+                        "attributes": {"service.name": "foo"},
+                        "schema_url": "",
+                    },
+                },
+                indent=4,
+            )
+
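+            # Build the LogRecord with the span-bearing context; to_json()
+            # should emit the correlated IDs and the context mapping above.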
+            actual = LogRecord(
+                timestamp=0,
+                observed_timestamp=0,
+                context=context,
+                body="a log line",
+                resource=Resource({"service.name": "foo"}),
+                attributes={
+                    "mapping": {"key": "value"},
+                    "none": None,
+                    "sequence": [1, 2],
+                    "str": "string",
+                },
+            )
+
+            self.assertEqual(expected, actual.to_json(indent=4))
+            self.assertEqual(
+                '{"body": "a log line", "severity_number": null, "severity_text": null, "attributes": {"mapping": {"key": "value"}, "none": null, "sequence": [1, 2], "str": "string"}, "dropped_attributes": 0, "timestamp": "1970-01-01T00:00:00.000000Z", "observed_timestamp": "1970-01-01T00:00:00.000000Z", "context": {"current-span-test": "_Span(name=\\"test\\", context=SpanContext(trace_id=0x000000000000000000000000deadbeef, span_id=0x00000000deadbef0, trace_flags=0x01, trace_state=[], is_remote=False))"}, "trace_id": "0x000000000000000000000000deadbeef", "span_id": "0x00000000deadbef0", "trace_flags": 1, "resource": {"attributes": {"service.name": "foo"}, "schema_url": ""}}',
+                actual.to_json(indent=None),
+            )
+
     def test_log_record_to_json_serializes_severity_number_as_int(self):
         actual = LogRecord(
             timestamp=0,
@@ -169,3 +272,35 @@ def test_log_record_dropped_attributes_unset_limits(self):
         )
         self.assertTrue(result.dropped_attributes == 0)
         self.assertEqual(attr, result.attributes)
+
+
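+# Helper: wire a LoggingHandler to a LoggerProvider that uses the in-memory
+# FakeProcessor below, and attach it to the requested logger.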
+def set_up_test_logging(level, formatter=None, root_logger=False):
+    logger_provider = LoggerProvider()
+    processor = FakeProcessor()
+    logger_provider.add_log_record_processor(processor)
+    logger = logging.getLogger(None if root_logger else "foo")
+    handler = LoggingHandler(level=level, logger_provider=logger_provider)
+    if formatter:
+        handler.setFormatter(formatter)
+    logger.addHandler(handler)
+    return processor, logger
+
+
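+# Minimal LogRecordProcessor that keeps emitted LogData in memory so tests can
+# inspect what was logged.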
+class FakeProcessor(LogRecordProcessor):
+    def __init__(self):
+        self.log_data_emitted = []
+
+    def emit(self, log_data: LogData):
+        self.log_data_emitted.append(log_data)
+
+    def shutdown(self):
+        pass
+
+    def force_flush(self, timeout_millis: int = 30000):
+        pass
+
+    def emit_count(self):
+        return len(self.log_data_emitted)
+
+    def get_log_record(self, i):
+        return self.log_data_emitted[i].log_record