4 changes: 2 additions & 2 deletions dev-constraints.txt
@@ -17,5 +17,5 @@ setuptools==69.5.1

# pinned for snapshot tests. this should be bumped regularly and snapshots updated by running
# tox -f py311-test -- --snapshot-update
opentelemetry-api==1.38.0
opentelemetry-sdk==1.38.0
opentelemetry-api==1.39.0
opentelemetry-sdk==1.39.0
7 changes: 2 additions & 5 deletions opentelemetry-exporter-gcp-logging/setup.cfg
@@ -27,11 +27,8 @@ packages=find_namespace:
install_requires =
google-cloud-logging ~= 3.0

# Set upper bound for breaking changes in 1.39.0, see
# https://github.com/open-telemetry/opentelemetry-python/pull/4771. Will increase the
# minimum version after that release.
opentelemetry-sdk >= 1.35.0, < 1.39.0
opentelemetry-api >= 1.35.0
opentelemetry-sdk >= 1.39.0
opentelemetry-api >= 1.39.0

opentelemetry-resourcedetector-gcp >= 1.5.0dev0, == 1.*

@@ -57,8 +57,11 @@
get_monitored_resource,
)
from opentelemetry.sdk import version as opentelemetry_sdk_version
from opentelemetry.sdk._logs import LogData
from opentelemetry.sdk._logs.export import LogExporter
from opentelemetry.sdk._logs import ReadableLogRecord
from opentelemetry.sdk._logs.export import (
LogRecordExporter,
LogRecordExportResult,
)
from opentelemetry.sdk.resources import Resource
from opentelemetry.trace import format_span_id, format_trace_id
from opentelemetry.util.types import AnyValue
@@ -251,7 +254,7 @@ def _get_monitored_resource(
)


class CloudLoggingExporter(LogExporter):
class CloudLoggingExporter(LogRecordExporter):
def __init__(
self,
project_id: Optional[str] = None,
@@ -320,12 +323,14 @@ def pick_log_id(self, log_name_attr: Any, event_name: str | None) -> str:
return event_name.replace("/", "%2F")
return self.default_log_name

def export(self, batch: Sequence[LogData]):
def export(
self, batch: Sequence[ReadableLogRecord]
) -> LogRecordExportResult:
now = datetime.datetime.now()
log_entries = []
for log_data in batch:
for readable_log_record in batch:
log_entry = LogEntry()
log_record = log_data.log_record
log_record = readable_log_record.log_record
attributes = log_record.attributes or {}
project_id = str(
attributes.get(PROJECT_ID_ATTRIBUTE_KEY, self.project_id)
@@ -342,7 +347,7 @@ def export(self, batch: Sequence[LogData]):
ts.FromDatetime(now)
log_entry.timestamp = ts
if monitored_resource := _get_monitored_resource(
log_record.resource
readable_log_record.resource
):
log_entry.resource = monitored_resource
log_entry.trace_sampled = (
@@ -371,6 +376,8 @@ def export(self, batch: Sequence[LogData]):

self._write_log_entries(log_entries)

return LogRecordExportResult.SUCCESS

@staticmethod
def _write_log_entries_to_file(file: TextIO, log_entries: list[LogEntry]):
"""Formats logs into the Cloud Logging structured log format, and writes them to the
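A minimal usage sketch of the renamed interface, put together from the imports and constructor calls that the tests below exercise; the project id, scope name, timestamp, and log body are illustrative only, and structured_json_file keeps the sketch writing to an in-memory buffer rather than calling the Cloud Logging API.

from io import StringIO

from opentelemetry.exporter.cloud_logging import CloudLoggingExporter
from opentelemetry.sdk._logs import ReadableLogRecord
from opentelemetry.sdk._logs._internal import LogRecord
from opentelemetry.sdk._logs.export import LogRecordExportResult
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope

# Write structured JSON log lines to an in-memory buffer instead of calling
# the Cloud Logging API, mirroring the structured_json_file test fixture.
buf = StringIO()
exporter = CloudLoggingExporter(project_id="my-project", structured_json_file=buf)

# ReadableLogRecord wraps the SDK LogRecord together with its instrumentation
# scope and resource, replacing the old LogData container.
record = ReadableLogRecord(
    log_record=LogRecord(timestamp=1736976310997977393, body="hello"),
    instrumentation_scope=InstrumentationScope("example"),
    resource=Resource.get_empty(),
)

# export() takes a sequence of ReadableLogRecord and reports an explicit result.
result = exporter.export([record])
assert result is LogRecordExportResult.SUCCESS
print(buf.getvalue())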
@@ -37,7 +37,7 @@
unary_unary_rpc_method_handler,
)
from opentelemetry.exporter.cloud_logging import CloudLoggingExporter
from opentelemetry.sdk._logs import LogData
from opentelemetry.sdk._logs import ReadableLogRecord
from syrupy.assertion import SnapshotAssertion
from syrupy.extensions.json import JSONSnapshotExtension

@@ -133,7 +133,7 @@ def fixture_cloudloggingfake() -> Iterable[CloudLoggingFake]:
server.stop(None)


ExportAndAssertSnapshot = Callable[[Sequence[LogData]], None]
ExportAndAssertSnapshot = Callable[[Sequence[ReadableLogRecord]], None]


@pytest.fixture(
@@ -148,7 +148,9 @@ def fixture_export_and_assert_snapshot(
"cloudloggingfake"
)

def export_and_assert_snapshot(log_data: Sequence[LogData]) -> None:
def export_and_assert_snapshot(
log_data: Sequence[ReadableLogRecord],
) -> None:
cloudloggingfake.exporter.export(log_data)

assert cloudloggingfake.get_calls() == snapshot(
@@ -158,7 +160,9 @@ def export_and_assert_snapshot(log_data: Sequence[LogData]) -> None:
return export_and_assert_snapshot

# pylint: disable=function-redefined
def export_and_assert_snapshot(log_data: Sequence[LogData]) -> None:
def export_and_assert_snapshot(
log_data: Sequence[ReadableLogRecord],
) -> None:
buf = StringIO()
exporter = CloudLoggingExporter(
project_id=PROJECT_ID, structured_json_file=buf
103 changes: 63 additions & 40 deletions opentelemetry-exporter-gcp-logging/tests/test_cloud_logging.py
@@ -25,6 +25,7 @@

Be sure to review the changes.
"""

import re
from io import StringIO
from textwrap import dedent
@@ -44,10 +45,15 @@
CloudLoggingExporter,
is_log_id_valid,
)
from opentelemetry.sdk._logs import LogData
from opentelemetry.sdk._logs import ReadableLogRecord
from opentelemetry.sdk._logs._internal import LogRecord
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.trace import (
NonRecordingSpan,
SpanContext,
set_span_in_context,
)

PROJECT_ID = "fakeproject"

@@ -127,13 +133,13 @@ def test_too_large_log_raises_warning(caplog) -> None:
)
no_default_logname.export(
[
LogData(
ReadableLogRecord(
log_record=LogRecord(
body="abc",
resource=Resource({}),
attributes={str(i): "i" * 10000 for i in range(1000)},
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
)
@@ -146,12 +152,12 @@
def test_user_agent(cloudloggingfake: CloudLoggingFake) -> None:
cloudloggingfake.exporter.export(
[
LogData(
ReadableLogRecord(
log_record=LogRecord(
body="abc",
resource=Resource({}),
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
)
@@ -169,53 +175,53 @@ def test_agent_engine_monitored_resources(
export_and_assert_snapshot: ExportAndAssertSnapshot,
) -> None:
log_data = [
LogData(
ReadableLogRecord(
log_record=LogRecord(
body="valid agent engine",
timestamp=1736976310997977393,
resource=Resource(
{
"cloud.resource_id": "//aiplatform.googleapis.com/projects/some-project123-321/locations/europe-west3/reasoningEngines/8477639270431981568"
}
),
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource(
{
"cloud.resource_id": "//aiplatform.googleapis.com/projects/some-project123-321/locations/europe-west3/reasoningEngines/8477639270431981568"
}
),
),
LogData(
ReadableLogRecord(
log_record=LogRecord(
body="invalid 1",
timestamp=1736976310997977393,
resource=Resource(
{
"cloud.resource_id": "//aiplatform.googleapis.com/locations/europe-west3/reasoningEngines/8477639270431981568"
}
),
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource(
{
"cloud.resource_id": "//aiplatform.googleapis.com/locations/europe-west3/reasoningEngines/8477639270431981568"
}
),
),
LogData(
ReadableLogRecord(
log_record=LogRecord(
body="invalid 2",
timestamp=1736976310997977393,
resource=Resource(
{
"cloud.resource_id": "//aiplatform.googleapis.com/projects/some-project123-321/locations/europe-west3/reasoningEngines//8477639270431981568"
}
),
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource(
{
"cloud.resource_id": "//aiplatform.googleapis.com/projects/some-project123-321/locations/europe-west3/reasoningEngines//8477639270431981568"
}
),
),
LogData(
ReadableLogRecord(
log_record=LogRecord(
body="invalid 3",
timestamp=1736976310997977393,
resource=Resource(
{
"cloud.resource_id": "aiplatform.googleapis.com/projects/some-project123-321/locations/europe-west3/reasoningEngines//8477639270431981568"
}
),
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource(
{
"cloud.resource_id": "aiplatform.googleapis.com/projects/some-project123-321/locations/europe-west3/reasoningEngines//8477639270431981568"
}
),
),
]
export_and_assert_snapshot(log_data)
@@ -225,13 +231,18 @@ def test_convert_otlp_dict_body(
export_and_assert_snapshot: ExportAndAssertSnapshot,
) -> None:
log_data = [
LogData(
ReadableLogRecord(
log_record=LogRecord(
event_name="random.genai.event",
timestamp=1736976310997977393,
severity_number=SeverityNumber(20),
trace_id=25,
span_id=22,
context=set_span_in_context(
NonRecordingSpan(
context=SpanContext(
trace_id=25, span_id=22, is_remote=False
)
)
),
attributes={
"gen_ai.system": True,
"test": 23,
@@ -252,6 +263,7 @@
},
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
export_and_assert_snapshot(log_data)
@@ -261,7 +273,7 @@ def test_convert_otlp_various_different_types_in_attrs_and_bytes_body(
export_and_assert_snapshot: ExportAndAssertSnapshot,
) -> None:
log_data = [
LogData(
ReadableLogRecord(
log_record=LogRecord(
timestamp=1736976310997977393,
attributes={
@@ -273,6 +285,7 @@
body=b'{"Date": "2016-05-21T21:35:40Z", "CreationDate": "2012-05-05", "LogoType": "png", "Ref": 164611595, "Classe": ["Email addresses", "Passwords"],"Link":"http://some_link.com"}',
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
export_and_assert_snapshot(log_data)
@@ -282,12 +295,13 @@ def test_convert_non_json_dict_bytes(
export_and_assert_snapshot: ExportAndAssertSnapshot,
) -> None:
log_data = [
LogData(
ReadableLogRecord(
log_record=LogRecord(
timestamp=1736976310997977393,
body=b"123",
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
export_and_assert_snapshot(log_data)
Expand All @@ -297,13 +311,14 @@ def test_convert_gen_ai_body(
export_and_assert_snapshot: ExportAndAssertSnapshot,
) -> None:
log_data = [
LogData(
ReadableLogRecord(
log_record=LogRecord(
event_name="gen_ai.client.inference.operation.details",
timestamp=1736976310997977393,
body=GEN_AI_DICT,
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
export_and_assert_snapshot(log_data)
Expand Down Expand Up @@ -361,12 +376,13 @@ def test_convert_various_types_of_bodies(
body: Union[str, bool, None, Mapping],
) -> None:
log_data = [
LogData(
ReadableLogRecord(
log_record=LogRecord(
timestamp=1736976310997977393,
body=body,
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
export_and_assert_snapshot(log_data)
@@ -376,7 +392,7 @@ def test_convert_various_types_of_attributes(
export_and_assert_snapshot: ExportAndAssertSnapshot,
) -> None:
log_data = [
LogData(
ReadableLogRecord(
log_record=LogRecord(
attributes={
"a": [{"key": b"bytes"}],
@@ -387,6 +403,7 @@
timestamp=1736976310997977393,
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
]
export_and_assert_snapshot(log_data)
@@ -399,17 +416,23 @@ def test_structured_json_lines():
)
exporter.export(
[
LogData(
ReadableLogRecord(
log_record=LogRecord(
event_name="foo",
timestamp=1736976310997977393,
severity_number=SeverityNumber(20),
trace_id=25,
span_id=22,
context=set_span_in_context(
NonRecordingSpan(
context=SpanContext(
trace_id=25, span_id=22, is_remote=False
)
)
),
attributes={"key": f"{i}"},
body="hello",
),
instrumentation_scope=InstrumentationScope("test"),
resource=Resource.get_empty(),
)
for i in range(5)
]
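The other recurring migration in these tests is how trace context reaches a LogRecord: the removed trace_id=/span_id= keyword arguments are replaced by a Context carrying a NonRecordingSpan. Shown in isolation, with the same illustrative IDs, timestamp, and body the tests use, the pattern is roughly:

from opentelemetry.sdk._logs._internal import LogRecord
from opentelemetry.trace import (
    NonRecordingSpan,
    SpanContext,
    set_span_in_context,
)

# Before this change: LogRecord(trace_id=25, span_id=22, ...).
# After: the ids travel inside a Context wrapping a non-recording span.
ctx = set_span_in_context(
    NonRecordingSpan(
        context=SpanContext(trace_id=25, span_id=22, is_remote=False)
    )
)
record = LogRecord(
    timestamp=1736976310997977393,
    body="hello",
    context=ctx,
)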