Skip to content

Commit 4355654

Browse files
committed
linting
1 parent 0cc506e commit 4355654

File tree

1 file changed

+5
-4
lines changed

1 file changed

+5
-4
lines changed

aws-opentelemetry-distro/src/amazon/opentelemetry/distro/exporter/otlp/aws/logs/aws_batch_log_record_processor.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818

1919
class AwsBatchLogRecordProcessor(BatchLogRecordProcessor):
2020
_BASE_LOG_BUFFER_BYTE_SIZE = (
21-
1500 # Buffer size in bytes to account for log metadata not included in the body or attribute size calculation
21+
1000 # Buffer size in bytes to account for log metadata not included in the body or attribute size calculation
2222
)
2323
_MAX_LOG_REQUEST_BYTE_SIZE = (
2424
1048576 # https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/CloudWatch-OTLPEndpoint.html
@@ -97,7 +97,8 @@ def _estimate_log_size(self, log: LogData, depth: int = 3) -> int:
9797
Estimates the size in bytes of a log by calculating the size of its body and its attributes
9898
and adding a buffer amount to account for other log metadata information.
9999
Will process complex log structures up to the specified depth limit.
100-
If the depth limit of the log structure is exceeded, returns 0.
100+
If the depth limit of the log structure is exceeded, the calculation is truncated
101+
to everything up to that point.
101102
102103
Args:
103104
log: The Log object to calculate size for
@@ -108,6 +109,7 @@ def _estimate_log_size(self, log: LogData, depth: int = 3) -> int:
108109
"""
109110

110111
# Use a queue to prevent excessive recursive calls.
112+
# We calculate based on the size of the log record body and attributes for the log.
111113
queue: List[tuple[AnyValue, int]] = [(log.log_record.body, 0), (log.log_record.attributes, -1)]
112114

113115
size: int = self._BASE_LOG_BUFFER_BYTE_SIZE
@@ -135,7 +137,6 @@ def _estimate_log_size(self, log: LogData, depth: int = 3) -> int:
135137
continue
136138

137139
if current_depth <= depth:
138-
# Sequence has to be
139140
if isinstance(next_val, Sequence):
140141
for content in next_val:
141142
new_queue.append((cast(AnyValue, content), current_depth + 1))
@@ -145,7 +146,7 @@ def _estimate_log_size(self, log: LogData, depth: int = 3) -> int:
145146
size += len(key)
146147
new_queue.append((content, current_depth + 1))
147148
else:
148-
_logger.debug(f"Max log dept of {depth} exceeded. Log data size will not be accurately calculated.")
149+
_logger.debug(f"Max log depth of {depth} exceeded. Log data size will not be accurately calculated.")
149150

150151
queue = new_queue
151152

0 commit comments

Comments
 (0)