41 changes: 41 additions & 0 deletions util/opentelemetry-util-genai-dev/TRANSLATOR_README.md
@@ -0,0 +1,41 @@
# Translator

## Automatic Span Processing (Recommended)

Add `TraceloopSpanProcessor` to your `TracerProvider` to automatically transform all matching spans:

```python
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.util.genai.processors import TraceloopSpanProcessor

# Set up tracer provider
provider = TracerProvider()

# Add processor - transforms all matching spans automatically
processor = TraceloopSpanProcessor(
    attribute_transformations={
        "remove": ["debug_info"],
        "rename": {"model_ver": "llm.model.version"},
        "add": {"service.name": "my-llm"},
    },
    name_transformations={"chat *": "llm.openai.chat"},
    traceloop_attributes={
        "traceloop.entity.name": "MyLLMEntity"
    },
)
provider.add_span_processor(processor)
trace.set_tracer_provider(provider)

```
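
Rules can also be supplied as JSON through the `OTEL_GENAI_SPAN_TRANSFORM_RULES` environment variable before the processor is constructed, as the example script in this change does. A minimal sketch (the JSON schema mirrors the `RULE_SPEC` used there):

```python
import json
import os

# Set before constructing TraceloopSpanProcessor(); the example script
# relies on the processor picking these rules up at construction time.
os.environ["OTEL_GENAI_SPAN_TRANSFORM_RULES"] = json.dumps({
    "rules": [
        {
            "attribute_transformations": {
                "rename": {"traceloop.entity.input": "gen_ai.input.messages"}
            },
            "name_transformations": {"chat *": "genai.chat"},
        }
    ]
})
```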

## Transformation Rules

### Attributes
- **Remove**: `"remove": ["field1", "field2"]`
- **Rename**: `"rename": {"old_name": "new_name"}`
- **Add**: `"add": {"key": "value"}`

### Span Names
- **Direct**: `"old name": "new name"`
- **Pattern**: `"chat *": "llm.chat"` (wildcard matching; see the combined sketch below)
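
Putting the rules above together: with the configuration from the first example, a span named `chat gpt-4` carrying `debug_info` and `model_ver` attributes comes out roughly as follows (a minimal sketch; attribute values are illustrative):

```python
# Before transformation
name = "chat gpt-4"
attributes = {"debug_info": "verbose dump", "model_ver": "0.3"}

# After: attribute rules apply remove -> rename -> add (so "add" entries
# always win), and "chat *" matches via shell-style wildcards.
name = "llm.openai.chat"
attributes = {"llm.model.version": "0.3", "service.name": "my-llm"}
```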
@@ -0,0 +1,49 @@
#!/usr/bin/env python3

from __future__ import annotations

import json
import os
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor, ConsoleSpanExporter

from opentelemetry.util.genai.processors.traceloop_span_processor import TraceloopSpanProcessor

RULE_SPEC = {
    "rules": [
        {
            # NOTE: In Python dicts, duplicate keys are overwritten. The earlier
            # version used two separate "rename" entries so only the last one
            # survived. Combine them into a single mapping and optionally
            # remove noisy attributes.
            "attribute_transformations": {
                "rename": {
                    "traceloop.entity.input": "gen_ai.input.messages",
                    "traceloop.entity.output": "gen_ai.output.messages",
                },
                # Demonstrate removal (uncomment to test):
                # "remove": ["debug_info"],
            },
            "name_transformations": {"chat *": "genai.chat"},
        }
    ]
}
os.environ["OTEL_GENAI_SPAN_TRANSFORM_RULES"] = json.dumps(RULE_SPEC)

# Set up tracing
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
# Add the Traceloop processor
provider.add_span_processor(TraceloopSpanProcessor())
trace.set_tracer_provider(provider)
tracer = trace.get_tracer(__name__)

print("Creating spans ...\n")

with tracer.start_as_current_span("chat gpt-4") as span:
span.set_attribute("traceloop.entity.input", "some data")
span.set_attribute("debug_info", "remove me if rule had remove")

with tracer.start_as_current_span("vector encode") as span:
span.set_attribute("custom.kind", "embedding")
@@ -0,0 +1,6 @@
from .traceloop_span_processor import TraceloopSpanProcessor, TransformationRule

__all__ = [
    "TraceloopSpanProcessor",
    "TransformationRule",
]
@@ -0,0 +1,104 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import fnmatch
from typing import Any, Dict, Optional

from opentelemetry.sdk.trace import ReadableSpan

from .traceloop_span_generator import TraceloopSpanGenerator
from ..types import LLMInvocation


def _apply_attribute_transformations(
    base: Dict[str, Any], transformations: Optional[Dict[str, Any]]
) -> Dict[str, Any]:  # pragma: no cover - trivial helpers
    if not transformations:
        return base
    # Order: remove -> rename -> add (so add always wins)
    remove_keys = transformations.get("remove") or []
    for k in remove_keys:
        base.pop(k, None)
    rename_map = transformations.get("rename") or {}
    for old, new in rename_map.items():
        if old in base:
            base[new] = base.pop(old)
    add_map = transformations.get("add") or {}
    for k, v in add_map.items():
        base[k] = v
    return base


def _derive_new_name(
    original_name: str, name_transformations: Optional[Dict[str, str]]
) -> Optional[str]:  # pragma: no cover - simple matching
    if not name_transformations:
        return None
    for pattern, new_name in name_transformations.items():
        try:
            if fnmatch.fnmatch(original_name, pattern):
                return new_name
        except Exception:  # defensive
            continue
    return None


def transform_existing_span_to_telemetry(
    existing_span: ReadableSpan,
    attribute_transformations: Optional[Dict[str, Any]] = None,
    name_transformations: Optional[Dict[str, str]] = None,
    traceloop_attributes: Optional[Dict[str, Any]] = None,
    generator: Optional[TraceloopSpanGenerator] = None,
) -> LLMInvocation:
    """Create a synthetic LLMInvocation span from an ended (or ending) span.

    Returns the synthetic ``LLMInvocation`` used purely as a carrier for the new span.
    """
    base_attrs: Dict[str, Any] = (
        dict(existing_span.attributes) if existing_span.attributes else {}
    )

    # Apply transformations
    base_attrs = _apply_attribute_transformations(
        base_attrs, attribute_transformations
    )
    if traceloop_attributes:
        base_attrs.update(traceloop_attributes)

    # Span name rewrite (store so generator can use & remove later)
    new_name = _derive_new_name(existing_span.name, name_transformations)
    if new_name:
        base_attrs["_traceloop_new_name"] = new_name

    # Determine request_model (best-effort, fallback to unknown)
    request_model = (
        base_attrs.get("gen_ai.request.model")
        or base_attrs.get("llm.request.model")
        or base_attrs.get("ai.model.name")
        or "unknown"
    )

    invocation = LLMInvocation(
        request_model=str(request_model),
        attributes=base_attrs,
        messages=[],  # empty; original content not reconstructed here
    )

    if generator is None:
        generator = TraceloopSpanGenerator(capture_content=True)
    generator.start(invocation)
    if existing_span.end_time is not None:
        generator.finish(invocation)
    return invocation
@@ -0,0 +1,62 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Optional
from opentelemetry import trace
from opentelemetry.trace import Tracer
from ..types import LLMInvocation

class TraceloopSpanGenerator:
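    """Build Traceloop-style spans from ``LLMInvocation`` carrier objects.

    ``start`` opens a CLIENT span (honoring a ``_traceloop_new_name``
    override stashed in the invocation attributes) and applies the
    invocation attributes; ``finish`` re-applies them, detaches the span
    from context, and ends it.
    """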
    def __init__(self, tracer: Optional[Tracer] = None, capture_content: bool = False):
        self._tracer = tracer or trace.get_tracer(__name__)
        self._capture_content = capture_content

    def start(self, invocation: LLMInvocation):
        override = getattr(invocation, "attributes", {}).get("_traceloop_new_name")
        if override:
            span_name = override
        else:
            name = getattr(invocation, "request_model", "llm")
            span_name = f"chat {name}" if not str(name).startswith("chat ") else str(name)
        span = self._tracer.start_span(span_name, kind=trace.SpanKind.CLIENT)
        invocation.span = span
        invocation.context_token = trace.use_span(span, end_on_exit=False)
        invocation.context_token.__enter__()
        # apply starting attributes
        for k, v in getattr(invocation, "attributes", {}).items():
            try:
                span.set_attribute(k, v)
            except Exception:
                pass

    def finish(self, invocation: LLMInvocation):
        span = getattr(invocation, "span", None)
        if not span:
            return
        # re-apply attributes (after transformations)
        for k, v in getattr(invocation, "attributes", {}).items():
            try:
                span.set_attribute(k, v)
            except Exception:
                pass
        token = getattr(invocation, "context_token", None)
        if token and hasattr(token, "__exit__"):
            try:
                token.__exit__(None, None, None)
            except Exception:
                pass
        span.end()

    def error(self, error, invocation: LLMInvocation):  # pragma: no cover - unused in tests now
        self.finish(invocation)