8 changes: 4 additions & 4 deletions components-mdx/get-started/python-sdk.mdx
@@ -30,12 +30,12 @@ from langfuse import get_client
 langfuse = get_client()

 # Create a span using a context manager
-with langfuse.start_as_current_span(name="process-request") as span:
+with langfuse.start_as_current_observation(name="process-request") as span:
     # Your processing logic here
     span.update(output="Processing complete")

     # Create a nested generation for an LLM call
-    with langfuse.start_as_current_generation(name="llm-response", model="gpt-3.5-turbo") as generation:
+    with langfuse.start_as_current_observation(name="llm-response", model="gpt-3.5-turbo", as_type="generation") as generation:
         # Your LLM call logic here
         generation.update(output="Generated response")

@@ -57,13 +57,13 @@ from langfuse import get_client
 langfuse = get_client()

 # Create a span without a context manager
-span = langfuse.start_span(name="user-request")
+span = langfuse.start_observation(name="user-request")

 # Your processing logic here
 span.update(output="Request processed")

 # Child spans must be created using the parent span object
-nested_span = span.start_span(name="nested-span")
+nested_span = span.start_observation(name="nested-span")
 nested_span.update(output="Nested span output")

 # Important: Manually end the span
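Taken together, the two hunks above replace the span- and generation-specific constructors with the unified observation API. A minimal, self-contained sketch of the resulting pattern, assuming a Langfuse Python SDK v3 client configured via environment variables:

```python
from langfuse import get_client

langfuse = get_client()  # reads LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY / LANGFUSE_HOST

# Context-manager style: one method for spans and generations
with langfuse.start_as_current_observation(name="process-request") as span:
    with langfuse.start_as_current_observation(
        name="llm-response", model="gpt-3.5-turbo", as_type="generation"
    ) as generation:
        generation.update(output="Generated response")
    span.update(output="Processing complete")

# Manual style: observations must be ended explicitly
span = langfuse.start_observation(name="user-request")
nested_span = span.start_observation(name="nested-span")
nested_span.update(output="Nested span output")
nested_span.end()
span.end()

langfuse.flush()  # send buffered events before the process exits
```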
2 changes: 1 addition & 1 deletion components-mdx/integration-learn-more.mdx
@@ -51,7 +51,7 @@ from langfuse import get_client

 langfuse = get_client()

-with langfuse.start_as_current_span(name="my-trace") as span:
+with langfuse.start_as_current_observation(name="my-trace") as span:

     # Run your application here
     output = my_llm_call(input)
5 changes: 3 additions & 2 deletions components-mdx/prompt-linking.mdx
@@ -32,10 +32,11 @@ langfuse = get_client()

 prompt = langfuse.get_prompt("movie-critic")

-with langfuse.start_as_current_generation(
+with langfuse.start_as_current_observation(
     name="movie-generation",
     model="gpt-4o",
-    prompt=prompt
+    prompt=prompt,
+    as_type="generation"
 ) as generation:
     # Your LLM call here
     generation.update(output="LLM response")
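Because the hunk above is cut off before the example completes, here is a short sketch of the updated prompt-linking call; the prompt name comes from the diff, while the `movie` variable and the stubbed LLM call are illustrative assumptions:

```python
from langfuse import get_client

langfuse = get_client()

prompt = langfuse.get_prompt("movie-critic")

with langfuse.start_as_current_observation(
    name="movie-generation",
    model="gpt-4o",
    prompt=prompt,  # links this generation to the fetched prompt version
    as_type="generation"
) as generation:
    compiled_prompt = prompt.compile(movie="Dune 2")  # assumes the prompt declares a {{movie}} variable
    # ... call your LLM with compiled_prompt here ...
    generation.update(input=compiled_prompt, output="LLM response")
```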
18 changes: 9 additions & 9 deletions pages/docs/evaluation/evaluation-methods/custom-scores.mdx
@@ -54,7 +54,7 @@ langfuse.create_score(
 )

 # Method 2: Score current span/generation (within context)
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     # Score the current span
     span.score(
         name="correctness",
@@ -72,7 +72,7 @@ with langfuse.start_as_current_span(name="my-operation") as span:


 # Method 3: Score via the current context
-with langfuse.start_as_current_span(name="my-operation"):
+with langfuse.start_as_current_observation(name="my-operation"):
     # Score the current span
     langfuse.score_current_span(
         name="correctness",
@@ -108,7 +108,7 @@ langfuse.create_score(
 )

 # Method 2: Score current span/generation (within context)
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     # Score the current span
     span.score(
         name="accuracy",
@@ -125,7 +125,7 @@ with langfuse.start_as_current_span(name="my-operation") as span:
     )

 # Method 3: Score via the current context
-with langfuse.start_as_current_span(name="my-operation"):
+with langfuse.start_as_current_observation(name="my-operation"):
     # Score the current span
     langfuse.score_current_span(
         name="accuracy",
@@ -161,7 +161,7 @@ langfuse.create_score(
 )

 # Method 2: Score current span/generation (within context)
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     # Score the current span
     span.score(
         name="helpfulness",
@@ -177,7 +177,7 @@ with langfuse.start_as_current_span(name="my-operation") as span:
         data_type="BOOLEAN"
     )
 # Method 3: Score via the current context
-with langfuse.start_as_current_span(name="my-operation"):
+with langfuse.start_as_current_observation(name="my-operation"):
     # Score the current span
     langfuse.score_current_span(
         name="helpfulness",
@@ -325,7 +325,7 @@ langfuse.create_score(
 )

 # Method 2: Score within context
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     span.score(
         name="accuracy",
         value=0.9,
@@ -356,7 +356,7 @@ langfuse.create_score(
 )

 # Method 2: Score within context
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     span.score(
         name="correctness",
         value="correct",
@@ -387,7 +387,7 @@ langfuse.create_score(
 )

 # Method 2: Score within context
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     span.score(
         name="helpfulness",
         value=1,
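Every hunk in this file is truncated mid-example, so a consolidated sketch of the three scoring paths it touches may help; IDs, names, and values are placeholders:

```python
from langfuse import get_client

langfuse = get_client()

# Method 1: score any trace by ID, e.g. after the fact
langfuse.create_score(
    trace_id="abcdef1234567890abcdef1234567890",  # placeholder, 32 hex chars
    name="correctness",
    value=0.9,
    data_type="NUMERIC",
    comment="Factually accurate",
)

# Method 2: score a span/generation you hold a reference to
with langfuse.start_as_current_observation(name="my-operation") as span:
    span.score(name="correctness", value=0.9, data_type="NUMERIC")

# Method 3: score whatever observation is currently active in the context
with langfuse.start_as_current_observation(name="my-operation"):
    langfuse.score_current_span(name="helpfulness", value=1, data_type="BOOLEAN")
```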
2 changes: 1 addition & 1 deletion pages/docs/observability/features/environments.mdx
@@ -50,7 +50,7 @@ os.environ["LANGFUSE_TRACING_ENVIRONMENT"] = "production"
 langfuse = get_client()

 # All operations will now be associated with the "production" environment
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     # Your code here
     pass

9 changes: 5 additions & 4 deletions pages/docs/observability/features/log-levels.mdx
@@ -40,9 +40,9 @@ from langfuse import get_client
 langfuse = get_client()

 # Using context managers (recommended)
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     # Set level and status message on creation
-    with span.start_as_current_span(
+    with span.start_as_current_observation(
         name="potentially-risky-operation",
         level="WARNING",
         status_message="Operation may fail"
@@ -56,7 +56,7 @@ with langfuse.start_as_current_span(name="my-operation") as span:
         )

 # You can also update the currently active span without a direct reference
-with langfuse.start_as_current_span(name="another-operation"):
+with langfuse.start_as_current_observation(name="another-operation"):
     # ... some processing ...
     langfuse.update_current_span(
         level="DEBUG",
@@ -69,7 +69,8 @@ Levels can also be set when creating generations:
 ```python
 langfuse = get_client()

-with langfuse.start_as_current_generation(
+with langfuse.start_as_current_observation(
+    as_type="generation",
     name="llm-call",
     model="gpt-4o",
     level="DEFAULT" # Default level
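The final hunk ends before the generation block closes; below is a runnable sketch of setting a level on a generation with the unified call, where the ERROR escalation on failure is an illustrative addition rather than part of this diff:

```python
from langfuse import get_client

langfuse = get_client()

with langfuse.start_as_current_observation(
    as_type="generation",
    name="llm-call",
    model="gpt-4o",
    level="DEFAULT"  # levels: DEBUG, DEFAULT, WARNING, ERROR
) as generation:
    try:
        # ... call the model here ...
        generation.update(output="ok")
    except Exception as exc:
        # Escalate the observation level if the call fails
        generation.update(level="ERROR", status_message=str(exc))
        raise
```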
2 changes: 1 addition & 1 deletion pages/docs/observability/features/masking.mdx
@@ -80,7 +80,7 @@ from langfuse import Langfuse

 langfuse = Langfuse(mask=masking_function)

-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="sensitive-operation",
     input="SECRET_INPUT_DATA"
 ) as span:
9 changes: 5 additions & 4 deletions pages/docs/observability/features/metadata.mdx
@@ -46,7 +46,7 @@ from langfuse import get_client
 langfuse = get_client()

 # Add metadata at trace level
-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="process-request"
 ) as root_span:
     # Add metadata to the trace
@@ -56,7 +56,8 @@ with langfuse.start_as_current_span(
     root_span.update(metadata={"stage": "parsing"})

     # Create a child span with metadata
-    with root_span.start_as_current_generation(
+    with root_span.start_as_current_observation(
+        as_type="generation",
         name="generate-response",
         model="gpt-4o",
         metadata={"temperature": 0.7, "max_tokens": 1000}
@@ -69,7 +70,7 @@ You can add new keys to the metadata object by continuously updating the entity.
 We strongly discourage writing the same top-level key multiple times as this will produce undefined behaviour.

 ```python
-with langfuse.start_as_current_span(name="operation") as span:
+with langfuse.start_as_current_observation(name="operation") as span:
     # First write
     span.update(metadata={"status": "started"})

@@ -240,7 +241,7 @@ prompt = ChatPromptTemplate.from_template("Tell me a joke about {topic}")
 chain = prompt | llm

 # Set trace attributes dynamically via enclosing span
-with langfuse.start_as_current_span(name="dynamic-langchain-trace") as span:
+with langfuse.start_as_current_observation(name="dynamic-langchain-trace") as span:
     span.update_trace(
         metadata={"foo": "bar", "baz": "qux"}
     )
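Following the note above about writing each top-level metadata key only once, here is a small sketch of the safe pattern with the renamed call; the key names are illustrative:

```python
from langfuse import get_client

langfuse = get_client()

with langfuse.start_as_current_observation(name="operation") as span:
    # Each update adds a new top-level key instead of rewriting an existing one
    span.update(metadata={"status": "started"})
    # ... do work ...
    span.update(metadata={"row_count": 42})
    span.update(metadata={"completed": True})
```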
2 changes: 1 addition & 1 deletion pages/docs/observability/features/multi-modality.mdx
@@ -144,7 +144,7 @@ def process_document():

     langfuse = get_client()

-    with langfuse.start_as_current_span(name="analyze-document") as span: # Include media in the span input, output, or metadata
+    with langfuse.start_as_current_observation(name="analyze-document") as span: # Include media in the span input, output, or metadata
         span.update(
             input={"document": pdf_media},
             metadata={"file_size": len(pdf_bytes)}
7 changes: 4 additions & 3 deletions pages/docs/observability/features/releases-and-versioning.mdx
@@ -63,7 +63,7 @@ def process_data():

 # With context managers
 langfuse = get_client()
-with langfuse.start_as_current_span(name="my-operation") as span:
+with langfuse.start_as_current_observation(name="my-operation") as span:
     span.update_trace(release="v2.1.24")
 ```

@@ -136,14 +136,15 @@ from langfuse import get_client
 langfuse = get_client()

 # Set version when creating a span
-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="process-data",
     version="1.0"
 ) as span:
     # Processing...

     # Create a generation with version
-    with span.start_as_current_generation(
+    with span.start_as_current_observation(
+        as_type="generation",
         name="guess-countries",
         model="gpt-4o",
         version="1.0"
7 changes: 4 additions & 3 deletions pages/docs/observability/features/sessions.mdx
@@ -43,14 +43,15 @@ from langfuse import get_client
 langfuse = get_client()

 # You can set the session_id when creating the root span
-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="process-chat-message"
 ) as root_span:
     # Add session_id to the trace
     root_span.update_trace(session_id="chat-session-123")

     # All spans in this trace will belong to the same session
-    with root_span.start_as_current_generation(
+    with root_span.start_as_current_observation(
+        as_type="generation",
         name="generate-response",
         model="gpt-4o"
     ) as gen:
@@ -61,7 +62,7 @@ with langfuse.start_as_current_span(
 You can also update the session_id of the current trace without a direct reference to a span:

 ```python
-with langfuse.start_as_current_span(name="another-operation"):
+with langfuse.start_as_current_observation(name="another-operation"):
     # Add to the current trace
     langfuse.update_current_trace(session_id="your-session-id")
 ```
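Sessions group several traces into one conversation, so a short sketch of two separate traces sharing a session_id may be useful here; the ID and span names are placeholders:

```python
from langfuse import get_client

langfuse = get_client()
session_id = "chat-session-123"  # placeholder: one ID per conversation

# First user turn: its own trace, tagged with the session
with langfuse.start_as_current_observation(name="handle-turn-1") as span:
    span.update_trace(session_id=session_id)
    span.update(input="Hello!", output="Hi, how can I help?")

# Second user turn: a new trace, same session
with langfuse.start_as_current_observation(name="handle-turn-2") as span:
    span.update_trace(session_id=session_id)
    span.update(input="Tell me a joke.", output="...")
```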
6 changes: 3 additions & 3 deletions pages/docs/observability/features/tags.mdx
@@ -34,22 +34,22 @@ from langfuse import get_client
 langfuse = get_client()

 # Add tags when creating the root span
-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="my-operation"
 ) as root_span:
     # Add tags to the trace
     root_span.update_trace(tags=["tag-1", "tag-2"])

     # You can add more tags later from any span in the same trace
-    with root_span.start_as_current_generation(name="llm-call", model="gpt-4o") as gen:
+    with root_span.start_as_current_observation(as_type="generation", name="llm-call", model="gpt-4o") as gen:
         # Processing...
         gen.update_trace(tags=["llm-gen"]) # Adds another tag to the same trace
 ```

 You can also update the tags of the current trace without a direct reference to a span:

 ```python
-with langfuse.start_as_current_span(name="another-operation"):
+with langfuse.start_as_current_observation(name="another-operation"):
     # ... processing ...
     langfuse.update_current_trace(tags=["processing", "beta-feature"])
 ```
5 changes: 3 additions & 2 deletions pages/docs/observability/features/token-and-cost-tracking.mdx
@@ -117,10 +117,11 @@ import anthropic
 langfuse = get_client()
 anthropic_client = anthropic.Anthropic()

-with langfuse.start_as_current_generation(
+with langfuse.start_as_current_observation(
     name="anthropic-completion",
     model="claude-3-opus-20240229",
-    input=[{"role": "user", "content": "Hello, Claude"}]
+    input=[{"role": "user", "content": "Hello, Claude"}],
+    as_type="generation"
 ) as generation:
     response = anthropic_client.messages.create(
         model="claude-3-opus-20240229",
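The Anthropic hunk stops before any usage is reported back to the generation. A sketch of how that might be completed; the usage_details keys follow the SDK's input/output convention and are an assumption here, since the diff does not show them:

```python
import anthropic
from langfuse import get_client

langfuse = get_client()
anthropic_client = anthropic.Anthropic()

with langfuse.start_as_current_observation(
    name="anthropic-completion",
    model="claude-3-opus-20240229",
    input=[{"role": "user", "content": "Hello, Claude"}],
    as_type="generation"
) as generation:
    response = anthropic_client.messages.create(
        model="claude-3-opus-20240229",
        max_tokens=1024,
        messages=[{"role": "user", "content": "Hello, Claude"}],
    )
    # Report output and token counts so Langfuse can derive cost from its model prices
    generation.update(
        output=response.content[0].text,
        usage_details={
            "input": response.usage.input_tokens,    # assumption: "input"/"output" keys
            "output": response.usage.output_tokens,
        },
    )
```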
@@ -85,7 +85,7 @@ external_id = "request_12345"
 trace_id = langfuse.create_trace_id(seed=external_id)

 # Use this trace ID in a span
-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="process-request",
     trace_context={"trace_id": trace_id}
 ) as span:
@@ -101,7 +101,7 @@ from langfuse import get_client
 langfuse = get_client()

 # Use a predefined trace ID with trace_context parameter
-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="my-operation",
     trace_context={
         "trace_id": "abcdef1234567890abcdef1234567890", # Must be 32 hex chars
@@ -119,7 +119,7 @@ from langfuse import get_client

 langfuse = get_client()

-with langfuse.start_as_current_span(name="outer-operation") as span:
+with langfuse.start_as_current_observation(name="outer-operation") as span:
     # Access the trace ID of the current span
     current_trace_id = langfuse.get_current_trace_id()
     current_span_id = langfuse.get_current_observation_id()
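A sketch tying the three hunks above together: derive a deterministic trace ID from an external request ID, trace under it, and attach a score later using only that external ID; the request ID and score are placeholders:

```python
from langfuse import get_client

langfuse = get_client()

external_id = "request_12345"  # placeholder ID from an upstream system
trace_id = langfuse.create_trace_id(seed=external_id)  # deterministic 32-char hex ID

with langfuse.start_as_current_observation(
    name="process-request",
    trace_context={"trace_id": trace_id}
) as span:
    span.update(output="Request processed")

# Later, e.g. when user feedback arrives, recompute the same ID and score the trace
langfuse.create_score(
    trace_id=langfuse.create_trace_id(seed=external_id),
    name="user-feedback",
    value=1,
    data_type="BOOLEAN",
)
```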
4 changes: 2 additions & 2 deletions pages/docs/observability/features/url.mdx
@@ -39,7 +39,7 @@ from langfuse import get_client

 langfuse = get_client()

-with langfuse.start_as_current_span(name="process-request") as span:
+with langfuse.start_as_current_observation(name="process-request") as span:
     # Get the URL of this trace
     trace_url = langfuse.get_trace_url()
     print(f"View trace at: {trace_url}")
@@ -133,7 +133,7 @@ from langfuse import get_client

 langfuse = get_client()

-with langfuse.start_as_current_span(name="process-request") as span:
+with langfuse.start_as_current_observation(name="process-request") as span:
     # Make this trace public
     span.update_trace(public=True)

9 changes: 5 additions & 4 deletions pages/docs/observability/features/users.mdx
@@ -41,16 +41,17 @@ from langfuse import get_client
 langfuse = get_client()

 # You can set the user_id when creating the root span via update_trace
-with langfuse.start_as_current_span(
+with langfuse.start_as_current_observation(
     name="process-user-request"
 ) as root_span:
     # Add user_id to the trace
     root_span.update_trace(user_id="user_12345")

     # All spans in this trace will be associated with this user
-    with root_span.start_as_current_generation(
+    with root_span.start_as_current_observation(
         name="generate-response",
-        model="gpt-4o"
+        model="gpt-4o",
+        as_type="generation"
     ) as gen:
         # ...generate response...
         pass
@@ -59,7 +60,7 @@ with langfuse.start_as_current_span(
 You can also update the user_id of the current trace without a direct reference to a span:

 ```python
-with langfuse.start_as_current_span(name="handle-user-interaction"):
+with langfuse.start_as_current_observation(name="handle-user-interaction"):
     # Add user_id to the current trace
     langfuse.update_current_trace(user_id="user_12345")
 ```