diff --git a/examples/tracing/agent/README.md b/examples/tracing/agent/README.md
index c1721bbdf..debaf75c4 100644
--- a/examples/tracing/agent/README.md
+++ b/examples/tracing/agent/README.md
@@ -10,6 +10,16 @@ The tracing implementation will log spans to the console for all agent methods.
### Exporting to Collector
-If desired, [install Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/) and then update the `mcp_agent.config.yaml` for this example to have `otel.otlp_settings.endpoint` point to the collector endpoint (e.g. `http://localhost:4318/v1/traces` is the default for Jaeger via HTTP).
+If desired, [install Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/) and then update the `mcp_agent.config.yaml` to include a typed OTLP exporter with the collector endpoint (e.g. `http://localhost:4318/v1/traces`):
+
+```yaml
+otel:
+ enabled: true
+ exporters:
+ - type: console
+ - type: file
+ - type: otlp
+ endpoint: "http://localhost:4318/v1/traces"
+```
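+
+The legacy string-list form is still accepted for backward compatibility and is up-converted to the typed exporters above, sourcing the endpoint from `otlp_settings` if present (a sketch of the older style, shown for comparison):
+
+```yaml
+otel:
+  enabled: true
+  exporters: ["console", "file", "otlp"]
+  otlp_settings:
+    endpoint: "http://localhost:4318/v1/traces"
+```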
diff --git a/examples/tracing/agent/mcp_agent.config.yaml b/examples/tracing/agent/mcp_agent.config.yaml
index c653064a9..47f3f8c7c 100644
--- a/examples/tracing/agent/mcp_agent.config.yaml
+++ b/examples/tracing/agent/mcp_agent.config.yaml
@@ -26,8 +26,11 @@ openai:
otel:
enabled: true
- exporters: ["console", "file"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
+ exporters: [
+ { type: console },
+ { type: file },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
+
+ ]
service_name: "BasicTracingAgentExample"
diff --git a/examples/tracing/langfuse/README.md b/examples/tracing/langfuse/README.md
index 355b1d93e..76cac23e4 100644
--- a/examples/tracing/langfuse/README.md
+++ b/examples/tracing/langfuse/README.md
@@ -1,7 +1,6 @@
# Langfuse Trace Exporter Example
-This example shows how to configure a Langfuse OTLP trace exporter for use in `mcp-agent` by configuring the
-`otel.otlp_settings` with the expected endpoint and headers.
+This example shows how to export `mcp-agent` traces to Langfuse by adding a typed OTLP exporter with the expected endpoint and headers.
Following information from https://langfuse.com/integrations/native/opentelemetry
## `1` App set up
@@ -47,7 +46,7 @@ Obtain a secret and public API key for your desired Langfuse project and then ge
echo -n "pk-your-public-key:sk-your-secret-key" | base64
```
-In `mcp_agent.secrets.yaml` set the Authorization header for OTLP:
+In `mcp_agent.secrets.yaml`, set the Authorization header for OTLP (these headers are merged automatically with the typed exporter settings):
```yaml
otel:
@@ -56,8 +55,15 @@ otel:
Authorization: "Basic AUTH_STRING"
```
-Lastly, ensure the proper trace endpoint is configured for the `otel.otlp_settings.endpoint` in `mcp_agent.yaml` for the relevant
-Langfuse data region.
+Lastly, ensure the proper trace endpoint is configured in the typed exporter in `mcp_agent.config.yaml` for your Langfuse region, e.g.:
+
+```yaml
+otel:
+ enabled: true
+ exporters:
+ - type: otlp
+ endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces"
+```
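+
+If you prefer to keep everything in one file, the same header can instead be set inline on the typed exporter (a sketch; keep real keys in `mcp_agent.secrets.yaml` rather than committing them):
+
+```yaml
+otel:
+  enabled: true
+  exporters:
+    - type: otlp
+      endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces"
+      headers:
+        Authorization: "Basic AUTH_STRING"
+```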
## `4` Run locally
diff --git a/examples/tracing/langfuse/mcp_agent.config.yaml b/examples/tracing/langfuse/mcp_agent.config.yaml
index f44e27f8d..536a7a483 100644
--- a/examples/tracing/langfuse/mcp_agent.config.yaml
+++ b/examples/tracing/langfuse/mcp_agent.config.yaml
@@ -26,8 +26,6 @@ openai:
otel:
enabled: true
- exporters: ["otlp"]
- otlp_settings:
- endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces"
- # Set Authorization header with API key in mcp_agent.secrets.yaml
+ exporters: [{ type: otlp, endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces" }]
+ # Set Authorization header with API key in mcp_agent.secrets.yaml
service_name: "BasicTracingLangfuseExample"
diff --git a/examples/tracing/langfuse/mcp_agent.secrets.yaml.example b/examples/tracing/langfuse/mcp_agent.secrets.yaml.example
index 1d1dbfa1e..aefc0b521 100644
--- a/examples/tracing/langfuse/mcp_agent.secrets.yaml.example
+++ b/examples/tracing/langfuse/mcp_agent.secrets.yaml.example
@@ -7,6 +7,7 @@ anthropic:
api_key: anthropic_api_key
otel:
+ # Headers are merged with typed OTLP exporter settings
otlp_settings:
headers:
Authorization: "Basic "
diff --git a/examples/tracing/llm/README.md b/examples/tracing/llm/README.md
index ee7d6cd8b..fa44302ae 100644
--- a/examples/tracing/llm/README.md
+++ b/examples/tracing/llm/README.md
@@ -10,6 +10,29 @@ The tracing implementation will log spans to the console for all AugmentedLLM me
### Exporting to Collector
-If desired, [install Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/) and then update the `mcp_agent.config.yaml` for this example to have `otel.otlp_settings.endpoint` point to the collector endpoint (e.g. `http://localhost:4318/v1/traces` is the default for Jaeger via HTTP).
+If desired, [install Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/):
+
+```
+docker run \
+ --rm --name jaeger \
+ -p 16686:16686 \
+ -p 4317:4317 \
+ -p 4318:4318 \
+ -p 5778:5778 \
+ -p 9411:9411 \
+ jaegertracing/jaeger:2.5.0
+```
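+
+The Jaeger UI should then be reachable at http://localhost:16686 for inspecting exported traces.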
+
+Then update the `mcp_agent.config.yaml` to include a typed OTLP exporter with the collector endpoint (e.g. `http://localhost:4318/v1/traces`):
+
+```yaml
+otel:
+ enabled: true
+ exporters:
+ - type: console
+ - type: file
+ - type: otlp
+ endpoint: "http://localhost:4318/v1/traces"
+```
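+
+The file exporter can also take an optional `path` or `path_settings` block to control where trace files are written (a sketch using the path settings fields from this repo's config schema):
+
+```yaml
+otel:
+  enabled: true
+  exporters:
+    - type: file
+      path_settings:
+        path_pattern: "traces/mcp-agent-trace-{unique_id}.jsonl"
+        unique_id: "timestamp"
+        timestamp_format: "%Y%m%d_%H%M%S"
+    - type: otlp
+      endpoint: "http://localhost:4318/v1/traces"
+```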
diff --git a/examples/tracing/llm/main.py b/examples/tracing/llm/main.py
index 04416eb36..bda339f03 100644
--- a/examples/tracing/llm/main.py
+++ b/examples/tracing/llm/main.py
@@ -1,5 +1,6 @@
import asyncio
import time
+from typing import Dict
from pydantic import BaseModel
@@ -16,13 +17,25 @@
app = MCPApp(name="llm_tracing_example")
-class CountryInfo(BaseModel):
- """Model representing structured data for country information."""
+class CountryRecord(BaseModel):
+ """Single country's structured data."""
capital: str
population: int
+class CountryInfo(BaseModel):
+ """Structured response containing multiple countries."""
+
+ countries: Dict[str, CountryRecord]
+
+ def summary(self) -> str:
+ return ", ".join(
+ f"{country}: {info.capital} (pop {info.population:,})"
+ for country, info in self.countries.items()
+ )
+
+
async def llm_tracing():
async with app.run() as agent_app:
logger = agent_app.logger
@@ -51,11 +64,18 @@ async def _trace_openai():
result_structured = await openai_llm.generate_structured(
MessageParam(
role="user",
- content="Give JSON representing the the capitals and populations of the following countries: France, Ireland, Italy",
+ content=(
+ "Return JSON under a top-level `countries` object. "
+ "Within `countries`, each key should be the country name (France, Ireland, Italy) "
+ "with values containing `capital` and `population`."
+ ),
),
response_model=CountryInfo,
)
- logger.info(f"openai_llm structured result: {result_structured}")
+ logger.info(
+ "openai_llm structured result",
+ data=result_structured.model_dump(mode="json"),
+ )
async def _trace_anthropic():
# Agent-integrated LLM (Anthropic)
@@ -73,11 +93,18 @@ async def _trace_anthropic():
result_structured = await llm.generate_structured(
MessageParam(
role="user",
- content="Give JSON representing the the capitals and populations of the following countries: France, Germany, Belgium",
+ content=(
+ "Return JSON under a top-level `countries` object. "
+ "Within `countries`, each key should be the country name (France, Germany, Belgium) "
+ "with values containing `capital` and `population`."
+ ),
),
response_model=CountryInfo,
)
- logger.info(f"llm_agent structured result: {result_structured}")
+ logger.info(
+ "llm_agent structured result",
+ data=result_structured.model_dump(mode="json"),
+ )
async def _trace_azure():
# Azure
@@ -93,11 +120,18 @@ async def _trace_azure():
result_structured = await azure_llm.generate_structured(
MessageParam(
role="user",
- content="Give JSON representing the the capitals and populations of the following countries: Spain, Portugal, Italy",
+ content=(
+ "Return JSON under a top-level `countries` object. "
+ "Within `countries`, each key should be the country name (Spain, Portugal, Italy) "
+ "with values containing `capital` and `population`."
+ ),
),
response_model=CountryInfo,
)
- logger.info(f"azure_llm structured result: {result_structured}")
+ logger.info(
+ "azure_llm structured result",
+ data=result_structured.model_dump(mode="json"),
+ )
await asyncio.gather(
_trace_openai(),
diff --git a/examples/tracing/llm/mcp_agent.config.yaml b/examples/tracing/llm/mcp_agent.config.yaml
index 7e5418f80..3d3eedf7e 100644
--- a/examples/tracing/llm/mcp_agent.config.yaml
+++ b/examples/tracing/llm/mcp_agent.config.yaml
@@ -26,8 +26,11 @@ openai:
otel:
enabled: true
- exporters: ["console", "file"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
+ exporters: [
+ { type: console },
+ { type: file },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
+ ]
+
service_name: "BasicTracingLLMExample"
diff --git a/examples/tracing/mcp/README.md b/examples/tracing/mcp/README.md
index c854f5f58..caf4dcb80 100644
--- a/examples/tracing/mcp/README.md
+++ b/examples/tracing/mcp/README.md
@@ -48,7 +48,15 @@ Then open `mcp_agent.secrets.yaml` and add your api key for your preferred LLM f
## `3` Configure Jaeger Collector
-[Run Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/) and then update the `mcp_agent.config.yaml` for this example to have `otel.otlp_settings.endpoint` point to the collector endpoint (e.g. `http://localhost:4318/v1/traces` is the default for Jaeger via HTTP).
+[Run Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/) and then update the `mcp_agent.config.yaml` to include a typed OTLP exporter with the collector endpoint (e.g. `http://localhost:4318/v1/traces`):
+
+```yaml
+otel:
+ enabled: true
+ exporters:
+ - type: otlp
+ endpoint: "http://localhost:4318/v1/traces"
+```
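+
+The same settings can also be constructed programmatically (a minimal sketch using the typed settings models from `mcp_agent.config`):
+
+```python
+from mcp_agent.config import OpenTelemetrySettings, OTLPExporterSettings
+
+otel_settings = OpenTelemetrySettings(
+    enabled=True,
+    exporters=[OTLPExporterSettings(endpoint="http://localhost:4318/v1/traces")],
+    service_name="MCPAgentSSEExample",
+)
+```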
## `4` Run locally
diff --git a/examples/tracing/mcp/mcp_agent.config.yaml b/examples/tracing/mcp/mcp_agent.config.yaml
index 63b56586e..00e3a4872 100644
--- a/examples/tracing/mcp/mcp_agent.config.yaml
+++ b/examples/tracing/mcp/mcp_agent.config.yaml
@@ -17,8 +17,5 @@ openai:
otel:
enabled: true
- exporters: ["otlp"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- otlp_settings:
- endpoint: "http://localhost:4318/v1/traces"
+ exporters: [{ type: otlp, endpoint: "http://localhost:4318/v1/traces" }]
service_name: "MCPAgentSSEExample"
diff --git a/examples/tracing/temporal/README.md b/examples/tracing/temporal/README.md
index 2cc35a5e0..d663d29ad 100644
--- a/examples/tracing/temporal/README.md
+++ b/examples/tracing/temporal/README.md
@@ -47,7 +47,15 @@ To run any of these examples, you'll need to:
3. Configure Jaeger Collector
-[Run Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/) and then ensure the `mcp_agent.config.yaml` for this example has `otel.otlp_settings.endpoint` point to the collector endpoint (e.g. `http://localhost:4318/v1/traces` is the default for Jaeger via HTTP).
+[Run Jaeger locally](https://www.jaegertracing.io/docs/2.5/getting-started/) and then ensure the `mcp_agent.config.yaml` for this example includes a typed OTLP exporter with the collector endpoint:
+
+```yaml
+otel:
+ enabled: true
+ exporters:
+ - type: otlp
+ endpoint: "http://localhost:4318/v1/traces"
+```
4. In a separate terminal, start the worker:
diff --git a/examples/tracing/temporal/mcp_agent.config.yaml b/examples/tracing/temporal/mcp_agent.config.yaml
index 5b211bad1..65c9efbce 100644
--- a/examples/tracing/temporal/mcp_agent.config.yaml
+++ b/examples/tracing/temporal/mcp_agent.config.yaml
@@ -45,7 +45,9 @@ openai:
otel:
enabled: true
- exporters: ["file", "otlp"]
- otlp_settings:
- endpoint: "http://localhost:4318/v1/traces"
+ exporters:
+ [
+ { type: file },
+ { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
+ ]
service_name: "TemporalTracingExample"
diff --git a/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml b/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml
index 80ba11c4f..0ed34e2dd 100644
--- a/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml
@@ -24,8 +24,17 @@ openai:
otel:
enabled: true
- exporters: ["file"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
+ exporters: [
+ {
+ type: file,
+ path_settings:
+ {
+ path_pattern: "traces/mcp-agent-trace-{unique_id}.jsonl",
+ unique_id: "timestamp",
+ timestamp_format: "%Y%m%d_%H%M%S",
+ },
+ },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
+ ]
service_name: "AdaptiveWorkflowExample"
diff --git a/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml b/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml
index 283a63672..b4b46f570 100644
--- a/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml
@@ -11,14 +11,14 @@ execution_engine: asyncio
# Logging configuration
logger:
- type: console # Log output type (console, file, or http)
- level: debug # Logging level (debug, info, warning, error)
- batch_size: 100 # Number of logs to batch before sending
- flush_interval: 2 # Interval in seconds to flush logs
- max_queue_size: 2048 # Maximum queue size for buffered logs
- http_endpoint: # Optional: HTTP endpoint for remote logging
- http_headers: # Optional: Headers for HTTP logging
- http_timeout: 5 # Timeout for HTTP logging requests
+ type: console # Log output type (console, file, or http)
+ level: debug # Logging level (debug, info, warning, error)
+ batch_size: 100 # Number of logs to batch before sending
+ flush_interval: 2 # Interval in seconds to flush logs
+ max_queue_size: 2048 # Maximum queue size for buffered logs
+ http_endpoint: # Optional: HTTP endpoint for remote logging
+ http_headers: # Optional: Headers for HTTP logging
+ http_timeout: 5 # Timeout for HTTP logging requests
# MCP (Model Context Protocol) server configuration
mcp:
@@ -36,13 +36,14 @@ mcp:
# OpenAI configuration
openai:
# API keys are stored in mcp_agent.secrets.yaml (gitignored for security)
- default_model: gpt-5 # Default model for OpenAI API calls
+ default_model: gpt-5 # Default model for OpenAI API calls
# OpenTelemetry (OTEL) configuration for distributed tracing
otel:
- enabled: false # Set to true to enable tracing
- exporters: ["console"] # Trace exporters (console, otlp)
- # Uncomment below to export traces to Jaeger running locally
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
- service_name: "WorkflowEvaluatorOptimizerExample" # Service name in traces
+ enabled: false
+ exporters: [
+ { type: console },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
+ ]
+ service_name: "WorkflowEvaluatorOptimizerExample"
diff --git a/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml b/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml
index 56bdea49b..1b15b9db3 100644
--- a/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml
@@ -21,8 +21,9 @@ openai:
otel:
enabled: false
- exporters: ["console"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
+ exporters: [
+ { type: console },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
+ ]
service_name: "WorkflowIntentClassifierExample"
diff --git a/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml b/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml
index c7b4f1468..c91192eb3 100644
--- a/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml
@@ -26,8 +26,9 @@ openai:
otel:
enabled: false
- exporters: ["console"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
+ exporters: [
+ { type: console },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
+ ]
service_name: "WorkflowOrchestratorWorkerExample"
diff --git a/examples/workflows/workflow_parallel/mcp_agent.config.yaml b/examples/workflows/workflow_parallel/mcp_agent.config.yaml
index a068721ef..0e0dfc810 100644
--- a/examples/workflows/workflow_parallel/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_parallel/mcp_agent.config.yaml
@@ -25,8 +25,10 @@ openai:
otel:
enabled: false
- exporters: ["console"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
+ exporters: [
+ { type: console },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
+ ]
+
service_name: "WorkflowParallelExample"
diff --git a/examples/workflows/workflow_router/mcp_agent.config.yaml b/examples/workflows/workflow_router/mcp_agent.config.yaml
index 7c5ac6c34..5265d8c11 100644
--- a/examples/workflows/workflow_router/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_router/mcp_agent.config.yaml
@@ -21,8 +21,9 @@ openai:
otel:
enabled: false
- exporters: ["console"]
- # If running jaeger locally, uncomment the following lines and add "otlp" to the exporters list
- # otlp_settings:
- # endpoint: "http://localhost:4318/v1/traces"
+ exporters: [
+ { type: console },
+ # To export to a collector, also include:
+ # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
+ ]
service_name: "WorkflowRouterExample"
diff --git a/schema/mcp-agent.config.schema.json b/schema/mcp-agent.config.schema.json
index 18b388a2e..00ee3cc4a 100644
--- a/schema/mcp-agent.config.schema.json
+++ b/schema/mcp-agent.config.schema.json
@@ -314,6 +314,55 @@
"title": "CohereSettings",
"type": "object"
},
+ "ConsoleExporterSettings": {
+ "additionalProperties": true,
+ "properties": {
+ "type": {
+ "const": "console",
+ "default": "console",
+ "title": "Type",
+ "type": "string"
+ }
+ },
+ "title": "ConsoleExporterSettings",
+ "type": "object"
+ },
+ "FileExporterSettings": {
+ "additionalProperties": true,
+ "properties": {
+ "type": {
+ "const": "file",
+ "default": "file",
+ "title": "Type",
+ "type": "string"
+ },
+ "path": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Path"
+ },
+ "path_settings": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/TracePathSettings"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null
+ }
+ },
+ "title": "FileExporterSettings",
+ "type": "object"
+ },
"GoogleSettings": {
"additionalProperties": true,
"description": "Settings for using Google models in the MCP Agent application.",
@@ -786,6 +835,46 @@
"title": "MCPSettings",
"type": "object"
},
+ "OTLPExporterSettings": {
+ "additionalProperties": true,
+ "properties": {
+ "type": {
+ "const": "otlp",
+ "default": "otlp",
+ "title": "Type",
+ "type": "string"
+ },
+ "endpoint": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Endpoint"
+ },
+ "headers": {
+ "anyOf": [
+ {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "type": "object"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Headers"
+ }
+ },
+ "title": "OTLPExporterSettings",
+ "type": "object"
+ },
"OpenAISettings": {
"additionalProperties": true,
"description": "Settings for using OpenAI models in the MCP Agent application.",
@@ -879,16 +968,40 @@
"exporters": {
"default": [],
"items": {
- "enum": [
- "console",
- "file",
- "otlp"
- ],
- "type": "string"
+ "anyOf": [
+ {
+ "enum": [
+ "console",
+ "file",
+ "otlp"
+ ],
+ "type": "string"
+ },
+ {
+ "discriminator": {
+ "mapping": {
+ "console": "#/$defs/ConsoleExporterSettings",
+ "file": "#/$defs/FileExporterSettings",
+ "otlp": "#/$defs/OTLPExporterSettings"
+ },
+ "propertyName": "type"
+ },
+ "oneOf": [
+ {
+ "$ref": "#/$defs/ConsoleExporterSettings"
+ },
+ {
+ "$ref": "#/$defs/FileExporterSettings"
+ },
+ {
+ "$ref": "#/$defs/OTLPExporterSettings"
+ }
+ ]
+ }
+ ]
},
"title": "Exporters",
- "type": "array",
- "description": "List of exporters to use (can enable multiple simultaneously)"
+ "type": "array"
},
"service_name": {
"default": "mcp-agent",
@@ -935,30 +1048,7 @@
}
],
"default": null,
- "description": "OTLP settings for OpenTelemetry tracing. Required if using otlp exporter."
- },
- "path": {
- "anyOf": [
- {
- "type": "string"
- },
- {
- "type": "null"
- }
- ],
- "default": null,
- "title": "Path"
- },
- "path_settings": {
- "anyOf": [
- {
- "$ref": "#/$defs/TracePathSettings"
- },
- {
- "type": "null"
- }
- ],
- "default": null
+ "description": "Deprecated single OTLP settings. Prefer exporters list with type \"otlp\"."
}
},
"title": "OpenTelemetrySettings",
@@ -1096,16 +1186,8 @@
"description": "Settings for OTLP exporter in OpenTelemetry.",
"properties": {
"endpoint": {
- "anyOf": [
- {
- "type": "string"
- },
- {
- "type": "null"
- }
- ],
- "default": null,
"title": "Endpoint",
+ "type": "string",
"description": "OTLP endpoint for exporting traces."
},
"headers": {
@@ -1125,6 +1207,9 @@
"description": "Optional headers for OTLP exporter."
}
},
+ "required": [
+ "endpoint"
+ ],
"title": "TraceOTLPSettings",
"type": "object"
},
@@ -1320,9 +1405,7 @@
"service_instance_id": null,
"service_version": null,
"sample_rate": 1.0,
- "otlp_settings": null,
- "path": null,
- "path_settings": null
+ "otlp_settings": null
},
"description": "OpenTelemetry logging settings for the MCP Agent application"
},
diff --git a/src/mcp_agent/config.py b/src/mcp_agent/config.py
index 5afd81a23..0ddbd9676 100644
--- a/src/mcp_agent/config.py
+++ b/src/mcp_agent/config.py
@@ -4,14 +4,21 @@
"""
import sys
+from httpx import URL
from io import StringIO
from pathlib import Path
-from typing import Dict, List, Literal, Optional, Set
+from typing import Annotated, Dict, List, Literal, Optional, Set, Union
import threading
import warnings
-from httpx import URL
-from pydantic import AliasChoices, BaseModel, ConfigDict, Field, field_validator
+from pydantic import (
+ AliasChoices,
+ BaseModel,
+ ConfigDict,
+ Field,
+ field_validator,
+ model_validator,
+)
from pydantic_settings import BaseSettings, SettingsConfigDict
import yaml
@@ -107,8 +114,11 @@ class MCPServerSettings(BaseModel):
"""Environment variables to pass to the server process."""
allowed_tools: Set[str] | None = None
- """Set of tool names to allow from this server. If specified, only these tools will be exposed to agents.
- Tool names should match exactly. [WARNING] Empty list will result LLM have no access to tools."""
+ """
+ Set of tool names to allow from this server. If specified, only these tools will be exposed to agents.
+ Tool names should match exactly.
+    Note: an empty list will result in the agent having no access to tools.
+ """
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
@@ -466,7 +476,7 @@ class TraceOTLPSettings(BaseModel):
Settings for OTLP exporter in OpenTelemetry.
"""
- endpoint: str | None = None
+ endpoint: str
"""OTLP endpoint for exporting traces."""
headers: Dict[str, str] | None = None
@@ -475,6 +485,44 @@ class TraceOTLPSettings(BaseModel):
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+class OpenTelemetryExporterBase(BaseModel):
+ """
+ Base class for OpenTelemetry exporter configuration.
+
+ This is used as the discriminated base for exporter-specific configs.
+ """
+
+ type: Literal["console", "file", "otlp"]
+
+ model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
+
+class ConsoleExporterSettings(OpenTelemetryExporterBase):
+ type: Literal["console"] = "console"
+
+
+class FileExporterSettings(OpenTelemetryExporterBase):
+ type: Literal["file"] = "file"
+ path: str | None = None
+ path_settings: TracePathSettings | None = None
+
+
+class OTLPExporterSettings(OpenTelemetryExporterBase):
+ type: Literal["otlp"] = "otlp"
+ endpoint: str | None = None
+ headers: Dict[str, str] | None = None
+
+
+OpenTelemetryExporterSettings = Annotated[
+ Union[
+ ConsoleExporterSettings,
+ FileExporterSettings,
+ OTLPExporterSettings,
+ ],
+ Field(discriminator="type"),
+]
+
+
class OpenTelemetrySettings(BaseModel):
"""
OTEL settings for the MCP Agent application.
@@ -482,8 +530,17 @@ class OpenTelemetrySettings(BaseModel):
enabled: bool = False
- exporters: List[Literal["console", "file", "otlp"]] = []
- """List of exporters to use (can enable multiple simultaneously)"""
+ exporters: List[
+ Union[Literal["console", "file", "otlp"], OpenTelemetryExporterSettings]
+ ] = []
+ """
+ Exporters to use (can enable multiple simultaneously). Each exporter has
+ its own typed configuration.
+
+ Backward compatible: a YAML list of literal strings (e.g. ["console", "otlp"]) is
+ accepted and will be transformed, sourcing settings from legacy fields
+ like `otlp_settings`, `path` and `path_settings` if present.
+ """
service_name: str = "mcp-agent"
service_instance_id: str | None = None
@@ -492,23 +549,128 @@ class OpenTelemetrySettings(BaseModel):
sample_rate: float = 1.0
"""Sample rate for tracing (1.0 = sample everything)"""
+ # Deprecated: use exporters: [{ type: "otlp", ... }]
otlp_settings: TraceOTLPSettings | None = None
- """OTLP settings for OpenTelemetry tracing. Required if using otlp exporter."""
+ """Deprecated single OTLP settings. Prefer exporters list with type "otlp"."""
- path: str | None = None
- """
- Direct path for trace file. If specified, this takes precedence over path_settings.
- Useful for test scenarios where you want full control over the trace file location.
- """
+ model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
- # Settings for advanced trace path configuration for file exporter
- path_settings: TracePathSettings | None = None
- """
- Save trace files with more advanced path semantics, like having timestamps or session id in the trace name.
- Ignored if 'path' is specified.
- """
+ @model_validator(mode="before")
+ @classmethod
+ def _coerce_exporters_schema(cls, data: Dict) -> Dict:
+ """
+ Backward compatibility shim to allow:
+ - exporters: ["console", "file", "otlp"] with legacy per-exporter fields
+ - exporters already in discriminated-union form
+ """
+ if not isinstance(data, dict):
+ return data
+
+ exporters = data.get("exporters")
+
+ # If exporters are already objects with a 'type', leave as-is
+ if isinstance(exporters, list) and all(
+ isinstance(e, dict) and "type" in e for e in exporters
+ ):
+ return data
+
+ # If exporters are literal strings, up-convert to typed configs
+ if isinstance(exporters, list) and all(isinstance(e, str) for e in exporters):
+ typed_exporters: List[Dict] = []
+
+ # Legacy helpers (can arrive as dicts or BaseModel instances)
+ legacy_otlp = data.get("otlp_settings")
+ if isinstance(legacy_otlp, BaseModel):
+ legacy_otlp = legacy_otlp.model_dump(exclude_none=True)
+ elif not isinstance(legacy_otlp, dict):
+ legacy_otlp = {}
+
+ legacy_path = data.get("path")
+ legacy_path_settings = data.get("path_settings")
+ if isinstance(legacy_path_settings, BaseModel):
+ legacy_path_settings = legacy_path_settings.model_dump(
+ exclude_none=True
+ )
+
+ for name in exporters:
+ if name == "console":
+ typed_exporters.append({"type": "console"})
+ elif name == "file":
+ typed_exporters.append(
+ {
+ "type": "file",
+ "path": legacy_path,
+ "path_settings": legacy_path_settings,
+ }
+ )
+ elif name == "otlp":
+ typed_exporters.append(
+ {
+ "type": "otlp",
+ "endpoint": (legacy_otlp or {}).get("endpoint"),
+ "headers": (legacy_otlp or {}).get("headers"),
+ }
+ )
+ else:
+ raise ValueError(
+ f"Unsupported OpenTelemetry exporter '{name}'. "
+ "Supported exporters: console, file, otlp."
+ )
+
+ # Overwrite with transformed list
+ data["exporters"] = typed_exporters
+
+ return data
+
+ @model_validator(mode="after")
+ def _finalize_exporters(cls, values: "OpenTelemetrySettings"):
+ """Ensure exporters are instantiated as typed configs even if literals were provided."""
+
+ typed_exporters: List[OpenTelemetryExporterSettings] = []
+
+ legacy_path = getattr(values, "path", None)
+ legacy_path_settings = getattr(values, "path_settings", None)
+ if isinstance(legacy_path_settings, dict):
+ legacy_path_settings = TracePathSettings.model_validate(legacy_path_settings)
+
+ for exporter in values.exporters:
+ if isinstance(exporter, OpenTelemetryExporterBase):
+ typed_exporters.append(exporter) # Already typed
+ continue
+
+ if exporter == "console":
+ typed_exporters.append(ConsoleExporterSettings())
+ elif exporter == "file":
+ typed_exporters.append(
+ FileExporterSettings(
+ path=legacy_path,
+ path_settings=legacy_path_settings,
+ )
+ )
+ elif exporter == "otlp":
+ endpoint = None
+ headers = None
+ if values.otlp_settings:
+ endpoint = getattr(values.otlp_settings, "endpoint", None)
+ headers = getattr(values.otlp_settings, "headers", None)
+ typed_exporters.append(
+ OTLPExporterSettings(endpoint=endpoint, headers=headers)
+ )
+ else: # pragma: no cover - safeguarded by pre-validator, but keep defensive path
+ raise ValueError(
+ f"Unsupported OpenTelemetry exporter '{exporter}'. "
+ "Supported exporters: console, file, otlp."
+ )
- model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+ values.exporters = typed_exporters
+
+ # Remove legacy extras once we've consumed them to avoid leaking into dumps
+ if hasattr(values, "path"):
+ delattr(values, "path")
+ if hasattr(values, "path_settings"):
+ delattr(values, "path_settings")
+
+ return values
class LogPathSettings(BaseModel):
diff --git a/src/mcp_agent/tracing/tracer.py b/src/mcp_agent/tracing/tracer.py
index 701df3f29..94503a7e7 100644
--- a/src/mcp_agent/tracing/tracer.py
+++ b/src/mcp_agent/tracing/tracer.py
@@ -4,6 +4,7 @@
from opentelemetry.propagate import set_global_textmap
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
+from opentelemetry.sdk.trace.sampling import ParentBased, TraceIdRatioBased
from opentelemetry.sdk.trace.export import BatchSpanProcessor, ConsoleSpanExporter
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
@@ -91,23 +92,63 @@ async def configure(
}
)
- # Create provider with resource
- tracer_provider = TracerProvider(resource=resource)
+ # Create provider with resource and optional sampler (respect sample_rate when explicitly set)
+ sampler = None
+ if (
+ "sample_rate" in settings.model_fields_set
+ and settings.sample_rate is not None
+ ):
+ sample_rate = settings.sample_rate
+ try:
+ sample_rate = max(0.0, min(1.0, float(sample_rate)))
+ except Exception: # If parsing fails, fall back to full sampling
+ sample_rate = 1.0
+ sampler = ParentBased(TraceIdRatioBased(sample_rate))
+
+ tracer_provider_kwargs = {"resource": resource}
+ if sampler is not None:
+ tracer_provider_kwargs["sampler"] = sampler
+
+ tracer_provider = TracerProvider(**tracer_provider_kwargs)
for exporter in settings.exporters:
- if exporter == "console":
+ # Exporter entries can be strings (legacy) or typed configs with a 'type' attribute
+ exporter_type = (
+ exporter
+ if isinstance(exporter, str)
+ else getattr(exporter, "type", None)
+ )
+ if exporter_type == "console":
tracer_provider.add_span_processor(
BatchSpanProcessor(
ConsoleSpanExporter(service_name=settings.service_name)
)
)
- elif exporter == "otlp":
+ elif exporter_type == "otlp":
+ # Merge endpoint/headers from typed config with legacy secrets (if provided)
+ endpoint = (
+ getattr(exporter, "endpoint", None)
+ if not isinstance(exporter, str)
+ else None
+ )
+ headers = (
+ getattr(exporter, "headers", None)
+ if not isinstance(exporter, str)
+ else None
+ )
if settings.otlp_settings:
+ endpoint = endpoint or getattr(
+ settings.otlp_settings, "endpoint", None
+ )
+ headers = headers or getattr(
+ settings.otlp_settings, "headers", None
+ )
+ if endpoint:
tracer_provider.add_span_processor(
BatchSpanProcessor(
OTLPSpanExporter(
- endpoint=settings.otlp_settings.endpoint,
- headers=settings.otlp_settings.headers,
+ endpoint=endpoint,
+ headers=headers,
)
)
)
@@ -115,21 +156,31 @@ async def configure(
logger.error(
"OTLP exporter is enabled but no OTLP settings endpoint is provided."
)
- elif exporter == "file":
+ elif exporter_type == "file":
+ custom_path = (
+ getattr(exporter, "path", None)
+ if not isinstance(exporter, str)
+ else getattr(settings, "path", None)
+ )
+ path_settings = (
+ getattr(exporter, "path_settings", None)
+ if not isinstance(exporter, str)
+ else getattr(settings, "path_settings", None)
+ )
tracer_provider.add_span_processor(
BatchSpanProcessor(
FileSpanExporter(
service_name=settings.service_name,
session_id=session_id,
- path_settings=settings.path_settings,
- custom_path=settings.path,
+ path_settings=path_settings,
+ custom_path=custom_path,
)
)
)
continue
else:
logger.error(
- f"Unknown exporter '{exporter}' specified. Supported exporters: console, otlp, file."
+ f"Unknown exporter '{exporter_type}' specified. Supported exporters: console, otlp, file."
)
# Store the tracer provider instance
diff --git a/src/mcp_agent/workflows/llm/augmented_llm_anthropic.py b/src/mcp_agent/workflows/llm/augmented_llm_anthropic.py
index d6522af5a..a2f31e72c 100644
--- a/src/mcp_agent/workflows/llm/augmented_llm_anthropic.py
+++ b/src/mcp_agent/workflows/llm/augmented_llm_anthropic.py
@@ -475,8 +475,17 @@ async def generate_structured(
client = AsyncAnthropic()
async with client:
- async with client.messages.stream(**args) as stream:
- final = await stream.get_final_message()
+        stream_result = client.messages.stream(**args)
+        if hasattr(stream_result, "__aenter__") and hasattr(
+            stream_result, "__aexit__"
+        ):
+            async with stream_result as stream:
+                final = await stream.get_final_message()
+        else:
+            # The OpenTelemetry anthropic instrumentation wraps stream() and
+            # returns an async generator that is not an async context manager.
+            # Fall back to create() so the call succeeds while still emitting spans.
+ final = await client.messages.create(**args)
# Extract tool_use input and validate
for block in final.content:
diff --git a/tests/test_config_exporters.py b/tests/test_config_exporters.py
new file mode 100644
index 000000000..4d78d74d6
--- /dev/null
+++ b/tests/test_config_exporters.py
@@ -0,0 +1,104 @@
+"""Tests for OpenTelemetry exporter configuration handling."""
+
+import pytest
+
+from mcp_agent.config import (
+ ConsoleExporterSettings,
+ FileExporterSettings,
+ OTLPExporterSettings,
+ OpenTelemetrySettings,
+ TraceOTLPSettings,
+ TracePathSettings,
+)
+
+
+def _assert_console_exporter(exporter):
+ assert isinstance(exporter, ConsoleExporterSettings)
+ assert exporter.type == "console"
+
+
+def _assert_file_exporter(exporter):
+ assert isinstance(exporter, FileExporterSettings)
+ assert exporter.type == "file"
+
+
+def _assert_otlp_exporter(exporter, endpoint: str):
+ assert isinstance(exporter, OTLPExporterSettings)
+ assert exporter.type == "otlp"
+ assert exporter.endpoint == endpoint
+
+
+def test_typed_exporters_passthrough():
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {"type": "console"},
+ {"type": "otlp", "endpoint": "http://collector:4318/v1/traces"},
+ ],
+ )
+
+ assert len(settings.exporters) == 2
+ _assert_console_exporter(settings.exporters[0])
+ _assert_otlp_exporter(settings.exporters[1], "http://collector:4318/v1/traces")
+
+
+def test_legacy_exporters_with_dict_settings():
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=["file", "otlp"],
+ path="/tmp/trace.jsonl",
+ path_settings={
+ "path_pattern": "traces/trace-{unique_id}.jsonl",
+ "unique_id": "timestamp",
+ },
+ otlp_settings={
+ "endpoint": "http://collector:4318/v1/traces",
+ "headers": {"Authorization": "Bearer token"},
+ },
+ )
+
+ assert len(settings.exporters) == 2
+ _assert_file_exporter(settings.exporters[0])
+ assert settings.exporters[0].path == "/tmp/trace.jsonl"
+ assert settings.exporters[0].path_settings
+ assert (
+ settings.exporters[0].path_settings.path_pattern
+ == "traces/trace-{unique_id}.jsonl"
+ )
+
+ _assert_otlp_exporter(settings.exporters[1], "http://collector:4318/v1/traces")
+ assert settings.exporters[1].headers == {"Authorization": "Bearer token"}
+
+
+def test_legacy_exporters_with_base_models():
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=["file", "otlp"],
+ path_settings=TracePathSettings(path_pattern="trace-{unique_id}.jsonl"),
+ otlp_settings=TraceOTLPSettings(endpoint="http://collector:4318/v1/traces"),
+ )
+
+ assert len(settings.exporters) == 2
+ _assert_file_exporter(settings.exporters[0])
+ assert settings.exporters[0].path_settings
+ assert settings.exporters[0].path_settings.path_pattern == "trace-{unique_id}.jsonl"
+
+ _assert_otlp_exporter(settings.exporters[1], "http://collector:4318/v1/traces")
+
+
+def test_legacy_unknown_exporter_raises():
+ with pytest.raises(ValueError, match="Unsupported OpenTelemetry exporter"):
+ OpenTelemetrySettings(exporters=["console", "bogus"])
+
+
+def test_literal_exporters_become_typed_configs():
+ settings = OpenTelemetrySettings(exporters=["console", "file", "otlp"])
+
+ assert len(settings.exporters) == 3
+ assert [
+ type(exporter) for exporter in settings.exporters
+ ] == [
+ ConsoleExporterSettings,
+ FileExporterSettings,
+ OTLPExporterSettings,
+ ]
diff --git a/tests/test_tracing_configure.py b/tests/test_tracing_configure.py
new file mode 100644
index 000000000..7ea1132c0
--- /dev/null
+++ b/tests/test_tracing_configure.py
@@ -0,0 +1,108 @@
+"""Tracer configuration tests."""
+
+import pytest
+
+from mcp_agent.config import OpenTelemetrySettings, OTLPExporterSettings
+from mcp_agent.tracing.tracer import TracingConfig
+
+
+def _install_tracer_stubs(monkeypatch):
+ recorded_exporters = []
+ provider_kwargs = []
+
+ class StubOTLPExporter:
+ def __init__(self, *, endpoint=None, headers=None):
+ self.endpoint = endpoint
+ self.headers = headers
+ recorded_exporters.append(self)
+
+ class StubBatchSpanProcessor:
+ def __init__(self, exporter):
+ self.exporter = exporter
+
+ def on_start(self, *_, **__): # pragma: no cover - interface stub
+ pass
+
+ def on_end(self, *_, **__): # pragma: no cover - interface stub
+ pass
+
+ def shutdown(self, *_, **__): # pragma: no cover - interface stub
+ pass
+
+ def force_flush(self, *_, **__): # pragma: no cover - interface stub
+ pass
+
+ class StubTracerProvider:
+ def __init__(self, **kwargs):
+ provider_kwargs.append(kwargs)
+ self.processors = []
+
+ def add_span_processor(self, processor):
+ self.processors.append(processor)
+
+ def shutdown(self): # pragma: no cover - interface stub
+ pass
+
+ monkeypatch.setattr("mcp_agent.tracing.tracer.OTLPSpanExporter", StubOTLPExporter)
+ monkeypatch.setattr(
+ "mcp_agent.tracing.tracer.BatchSpanProcessor", StubBatchSpanProcessor
+ )
+ monkeypatch.setattr("mcp_agent.tracing.tracer.TracerProvider", StubTracerProvider)
+ monkeypatch.setattr(TracingConfig, "_global_provider_set", True, raising=False)
+ monkeypatch.setattr(
+ TracingConfig, "_instrumentation_initialized", True, raising=False
+ )
+
+ return recorded_exporters, provider_kwargs
+
+
+@pytest.mark.anyio
+async def test_multiple_otlp_exporters(monkeypatch):
+ recorded_exporters, _ = _install_tracer_stubs(monkeypatch)
+
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ OTLPExporterSettings(endpoint="http://collector-a:4318/v1/traces"),
+ OTLPExporterSettings(
+ endpoint="http://collector-b:4318/v1/traces",
+ headers={"X-Auth": "token"},
+ ),
+ ],
+ )
+
+ tracer_config = TracingConfig()
+ await tracer_config.configure(settings, session_id="test-session", force=True)
+
+ assert [exp.endpoint for exp in recorded_exporters] == [
+ "http://collector-a:4318/v1/traces",
+ "http://collector-b:4318/v1/traces",
+ ]
+ assert recorded_exporters[1].headers == {"X-Auth": "token"}
+
+
+@pytest.mark.anyio
+async def test_sample_rate_only_applied_when_specified(monkeypatch):
+ _, provider_kwargs = _install_tracer_stubs(monkeypatch)
+
+ settings_default = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[{"type": "console"}],
+ )
+ tracer_config = TracingConfig()
+ await tracer_config.configure(settings_default, session_id="session-1", force=True)
+
+ assert "sampler" not in provider_kwargs[0]
+ assert provider_kwargs[0]["resource"] is not None
+
+ settings_with_rate = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[{"type": "console"}],
+ sample_rate=0.5,
+ )
+ tracer_config = TracingConfig()
+ await tracer_config.configure(
+ settings_with_rate, session_id="session-2", force=True
+ )
+
+ assert "sampler" in provider_kwargs[1]
diff --git a/tests/test_tracing_isolation.py b/tests/test_tracing_isolation.py
index d173cc98d..e39413887 100644
--- a/tests/test_tracing_isolation.py
+++ b/tests/test_tracing_isolation.py
@@ -6,7 +6,7 @@
from opentelemetry import trace
from mcp_agent.app import MCPApp
-from mcp_agent.config import Settings, OpenTelemetrySettings
+from mcp_agent.config import Settings, OpenTelemetrySettings, FileExporterSettings
from mcp_agent.tracing.tracer import TracingConfig
@@ -297,8 +297,7 @@ async def test_file_span_exporter_isolation(self):
otel=OpenTelemetrySettings(
enabled=True,
service_name="app1-service",
- exporters=["file"],
- path=str(trace_file1), # Direct path
+ exporters=[FileExporterSettings(path=str(trace_file1))],
)
)
@@ -306,8 +305,7 @@ async def test_file_span_exporter_isolation(self):
otel=OpenTelemetrySettings(
enabled=True,
service_name="app2-service",
- exporters=["file"],
- path=str(trace_file2), # Direct path
+ exporters=[FileExporterSettings(path=str(trace_file2))],
)
)
@@ -380,9 +378,7 @@ async def test_file_span_exporter_with_path_settings(self):
otel=OpenTelemetrySettings(
enabled=True,
service_name="path-settings-service",
- exporters=["file"],
- path_settings=path_settings,
- # Note: path is NOT set, so path_settings should be used
+ exporters=[FileExporterSettings(path_settings=path_settings)],
)
)
@@ -456,8 +452,7 @@ async def run_app_with_traces(app_num: int):
otel=OpenTelemetrySettings(
enabled=True,
service_name=f"concurrent-app-{app_num}",
- exporters=["file"],
- path=str(trace_file),
+ exporters=[FileExporterSettings(path=str(trace_file))],
)
)