diff --git a/examples/tracing/agent/README.md b/examples/tracing/agent/README.md
index debaf75c4..a8c3070cb 100644
--- a/examples/tracing/agent/README.md
+++ b/examples/tracing/agent/README.md
@@ -16,10 +16,10 @@ If desired, [install Jaeger locally](https://www.jaegertracing.io/docs/2.5/getti
otel:
enabled: true
exporters:
- - type: console
- - type: file
- - type: otlp
- endpoint: "http://localhost:4318/v1/traces"
+ - console
+ - file
+ - otlp:
+ endpoint: "http://localhost:4318/v1/traces"
```
diff --git a/examples/tracing/agent/mcp_agent.config.yaml b/examples/tracing/agent/mcp_agent.config.yaml
index 47f3f8c7c..54482bde5 100644
--- a/examples/tracing/agent/mcp_agent.config.yaml
+++ b/examples/tracing/agent/mcp_agent.config.yaml
@@ -26,11 +26,10 @@ openai:
otel:
enabled: true
- exporters: [
- { type: console },
- { type: file },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
-
- ]
+ exporters:
+ - console
+ - file
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "BasicTracingAgentExample"
diff --git a/examples/tracing/langfuse/README.md b/examples/tracing/langfuse/README.md
index 76cac23e4..ba456bc8b 100644
--- a/examples/tracing/langfuse/README.md
+++ b/examples/tracing/langfuse/README.md
@@ -46,23 +46,25 @@ Obtain a secret and public API key for your desired Langfuse project and then ge
echo -n "pk-your-public-key:sk-your-secret-key" | base64
```
-In `mcp_agent.secrets.yaml` set the Authorization header for OTLP (merged automatically with the typed exporter):
+In `mcp_agent.secrets.yaml` set the OTLP exporter with the Authorization header (this fully defines the exporter for Langfuse):
```yaml
otel:
- otlp_settings:
- headers:
- Authorization: "Basic AUTH_STRING"
+ exporters:
+ - otlp:
+ endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces"
+ headers:
+ Authorization: "Basic AUTH_STRING"
```
-Lastly, ensure the proper trace endpoint is configured in the typed exporter in `mcp_agent.config.yaml` for your Langfuse region, e.g.:
+The default `mcp_agent.config.yaml` leaves the exporters list commented out, so this secrets entry is the only OTLP exporter (avoiding a duplicate exporter without headers). For non-authenticated collectors, you can instead define the exporter directly in `mcp_agent.config.yaml` and omit it from `mcp_agent.secrets.yaml`, e.g.:
```yaml
otel:
enabled: true
exporters:
- - type: otlp
- endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces"
+ - otlp:
+ endpoint: "https://some.other.tracing.com"
```
## `4` Run locally
diff --git a/examples/tracing/langfuse/mcp_agent.config.yaml b/examples/tracing/langfuse/mcp_agent.config.yaml
index 536a7a483..7c0d83a0c 100644
--- a/examples/tracing/langfuse/mcp_agent.config.yaml
+++ b/examples/tracing/langfuse/mcp_agent.config.yaml
@@ -26,6 +26,10 @@ openai:
otel:
enabled: true
- exporters: [{ type: otlp, endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces" }]
+ # OTLP exporter (with headers) is defined in mcp_agent.secrets.yaml.
+ # For non-authenticated collectors, uncomment and configure below:
+ # exporters:
+ # - otlp:
+ # endpoint: "https://some.other.tracing.com"
# Set Authorization header with API key in mcp_agent.secrets.yaml
service_name: "BasicTracingLangfuseExample"
diff --git a/examples/tracing/langfuse/mcp_agent.secrets.yaml.example b/examples/tracing/langfuse/mcp_agent.secrets.yaml.example
index aefc0b521..51858bf3b 100644
--- a/examples/tracing/langfuse/mcp_agent.secrets.yaml.example
+++ b/examples/tracing/langfuse/mcp_agent.secrets.yaml.example
@@ -7,7 +7,10 @@ anthropic:
api_key: anthropic_api_key
otel:
- # Headers are merged with typed OTLP exporter settings
- otlp_settings:
- headers:
- Authorization: "Basic "
+ # Define the Langfuse OTLP exporter (including headers) here so
+ # mcp_agent.config.yaml does not need a duplicate entry.
+ exporters:
+ - otlp:
+ endpoint: "https://us.cloud.langfuse.com/api/public/otel/v1/traces"
+ headers:
+ Authorization: "Basic AUTH_STRING"
diff --git a/examples/tracing/llm/README.md b/examples/tracing/llm/README.md
index fa44302ae..87df57c9c 100644
--- a/examples/tracing/llm/README.md
+++ b/examples/tracing/llm/README.md
@@ -29,10 +29,10 @@ Then update the `mcp_agent.config.yaml` to include a typed OTLP exporter with th
otel:
enabled: true
exporters:
- - type: console
- - type: file
- - type: otlp
- endpoint: "http://localhost:4318/v1/traces"
+ - console
+ - file
+ - otlp:
+ endpoint: "http://localhost:4318/v1/traces"
```
diff --git a/examples/tracing/llm/mcp_agent.config.yaml b/examples/tracing/llm/mcp_agent.config.yaml
index 3d3eedf7e..e5552d1c8 100644
--- a/examples/tracing/llm/mcp_agent.config.yaml
+++ b/examples/tracing/llm/mcp_agent.config.yaml
@@ -26,11 +26,10 @@ openai:
otel:
enabled: true
- exporters: [
- { type: console },
- { type: file },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
- ]
-
+ exporters:
+ - console
+ - file
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "BasicTracingLLMExample"
diff --git a/examples/tracing/mcp/README.md b/examples/tracing/mcp/README.md
index caf4dcb80..f23c2e3af 100644
--- a/examples/tracing/mcp/README.md
+++ b/examples/tracing/mcp/README.md
@@ -54,8 +54,8 @@ Then open `mcp_agent.secrets.yaml` and add your api key for your preferred LLM f
otel:
enabled: true
exporters:
- - type: otlp
- endpoint: "http://localhost:4318/v1/traces"
+ - otlp:
+ endpoint: "http://localhost:4318/v1/traces"
```
## `4` Run locally
diff --git a/examples/tracing/mcp/mcp_agent.config.yaml b/examples/tracing/mcp/mcp_agent.config.yaml
index 00e3a4872..28fcde6ab 100644
--- a/examples/tracing/mcp/mcp_agent.config.yaml
+++ b/examples/tracing/mcp/mcp_agent.config.yaml
@@ -17,5 +17,7 @@ openai:
otel:
enabled: true
- exporters: [{ type: otlp, endpoint: "http://localhost:4318/v1/traces" }]
+ exporters:
+ - otlp:
+ endpoint: "http://localhost:4318/v1/traces"
service_name: "MCPAgentSSEExample"
diff --git a/examples/tracing/temporal/README.md b/examples/tracing/temporal/README.md
index d663d29ad..e4bd544dd 100644
--- a/examples/tracing/temporal/README.md
+++ b/examples/tracing/temporal/README.md
@@ -53,8 +53,8 @@ To run any of these examples, you'll need to:
otel:
enabled: true
exporters:
- - type: otlp
- endpoint: "http://localhost:4318/v1/traces"
+ - otlp:
+ endpoint: "http://localhost:4318/v1/traces"
```
4. In a separate terminal, start the worker:
diff --git a/examples/tracing/temporal/mcp_agent.config.yaml b/examples/tracing/temporal/mcp_agent.config.yaml
index 65c9efbce..e624f5f68 100644
--- a/examples/tracing/temporal/mcp_agent.config.yaml
+++ b/examples/tracing/temporal/mcp_agent.config.yaml
@@ -46,8 +46,7 @@ openai:
otel:
enabled: true
exporters:
- [
- { type: file },
- { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
- ]
+ - file
+ - otlp:
+ endpoint: "http://localhost:4318/v1/traces"
service_name: "TemporalTracingExample"
diff --git a/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml b/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml
index 0ed34e2dd..bb78ccb45 100644
--- a/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_deep_orchestrator/mcp_agent.config.yaml
@@ -24,17 +24,13 @@ openai:
otel:
enabled: true
- exporters: [
- {
- type: file,
+ exporters:
+ - file:
path_settings:
- {
- path_pattern: "traces/mcp-agent-trace-{unique_id}.jsonl",
- unique_id: "timestamp",
- timestamp_format: "%Y%m%d_%H%M%S",
- },
- },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" },
- ]
+ path_pattern: "traces/mcp-agent-trace-{unique_id}.jsonl"
+ unique_id: "timestamp"
+ timestamp_format: "%Y%m%d_%H%M%S"
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "AdaptiveWorkflowExample"
diff --git a/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml b/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml
index b4b46f570..f3ff4735b 100644
--- a/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_evaluator_optimizer/mcp_agent.config.yaml
@@ -41,9 +41,9 @@ openai:
# OpenTelemetry (OTEL) configuration for distributed tracing
otel:
enabled: false
- exporters: [
- { type: console },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
- ]
+ exporters:
+ - console
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "WorkflowEvaluatorOptimizerExample"
diff --git a/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml b/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml
index 1b15b9db3..bd4eda75b 100644
--- a/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_intent_classifier/mcp_agent.config.yaml
@@ -21,9 +21,9 @@ openai:
otel:
enabled: false
- exporters: [
- { type: console },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
- ]
+ exporters:
+ - console
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "WorkflowIntentClassifierExample"
diff --git a/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml b/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml
index c91192eb3..75f00e03e 100644
--- a/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_orchestrator_worker/mcp_agent.config.yaml
@@ -26,9 +26,9 @@ openai:
otel:
enabled: false
- exporters: [
- { type: console },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
- ]
+ exporters:
+ - console
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "WorkflowOrchestratorWorkerExample"
diff --git a/examples/workflows/workflow_parallel/mcp_agent.config.yaml b/examples/workflows/workflow_parallel/mcp_agent.config.yaml
index 0e0dfc810..ddb50cf73 100644
--- a/examples/workflows/workflow_parallel/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_parallel/mcp_agent.config.yaml
@@ -25,10 +25,9 @@ openai:
otel:
enabled: false
- exporters: [
- { type: console },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
- ]
-
+ exporters:
+ - console
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "WorkflowParallelExample"
diff --git a/examples/workflows/workflow_router/mcp_agent.config.yaml b/examples/workflows/workflow_router/mcp_agent.config.yaml
index 5265d8c11..9b6e5b3f5 100644
--- a/examples/workflows/workflow_router/mcp_agent.config.yaml
+++ b/examples/workflows/workflow_router/mcp_agent.config.yaml
@@ -21,9 +21,9 @@ openai:
otel:
enabled: false
- exporters: [
- { type: console },
- # To export to a collector, also include:
- # { type: otlp, endpoint: "http://localhost:4318/v1/traces" }
- ]
+ exporters:
+ - console
+ # To export to a collector, also include:
+ # - otlp:
+ # endpoint: "http://localhost:4318/v1/traces"
service_name: "WorkflowRouterExample"
diff --git a/schema/mcp-agent.config.schema.json b/schema/mcp-agent.config.schema.json
index 00ee3cc4a..f59160058 100644
--- a/schema/mcp-agent.config.schema.json
+++ b/schema/mcp-agent.config.schema.json
@@ -205,6 +205,52 @@
"default": null,
"title": "Endpoint"
},
+ "api_version": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Api Version"
+ },
+ "azure_deployment": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Azure Deployment"
+ },
+ "azure_ad_token": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Azure Ad Token"
+ },
+ "azure_ad_token_provider": {
+ "anyOf": [
+ {},
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Azure Ad Token Provider"
+ },
"credential_scopes": {
"anyOf": [
{
@@ -221,6 +267,18 @@
"https://cognitiveservices.azure.com/.default"
],
"title": "Credential Scopes"
+ },
+ "default_model": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "default": null,
+ "title": "Default Model"
}
},
"title": "AzureSettings",
@@ -316,26 +374,15 @@
},
"ConsoleExporterSettings": {
"additionalProperties": true,
- "properties": {
- "type": {
- "const": "console",
- "default": "console",
- "title": "Type",
- "type": "string"
- }
- },
+ "description": "Console exporter uses stdout; no extra settings required.",
+ "properties": {},
"title": "ConsoleExporterSettings",
"type": "object"
},
"FileExporterSettings": {
"additionalProperties": true,
+ "description": "File exporter settings for writing traces to a file.",
"properties": {
- "type": {
- "const": "file",
- "default": "file",
- "title": "Type",
- "type": "string"
- },
"path": {
"anyOf": [
{
@@ -838,12 +885,6 @@
"OTLPExporterSettings": {
"additionalProperties": true,
"properties": {
- "type": {
- "const": "otlp",
- "default": "otlp",
- "title": "Type",
- "type": "string"
- },
"endpoint": {
"anyOf": [
{
@@ -978,25 +1019,64 @@
"type": "string"
},
{
- "discriminator": {
- "mapping": {
- "console": "#/$defs/ConsoleExporterSettings",
- "file": "#/$defs/FileExporterSettings",
- "otlp": "#/$defs/OTLPExporterSettings"
- },
- "propertyName": "type"
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/ConsoleExporterSettings"
+ },
+ {
+ "additionalProperties": true,
+ "type": "object"
+ }
+ ]
+ },
+ "propertyNames": {
+ "const": "console"
+ },
+ "type": "object"
+ },
+ {
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/FileExporterSettings"
+ },
+ {
+ "additionalProperties": true,
+ "type": "object"
+ }
+ ]
+ },
+ "propertyNames": {
+ "const": "file"
+ },
+ "type": "object"
+ },
+ {
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "$ref": "#/$defs/OTLPExporterSettings"
+ },
+ {
+ "additionalProperties": true,
+ "type": "object"
+ }
+ ]
+ },
+ "propertyNames": {
+ "const": "otlp"
},
- "oneOf": [
- {
- "$ref": "#/$defs/ConsoleExporterSettings"
- },
- {
- "$ref": "#/$defs/FileExporterSettings"
- },
- {
- "$ref": "#/$defs/OTLPExporterSettings"
- }
- ]
+ "type": "object"
+ },
+ {
+ "$ref": "#/$defs/ConsoleExporterSettings"
+ },
+ {
+ "$ref": "#/$defs/FileExporterSettings"
+ },
+ {
+ "$ref": "#/$defs/OTLPExporterSettings"
}
]
},
@@ -1037,18 +1117,6 @@
"title": "Sample Rate",
"type": "number",
"description": "Sample rate for tracing (1.0 = sample everything)"
- },
- "otlp_settings": {
- "anyOf": [
- {
- "$ref": "#/$defs/TraceOTLPSettings"
- },
- {
- "type": "null"
- }
- ],
- "default": null,
- "description": "Deprecated single OTLP settings. Prefer exporters list with type \"otlp\"."
}
},
"title": "OpenTelemetrySettings",
@@ -1181,38 +1249,6 @@
"title": "TemporalSettings",
"type": "object"
},
- "TraceOTLPSettings": {
- "additionalProperties": true,
- "description": "Settings for OTLP exporter in OpenTelemetry.",
- "properties": {
- "endpoint": {
- "title": "Endpoint",
- "type": "string",
- "description": "OTLP endpoint for exporting traces."
- },
- "headers": {
- "anyOf": [
- {
- "additionalProperties": {
- "type": "string"
- },
- "type": "object"
- },
- {
- "type": "null"
- }
- ],
- "default": null,
- "title": "Headers",
- "description": "Optional headers for OTLP exporter."
- }
- },
- "required": [
- "endpoint"
- ],
- "title": "TraceOTLPSettings",
- "type": "object"
- },
"TracePathSettings": {
"additionalProperties": true,
"description": "Settings for configuring trace file paths with dynamic elements like timestamps or session IDs.",
@@ -1404,8 +1440,7 @@
"service_name": "mcp-agent",
"service_instance_id": null,
"service_version": null,
- "sample_rate": 1.0,
- "otlp_settings": null
+ "sample_rate": 1.0
},
"description": "OpenTelemetry logging settings for the MCP Agent application"
},
diff --git a/src/mcp_agent/config.py b/src/mcp_agent/config.py
index 3d296bb42..a9e75eb62 100644
--- a/src/mcp_agent/config.py
+++ b/src/mcp_agent/config.py
@@ -7,7 +7,7 @@
from httpx import URL
from io import StringIO
from pathlib import Path
-from typing import Annotated, Any, Dict, List, Literal, Optional, Set, Union
+from typing import Any, Dict, List, Literal, Optional, Set, Union
import threading
import warnings
@@ -535,41 +535,32 @@ class TraceOTLPSettings(BaseModel):
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
-class OpenTelemetryExporterBase(BaseModel):
- """
- Base class for OpenTelemetry exporter configuration.
-
- This is used as the discriminated base for exporter-specific configs.
- """
-
- type: Literal["console", "file", "otlp"]
+class ConsoleExporterSettings(BaseModel):
+ """Console exporter uses stdout; no extra settings required."""
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
-class ConsoleExporterSettings(OpenTelemetryExporterBase):
- type: Literal["console"] = "console"
+class FileExporterSettings(BaseModel):
+ """File exporter settings for writing traces to a file."""
-
-class FileExporterSettings(OpenTelemetryExporterBase):
- type: Literal["file"] = "file"
path: str | None = None
path_settings: TracePathSettings | None = None
+ model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
-class OTLPExporterSettings(OpenTelemetryExporterBase):
- type: Literal["otlp"] = "otlp"
+class OTLPExporterSettings(BaseModel):
endpoint: str | None = None
headers: Dict[str, str] | None = None
+ model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
+
-OpenTelemetryExporterSettings = Annotated[
- Union[
- ConsoleExporterSettings,
- FileExporterSettings,
- OTLPExporterSettings,
- ],
- Field(discriminator="type"),
+OpenTelemetryExporterSettings = Union[
+ ConsoleExporterSettings,
+ FileExporterSettings,
+ OTLPExporterSettings,
]
@@ -581,16 +572,30 @@ class OpenTelemetrySettings(BaseModel):
enabled: bool = False
exporters: List[
- Union[Literal["console", "file", "otlp"], OpenTelemetryExporterSettings]
+ Union[
+ Literal["console", "file", "otlp"],
+ Dict[Literal["console"], ConsoleExporterSettings | Dict],
+ Dict[Literal["file"], FileExporterSettings | Dict],
+ Dict[Literal["otlp"], OTLPExporterSettings | Dict],
+ ConsoleExporterSettings,
+ FileExporterSettings,
+ OTLPExporterSettings,
+ ]
] = []
"""
- Exporters to use (can enable multiple simultaneously). Each exporter has
- its own typed configuration.
+ Exporters to use (can enable multiple simultaneously). Each exporter accepts
+ either a plain string name (e.g. "console") or a keyed mapping (e.g.
+ `{file: {path: "path/to/file"}}`).
- Backward compatible: a YAML list of literal strings (e.g. ["console", "otlp"]) is
- accepted and will be transformed, sourcing settings from legacy fields
- like `otlp_settings`, `path` and `path_settings` if present.
- """
+ Backward compatible:
+ - `exporters: ["console", "otlp"]`
+ - `exporters: [{type: "file", path: "/tmp/out"}]`
+    Keyed-mapping forms:
+    - `exporters: [{console: {}}, {file: {path: "trace.jsonl"}}, {otlp: {endpoint: "https://..."}}]`
+ - `exporters: ["console", {file: {path: "trace.jsonl"}}]`
+
+ Strings fall back to legacy fields like `otlp_settings`, `path`, and
+    `path_settings` when no explicit config is present."""
service_name: str = "mcp-agent"
service_instance_id: str | None = None
@@ -599,129 +604,202 @@ class OpenTelemetrySettings(BaseModel):
sample_rate: float = 1.0
"""Sample rate for tracing (1.0 = sample everything)"""
- # Deprecated: use exporters: [{ type: "otlp", ... }]
- otlp_settings: TraceOTLPSettings | None = None
- """Deprecated single OTLP settings. Prefer exporters list with type "otlp"."""
-
model_config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
@model_validator(mode="before")
@classmethod
def _coerce_exporters_schema(cls, data: Dict) -> Dict:
"""
- Backward compatibility shim to allow:
- - exporters: ["console", "file", "otlp"] with legacy per-exporter fields
- - exporters already in discriminated-union form
+ Normalize exporter entries for backward compatibility.
+
+ This validator handles three exporter formats:
+ - String exporters like ["console", "file", "otlp"] with top-level legacy fields
+ - Type-discriminated format with 'type' field: [{type: "console"}, {type: "otlp", endpoint: "..."}]
+ - Key-discriminated format: [{console: {}}, {otlp: {endpoint: "..."}}]
+
+ Conversion logic:
+ - String exporters → Keep as-is, will be finalized in _finalize_exporters using legacy fields
+ - {type: "X", ...} → Convert to {X: {...}} by removing 'type' and using it as dict key
+ - {X: {...}} → Keep as-is (already in correct format)
"""
if not isinstance(data, dict):
return data
exporters = data.get("exporters")
-
- # If exporters are already objects with a 'type', leave as-is
- if isinstance(exporters, list) and all(
- isinstance(e, dict) and "type" in e for e in exporters
- ):
+ if not isinstance(exporters, list):
return data
- # If exporters are literal strings, up-convert to typed configs
- if isinstance(exporters, list) and all(isinstance(e, str) for e in exporters):
- typed_exporters: List[Dict] = []
-
- # Legacy helpers (can arrive as dicts or BaseModel instances)
- legacy_otlp = data.get("otlp_settings")
- if isinstance(legacy_otlp, BaseModel):
- legacy_otlp = legacy_otlp.model_dump(exclude_none=True)
- elif not isinstance(legacy_otlp, dict):
- legacy_otlp = {}
-
- legacy_path = data.get("path")
- legacy_path_settings = data.get("path_settings")
- if isinstance(legacy_path_settings, BaseModel):
- legacy_path_settings = legacy_path_settings.model_dump(
- exclude_none=True
- )
+ normalized: List[Union[str, Dict[str, Dict[str, object]]]] = []
- for name in exporters:
- if name == "console":
- typed_exporters.append({"type": "console"})
- elif name == "file":
- typed_exporters.append(
- {
- "type": "file",
- "path": legacy_path,
- "path_settings": legacy_path_settings,
- }
- )
- elif name == "otlp":
- typed_exporters.append(
- {
- "type": "otlp",
- "endpoint": (legacy_otlp or {}).get("endpoint"),
- "headers": (legacy_otlp or {}).get("headers"),
- }
- )
- else:
- raise ValueError(
- f"Unsupported OpenTelemetry exporter '{name}'. "
- "Supported exporters: console, file, otlp."
- )
+ for entry in exporters:
+ # Plain string like "console" or "file"
+ # These will be expanded later using legacy fields (path, otlp_settings, etc.)
+ if isinstance(entry, str):
+ normalized.append(entry)
+ continue
+
+ # Handle BaseModel instances passed directly (e.g., from tests or re-validation)
+ # If already a typed exporter settings instance, keep as-is (already finalized)
+ if isinstance(
+ entry,
+ (ConsoleExporterSettings, FileExporterSettings, OTLPExporterSettings),
+ ):
+ normalized.append(entry)
+ continue
- # Overwrite with transformed list
- data["exporters"] = typed_exporters
+ # Handle other BaseModel instances by converting to dict
+ if isinstance(entry, BaseModel):
+ entry = entry.model_dump(exclude_none=True)
+ # Fall through to dict processing below
+
+ if isinstance(entry, dict):
+ # Type-discriminated format: Extract 'type' field and use it as the dict key
+ # Example: {type: "otlp", endpoint: "..."} → {otlp: {endpoint: "..."}}
+ if "type" in entry:
+ entry = entry.copy()
+ exporter_type = entry.pop("type")
+ normalized.append({exporter_type: entry})
+ continue
+
+ # Key-discriminated format: Single-key dict like {console: {}} or {otlp: {endpoint: "..."}}
+ if len(entry) == 1:
+ normalized.append(entry)
+ continue
+
+ raise ValueError(
+ "OpenTelemetry exporters must be strings, type-tagged dicts, or "
+ 'keyed mappings (e.g. `- console`, `- {type: "file"}`, '
+ '`- {file: {path: "trace.jsonl"}}`).'
+ )
+
+ data["exporters"] = normalized
return data
@model_validator(mode="after")
@classmethod
def _finalize_exporters(cls, values: "OpenTelemetrySettings"):
- """Ensure exporters are instantiated as typed configs even if literals were provided."""
+ """
+ Convert exporter entries to key-discriminated dict format for serialization compatibility.
+
+ This validator runs after Pydantic validation and:
+ 1. Extracts legacy top-level fields (path, path_settings, otlp_settings) from the model
+ 2. Converts string exporters and dict exporters to key-discriminated dict format
+ 3. Falls back to legacy fields when string exporters don't provide explicit config
+ 4. Removes legacy fields from the model to avoid leaking them in serialization
+
+ Output format is key-discriminated dicts (e.g., {console: {}}, {file: {path: "..."}}) to ensure
+ that re-serialization and re-validation works correctly.
+
+ Example conversions:
+ - "file" + path="trace.jsonl" → {file: {path: "trace.jsonl"}}
+ - "otlp" + otlp_settings={endpoint: "..."} → {otlp: {endpoint: "...", headers: ...}}
+ """
- typed_exporters: List[OpenTelemetryExporterSettings] = []
+ finalized_exporters: List[Dict[str, Dict[str, Any]]] = []
+ # Extract legacy top-level fields (captured via extra="allow" in model_config)
+ # These fields were previously defined at the top level of OpenTelemetrySettings
legacy_path = getattr(values, "path", None)
legacy_path_settings = getattr(values, "path_settings", None)
+
+ # Normalize legacy_path_settings to TracePathSettings if it's a dict or BaseModel
if isinstance(legacy_path_settings, dict):
legacy_path_settings = TracePathSettings.model_validate(
legacy_path_settings
)
+ elif legacy_path_settings is not None and not isinstance(
+ legacy_path_settings, TracePathSettings
+ ):
+ legacy_path_settings = TracePathSettings.model_validate(
+ getattr(
+ legacy_path_settings, "model_dump", lambda **_: legacy_path_settings
+ )()
+ )
+
+ # Extract legacy otlp_settings and normalize to dict
+ legacy_otlp = getattr(values, "otlp_settings", None)
+ if isinstance(legacy_otlp, BaseModel):
+ legacy_otlp = legacy_otlp.model_dump(exclude_none=True)
+ elif not isinstance(legacy_otlp, dict):
+ legacy_otlp = {}
for exporter in values.exporters:
- if isinstance(exporter, OpenTelemetryExporterBase):
- typed_exporters.append(exporter) # Already typed
+ # If already a typed BaseModel instance, convert to key-discriminated dict format
+ if isinstance(exporter, ConsoleExporterSettings):
+ console_dict = exporter.model_dump(exclude_none=True)
+ finalized_exporters.append({"console": console_dict})
+ continue
+ elif isinstance(exporter, FileExporterSettings):
+ file_dict = exporter.model_dump(exclude_none=True)
+ finalized_exporters.append({"file": file_dict})
continue
+ elif isinstance(exporter, OTLPExporterSettings):
+ otlp_dict = exporter.model_dump(exclude_none=True)
+ finalized_exporters.append({"otlp": otlp_dict})
+ continue
+
+ exporter_name: str | None = None
+ payload: Dict[str, object] = {}
- if exporter == "console":
- typed_exporters.append(ConsoleExporterSettings())
- elif exporter == "file":
- typed_exporters.append(
- FileExporterSettings(
- path=legacy_path,
- path_settings=legacy_path_settings,
+ if isinstance(exporter, str):
+ exporter_name = exporter
+ elif isinstance(exporter, dict):
+ if len(exporter) != 1:
+ raise ValueError(
+ "OpenTelemetry exporter mappings must have exactly one key"
)
+ exporter_name, payload = next(iter(exporter.items()))
+ if payload is None:
+ payload = {}
+ elif isinstance(payload, BaseModel):
+ payload = payload.model_dump(exclude_none=True)
+ elif not isinstance(payload, dict):
+ raise ValueError(
+ 'Exporter configuration must be a dict. Example: `- file: {path: "trace.jsonl"}`'
+ )
+ else:
+ raise TypeError(f"Unexpected exporter entry: {exporter!r}")
+
+ if exporter_name == "console":
+ console_settings = ConsoleExporterSettings.model_validate(payload or {})
+ finalized_exporters.append(
+ {"console": console_settings.model_dump(exclude_none=True)}
)
- elif exporter == "otlp":
- endpoint = None
- headers = None
- if values.otlp_settings:
- endpoint = getattr(values.otlp_settings, "endpoint", None)
- headers = getattr(values.otlp_settings, "headers", None)
- typed_exporters.append(
- OTLPExporterSettings(endpoint=endpoint, headers=headers)
+ elif exporter_name == "file":
+ file_payload = payload.copy()
+ file_payload.setdefault("path", legacy_path)
+ if (
+ "path_settings" not in file_payload
+ and legacy_path_settings is not None
+ ):
+ file_payload["path_settings"] = legacy_path_settings
+ file_settings = FileExporterSettings.model_validate(file_payload)
+ finalized_exporters.append(
+ {"file": file_settings.model_dump(exclude_none=True)}
)
- else: # pragma: no cover - safeguarded by pre-validator, but keep defensive path
+ elif exporter_name == "otlp":
+ otlp_payload = payload.copy()
+ otlp_payload.setdefault("endpoint", legacy_otlp.get("endpoint"))
+ otlp_payload.setdefault("headers", legacy_otlp.get("headers"))
+ otlp_settings = OTLPExporterSettings.model_validate(otlp_payload)
+ finalized_exporters.append(
+ {"otlp": otlp_settings.model_dump(exclude_none=True)}
+ )
+ else:
raise ValueError(
- f"Unsupported OpenTelemetry exporter '{exporter}'. "
- "Supported exporters: console, file, otlp."
+ f"Unsupported OpenTelemetry exporter '{exporter_name}'. Supported exporters: console, file, otlp."
)
- values.exporters = typed_exporters
+ values.exporters = finalized_exporters
# Remove legacy extras once we've consumed them to avoid leaking into dumps
if hasattr(values, "path"):
delattr(values, "path")
if hasattr(values, "path_settings"):
delattr(values, "path_settings")
+ if hasattr(values, "otlp_settings"):
+ delattr(values, "otlp_settings")
return values
@@ -994,16 +1072,30 @@ def get_settings(config_path: str | None = None, set_global: bool = True) -> Set
Settings instance with loaded configuration.
"""
- def deep_merge(base: dict, update: dict) -> dict:
- """Recursively merge two dictionaries, preserving nested structures."""
+ def deep_merge(base: dict, update: dict, path: tuple = ()) -> dict:
+ """Recursively merge two dictionaries, preserving nested structures.
+
+ Special handling for 'exporters' lists under 'otel' key:
+ - Concatenates lists instead of replacing them
+ - Allows combining exporters from config and secrets files
+ """
merged = base.copy()
for key, value in update.items():
+ current_path = path + (key,)
if (
key in merged
and isinstance(merged[key], dict)
and isinstance(value, dict)
):
- merged[key] = deep_merge(merged[key], value)
+ merged[key] = deep_merge(merged[key], value, current_path)
+ elif (
+ key in merged
+ and isinstance(merged[key], list)
+ and isinstance(value, list)
+ and current_path == ("otel", "exporters")
+ ):
+ # Concatenate exporters lists from config and secrets
+ merged[key] = merged[key] + value
else:
merged[key] = value
return merged
diff --git a/src/mcp_agent/tracing/tracer.py b/src/mcp_agent/tracing/tracer.py
index 94503a7e7..2b7f364d6 100644
--- a/src/mcp_agent/tracing/tracer.py
+++ b/src/mcp_agent/tracing/tracer.py
@@ -9,7 +9,10 @@
from opentelemetry.trace.propagation.tracecontext import TraceContextTextMapPropagator
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
-from mcp_agent.config import OpenTelemetrySettings
+from mcp_agent.config import (
+ OpenTelemetrySettings,
+ TracePathSettings,
+)
from mcp_agent.logging.logger import get_logger
from mcp_agent.tracing.file_span_exporter import FileSpanExporter
@@ -112,12 +115,24 @@ async def configure(
tracer_provider = TracerProvider(**tracer_provider_kwargs)
for exporter in settings.exporters:
- # Exporter entries can be strings (legacy) or typed configs with a 'type' attribute
- exporter_type = (
- exporter
- if isinstance(exporter, str)
- else getattr(exporter, "type", None)
- )
+ # Determine exporter type from dict format: {console: {}}, {file: {...}}, {otlp: {...}}
+ exporter_type = None
+ payload = {}
+
+ if isinstance(exporter, str):
+ # Legacy string format
+ exporter_type = exporter
+ elif isinstance(exporter, dict):
+ # Key-discriminated dict format: {exporter_name: {config}}
+ if len(exporter) == 1:
+ exporter_type, payload = next(iter(exporter.items()))
+ if payload is None:
+ payload = {}
+ else:
+ # Unexpected format
+ logger.error(f"Unknown exporter format: {exporter!r}")
+ continue
+
if exporter_type == "console":
tracer_provider.add_span_processor(
BatchSpanProcessor(
@@ -125,24 +140,18 @@ async def configure(
)
)
elif exporter_type == "otlp":
- # Merge endpoint/headers from typed config with legacy secrets (if provided)
+ # Extract endpoint/headers from dict payload
endpoint = (
- getattr(exporter, "endpoint", None)
- if not isinstance(exporter, str)
- else None
+ payload.get("endpoint") if isinstance(payload, dict) else None
)
- headers = (
- getattr(exporter, "headers", None)
- if not isinstance(exporter, str)
- else None
- )
- if settings.otlp_settings:
- endpoint = endpoint or getattr(
- settings.otlp_settings, "endpoint", None
- )
- headers = headers or getattr(
- settings.otlp_settings, "headers", None
- )
+ headers = payload.get("headers") if isinstance(payload, dict) else None
+
+ # Fall back to legacy otlp_settings if not provided in payload
+ legacy_otlp = getattr(settings, "otlp_settings", None)
+ if legacy_otlp:
+ endpoint = endpoint or getattr(legacy_otlp, "endpoint", None)
+ headers = headers or getattr(legacy_otlp, "headers", None)
+
if endpoint:
tracer_provider.add_span_processor(
BatchSpanProcessor(
@@ -157,16 +166,22 @@ async def configure(
"OTLP exporter is enabled but no OTLP settings endpoint is provided."
)
elif exporter_type == "file":
- custom_path = (
- getattr(exporter, "path", None)
- if not isinstance(exporter, str)
- else getattr(settings, "path", None)
- )
+ # Extract path and path_settings from dict payload
+ custom_path = payload.get("path") if isinstance(payload, dict) else None
path_settings = (
- getattr(exporter, "path_settings", None)
- if not isinstance(exporter, str)
- else getattr(settings, "path_settings", None)
+ payload.get("path_settings") if isinstance(payload, dict) else None
)
+
+ # Fall back to legacy top-level fields if not provided in payload
+ if not custom_path:
+ custom_path = getattr(settings, "path", None)
+ if not path_settings:
+ path_settings = getattr(settings, "path_settings", None)
+
+ # Convert path_settings dict to TracePathSettings if needed
+ if isinstance(path_settings, dict):
+ path_settings = TracePathSettings.model_validate(path_settings)
+
tracer_provider.add_span_processor(
BatchSpanProcessor(
FileSpanExporter(
diff --git a/tests/test_config_exporters.py b/tests/test_config_exporters.py
index 1a477dc27..72907deee 100644
--- a/tests/test_config_exporters.py
+++ b/tests/test_config_exporters.py
@@ -1,11 +1,9 @@
-"""Tests for OpenTelemetry exporter configuration handling."""
+"""Tests for OpenTelemetry exporter configuration handling across different formats."""
import pytest
+from pydantic import ValidationError
from mcp_agent.config import (
- ConsoleExporterSettings,
- FileExporterSettings,
- OTLPExporterSettings,
OpenTelemetrySettings,
Settings,
TraceOTLPSettings,
@@ -14,39 +12,53 @@
def _assert_console_exporter(exporter):
- assert isinstance(exporter, ConsoleExporterSettings)
- assert exporter.type == "console"
+ """Assert that exporter is in key-discriminated console format: {console: {...}}."""
+ assert isinstance(exporter, dict)
+ assert "console" in exporter
+ assert isinstance(exporter["console"], dict)
-def _assert_file_exporter(exporter):
- assert isinstance(exporter, FileExporterSettings)
- assert exporter.type == "file"
+def _assert_file_exporter(exporter, path=None, path_pattern=None):
+ """Assert that exporter is in key-discriminated file format with optional path checks."""
+ assert isinstance(exporter, dict)
+ assert "file" in exporter
+ file_config = exporter["file"]
+ assert isinstance(file_config, dict)
+ if path is not None:
+ assert file_config.get("path") == path
+ if path_pattern is not None:
+ assert file_config.get("path_settings") is not None
+ path_settings = file_config["path_settings"]
+ if isinstance(path_settings, dict):
+ assert path_settings.get("path_pattern") == path_pattern
+ else:
+ assert path_settings.path_pattern == path_pattern
-def _assert_otlp_exporter(exporter, endpoint: str):
- assert isinstance(exporter, OTLPExporterSettings)
- assert exporter.type == "otlp"
- assert exporter.endpoint == endpoint
+def _assert_otlp_exporter(
+ exporter, endpoint: str | None = None, headers: dict | None = None
+):
+ """Assert that exporter is in key-discriminated OTLP format with optional field checks."""
+ assert isinstance(exporter, dict)
+ assert "otlp" in exporter
+ otlp_config = exporter["otlp"]
+ assert isinstance(otlp_config, dict)
+ if endpoint is not None:
+ assert otlp_config.get("endpoint") == endpoint
+ if headers is not None:
+ assert otlp_config.get("headers") == headers
-def test_typed_exporters_passthrough():
- settings = OpenTelemetrySettings(
- enabled=True,
- exporters=[
- {"type": "console"},
- {"type": "otlp", "endpoint": "http://collector:4318/v1/traces"},
- ],
- )
+# ============================================================================
+# String Exporter Tests (with legacy top-level fields)
+# ============================================================================
- assert len(settings.exporters) == 2
- _assert_console_exporter(settings.exporters[0])
- _assert_otlp_exporter(settings.exporters[1], "http://collector:4318/v1/traces")
-
-def test_legacy_exporters_with_dict_settings():
+def test_v1_string_exporters_with_legacy_fields():
+ """Test string exporters with top-level path/otlp_settings."""
settings = OpenTelemetrySettings(
enabled=True,
- exporters=["file", "otlp"],
+ exporters=["console", "file", "otlp"],
path="/tmp/trace.jsonl",
path_settings={
"path_pattern": "traces/trace-{unique_id}.jsonl",
@@ -58,54 +70,555 @@ def test_legacy_exporters_with_dict_settings():
},
)
- assert len(settings.exporters) == 2
- _assert_file_exporter(settings.exporters[0])
- assert settings.exporters[0].path == "/tmp/trace.jsonl"
- assert settings.exporters[0].path_settings
- assert (
- settings.exporters[0].path_settings.path_pattern
- == "traces/trace-{unique_id}.jsonl"
+ assert len(settings.exporters) == 3
+ _assert_console_exporter(settings.exporters[0])
+ _assert_file_exporter(
+ settings.exporters[1],
+ path="/tmp/trace.jsonl",
+ path_pattern="traces/trace-{unique_id}.jsonl",
+ )
+ _assert_otlp_exporter(
+ settings.exporters[2],
+ endpoint="http://collector:4318/v1/traces",
+ headers={"Authorization": "Bearer token"},
+ )
+
+
+def test_v1_file_exporter_with_base_model_path_settings():
+ """Test string exporter with TracePathSettings as BaseModel."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=["file"],
+ path_settings=TracePathSettings(
+ path_pattern="trace-{unique_id}.jsonl",
+ unique_id="session_id",
+ ),
+ )
+
+ assert len(settings.exporters) == 1
+ file_exp = settings.exporters[0]
+ _assert_file_exporter(file_exp)
+ file_config = file_exp["file"]
+ assert file_config.get("path_settings") is not None
+ path_settings = file_config["path_settings"]
+ if isinstance(path_settings, dict):
+ assert path_settings.get("path_pattern") == "trace-{unique_id}.jsonl"
+ assert path_settings.get("unique_id") == "session_id"
+ else:
+ assert path_settings.path_pattern == "trace-{unique_id}.jsonl"
+ assert path_settings.unique_id == "session_id"
+
+
+def test_v1_otlp_exporter_with_base_model():
+ """Test string exporter with TraceOTLPSettings as BaseModel."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=["otlp"],
+ otlp_settings=TraceOTLPSettings(
+ endpoint="http://collector:4318/v1/traces",
+ headers={"X-Custom-Header": "value"},
+ ),
)
- _assert_otlp_exporter(settings.exporters[1], "http://collector:4318/v1/traces")
- assert settings.exporters[1].headers == {"Authorization": "Bearer token"}
+ assert len(settings.exporters) == 1
+ _assert_otlp_exporter(
+ settings.exporters[0],
+ endpoint="http://collector:4318/v1/traces",
+ headers={"X-Custom-Header": "value"},
+ )
-def test_legacy_exporters_with_base_models():
+def test_v1_string_exporters_without_legacy_fields():
+ """Test string exporters without legacy fields (should create empty settings)."""
settings = OpenTelemetrySettings(
enabled=True,
- exporters=["file", "otlp"],
- path_settings=TracePathSettings(path_pattern="trace-{unique_id}.jsonl"),
- otlp_settings=TraceOTLPSettings(endpoint="http://collector:4318/v1/traces"),
+ exporters=["console", "file", "otlp"],
+ )
+
+ assert len(settings.exporters) == 3
+ _assert_console_exporter(settings.exporters[0])
+ _assert_file_exporter(settings.exporters[1]) # No path or path_settings
+ _assert_otlp_exporter(settings.exporters[2]) # No endpoint or headers
+
+
+# ============================================================================
+# Type-Discriminated Exporter Tests (using 'type' field)
+# ============================================================================
+
+
+def test_v2_type_discriminated_union():
+ """Test exporters with 'type' discriminator field."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {"type": "console"},
+ {"type": "file", "path": "/var/log/traces.jsonl"},
+ {"type": "otlp", "endpoint": "http://collector:4318/v1/traces"},
+ ],
+ )
+
+ assert len(settings.exporters) == 3
+ _assert_console_exporter(settings.exporters[0])
+ _assert_file_exporter(settings.exporters[1], path="/var/log/traces.jsonl")
+ _assert_otlp_exporter(
+ settings.exporters[2], endpoint="http://collector:4318/v1/traces"
+ )
+
+
+def test_v2_multiple_otlp_exporters():
+ """Test type-discriminated format supports multiple OTLP exporters with different endpoints."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {"type": "otlp", "endpoint": "http://collector1:4318"},
+ {
+ "type": "otlp",
+ "endpoint": "http://collector2:4318",
+ "headers": {"X-API-Key": "secret"},
+ },
+ ],
)
assert len(settings.exporters) == 2
- _assert_file_exporter(settings.exporters[0])
- assert settings.exporters[0].path_settings
- assert settings.exporters[0].path_settings.path_pattern == "trace-{unique_id}.jsonl"
+ _assert_otlp_exporter(settings.exporters[0], endpoint="http://collector1:4318")
+ _assert_otlp_exporter(
+ settings.exporters[1],
+ endpoint="http://collector2:4318",
+ headers={"X-API-Key": "secret"},
+ )
- _assert_otlp_exporter(settings.exporters[1], "http://collector:4318/v1/traces")
+def test_v2_file_exporter_with_path_settings():
+ """Test type-discriminated file exporter with nested path_settings."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {
+ "type": "file",
+ "path": "/tmp/trace.jsonl",
+ "path_settings": {
+ "path_pattern": "logs/{unique_id}.jsonl",
+ "unique_id": "timestamp",
+ "timestamp_format": "%Y%m%d",
+ },
+ }
+ ],
+ )
-def test_legacy_unknown_exporter_raises():
- with pytest.raises(ValueError, match="Unsupported OpenTelemetry exporter"):
- OpenTelemetrySettings(exporters=["console", "bogus"])
+ assert len(settings.exporters) == 1
+ file_exp = settings.exporters[0]
+ _assert_file_exporter(file_exp, path="/tmp/trace.jsonl")
+ file_config = file_exp["file"]
+ path_settings = file_config.get("path_settings")
+ assert path_settings is not None
+ if isinstance(path_settings, dict):
+ assert path_settings.get("path_pattern") == "logs/{unique_id}.jsonl"
+ assert path_settings.get("timestamp_format") == "%Y%m%d"
+ else:
+ assert path_settings.path_pattern == "logs/{unique_id}.jsonl"
+ assert path_settings.timestamp_format == "%Y%m%d"
-def test_literal_exporters_become_typed_configs():
- settings = OpenTelemetrySettings(exporters=["console", "file", "otlp"])
+# ============================================================================
+# Key-Discriminated Exporter Tests (dict key, no 'type' field)
+# ============================================================================
+
+
+def test_v3_dict_key_discriminator():
+ """Test key-discriminated format: exporters use dict keys as discriminators."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {"console": {}},
+ {"file": {"path": "/var/log/traces.jsonl"}},
+ {"otlp": {"endpoint": "http://collector:4318/v1/traces"}},
+ ],
+ )
+
+ assert len(settings.exporters) == 3
+ _assert_console_exporter(settings.exporters[0])
+ _assert_file_exporter(settings.exporters[1], path="/var/log/traces.jsonl")
+ _assert_otlp_exporter(
+ settings.exporters[2], endpoint="http://collector:4318/v1/traces"
+ )
+
+
+def test_v3_multiple_exporters_same_type():
+ """Test key-discriminated format supports multiple exporters of the same type."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {"otlp": {"endpoint": "http://primary-collector:4318"}},
+ {
+ "otlp": {
+ "endpoint": "http://backup-collector:4318",
+ "headers": {"X-Region": "us-west"},
+ }
+ },
+ {"otlp": {"endpoint": "https://cloud-collector.example.com:4318"}},
+ ],
+ )
assert len(settings.exporters) == 3
- assert [type(exporter) for exporter in settings.exporters] == [
- ConsoleExporterSettings,
- FileExporterSettings,
- OTLPExporterSettings,
- ]
+ _assert_otlp_exporter(
+ settings.exporters[0], endpoint="http://primary-collector:4318"
+ )
+ _assert_otlp_exporter(
+ settings.exporters[1],
+ endpoint="http://backup-collector:4318",
+ headers={"X-Region": "us-west"},
+ )
+ _assert_otlp_exporter(
+ settings.exporters[2], endpoint="https://cloud-collector.example.com:4318"
+ )
-def test_settings_default_construction_uses_typed_exporters():
+def test_v3_file_exporter_with_advanced_path_settings():
+ """Test key-discriminated file exporter with complex path_settings."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {
+ "file": {
+ "path": "a/b/c/d",
+ "path_settings": {
+ "path_pattern": "logs/mcp-agent-{unique_id}.jsonl",
+ "unique_id": "timestamp",
+ "timestamp_format": "%Y%m%d_%H%M%S",
+ },
+ }
+ }
+ ],
+ )
+
+ assert len(settings.exporters) == 1
+ file_exp = settings.exporters[0]
+ _assert_file_exporter(file_exp, path="a/b/c/d")
+ file_config = file_exp["file"]
+ path_settings = file_config.get("path_settings")
+ assert path_settings is not None
+ if isinstance(path_settings, dict):
+ assert path_settings.get("path_pattern") == "logs/mcp-agent-{unique_id}.jsonl"
+ assert path_settings.get("unique_id") == "timestamp"
+ assert path_settings.get("timestamp_format") == "%Y%m%d_%H%M%S"
+ else:
+ assert path_settings.path_pattern == "logs/mcp-agent-{unique_id}.jsonl"
+ assert path_settings.unique_id == "timestamp"
+ assert path_settings.timestamp_format == "%Y%m%d_%H%M%S"
+
+
+def test_v3_console_exporter_empty_dict():
+ """Test key-discriminated console exporter with empty dict (no extra settings needed)."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[{"console": {}}],
+ )
+
+ assert len(settings.exporters) == 1
+ _assert_console_exporter(settings.exporters[0])
+
+
+# ============================================================================
+# Cross-Version Compatibility Tests
+# ============================================================================
+
+
+def test_mixed_v1_and_v3_string_and_dict():
+ """Test mixing string exporters with key-discriminated dict syntax in the same config."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ "console", # String exporter
+ {"file": {"path": "/tmp/trace.jsonl"}}, # Key-discriminated dict
+ ],
+ )
+
+ assert len(settings.exporters) == 2
+ _assert_console_exporter(settings.exporters[0])
+ _assert_file_exporter(settings.exporters[1], path="/tmp/trace.jsonl")
+
+
+def test_v2_to_v3_conversion():
+ """Test that type-discriminated format is automatically converted to key-discriminated internal format."""
+ v2_settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=[
+ {"type": "console"},
+ {
+ "type": "otlp",
+ "endpoint": "http://collector:4318",
+ "headers": {"Auth": "Bearer xyz"},
+ },
+ ],
+ )
+
+ assert len(v2_settings.exporters) == 2
+ _assert_console_exporter(v2_settings.exporters[0])
+ _assert_otlp_exporter(
+ v2_settings.exporters[1],
+ endpoint="http://collector:4318",
+ headers={"Auth": "Bearer xyz"},
+ )
+
+
+def test_v1_legacy_fields_removed_after_finalization():
+ """Test that legacy top-level fields are removed from the model after conversion."""
+ settings = OpenTelemetrySettings(
+ enabled=True,
+ exporters=["file"],
+ path="/tmp/trace.jsonl",
+ )
+
+ # Legacy fields should be removed after finalization
+ assert not hasattr(settings, "path")
+ assert not hasattr(settings, "path_settings")
+
+
+# ============================================================================
+# Error Handling Tests
+# ============================================================================
+
+
+def test_unsupported_exporter_type_raises():
+ """Test that unsupported exporter types raise ValidationError from Pydantic."""
+ with pytest.raises(ValidationError):
+ OpenTelemetrySettings(exporters=["console", "invalid_exporter"])
+
+
+def test_invalid_exporter_format_raises():
+ """Test that invalid exporter formats raise ValidationError."""
+ with pytest.raises(
+ ValidationError, match="OpenTelemetry exporters must be strings"
+ ):
+ OpenTelemetrySettings(
+ exporters=[{"foo": "bar", "baz": "qux"}]
+ ) # Multi-key dict
+
+
+def test_invalid_dict_exporter_with_multi_keys_raises():
+ """Test that key-discriminated dict exporters with multiple keys raise ValidationError."""
+ with pytest.raises(
+ ValidationError, match="OpenTelemetry exporters must be strings"
+ ):
+ OpenTelemetrySettings(
+ exporters=[
+ {"console": {}, "file": {}} # Invalid: two keys in one dict
+ ]
+ )
+
+
+# ============================================================================
+# Integration Tests with Full Settings
+# ============================================================================
+
+
+def test_settings_default_construction():
+ """Test that default Settings construction yields a list of exporters."""
settings = Settings()
assert isinstance(settings.otel, OpenTelemetrySettings)
- # Default exporters should still be a typed list instance
assert isinstance(settings.otel.exporters, list)
+
+
+def test_v1_full_config_via_settings():
+ """Test string exporter config loaded via full Settings model."""
+ settings = Settings(
+ otel={
+ "enabled": True,
+ "exporters": ["console", "otlp"],
+ "otlp_settings": {"endpoint": "http://collector:4318/v1/traces"},
+ }
+ )
+
+ assert settings.otel.enabled is True
+ assert len(settings.otel.exporters) == 2
+ _assert_console_exporter(settings.otel.exporters[0])
+ _assert_otlp_exporter(
+ settings.otel.exporters[1], endpoint="http://collector:4318/v1/traces"
+ )
+
+
+def test_v2_full_config_via_settings():
+ """Test type-discriminated config loaded via full Settings model."""
+ settings = Settings(
+ otel={
+ "enabled": True,
+ "exporters": [
+ {"type": "console"},
+ {"type": "file", "path": "/tmp/trace.jsonl"},
+ ],
+ "service_name": "TestApp",
+ }
+ )
+
+ assert settings.otel.enabled is True
+ assert settings.otel.service_name == "TestApp"
+ assert len(settings.otel.exporters) == 2
+ _assert_console_exporter(settings.otel.exporters[0])
+ _assert_file_exporter(settings.otel.exporters[1], path="/tmp/trace.jsonl")
+
+
+def test_v3_full_config_via_settings():
+ """Test key-discriminated config loaded via full Settings model."""
+ settings = Settings(
+ otel={
+ "enabled": True,
+ "exporters": [
+ {"console": {}},
+ {"otlp": {"endpoint": "https://collector.example.com:4318"}},
+ ],
+ "service_name": "V3App",
+ "sample_rate": 0.5,
+ }
+ )
+
+ assert settings.otel.enabled is True
+ assert settings.otel.service_name == "V3App"
+ assert settings.otel.sample_rate == 0.5
+ assert len(settings.otel.exporters) == 2
+ _assert_console_exporter(settings.otel.exporters[0])
+ _assert_otlp_exporter(
+ settings.otel.exporters[1], endpoint="https://collector.example.com:4318"
+ )
+
+
+def test_merge_otel_exporters_from_config_and_secrets():
+ """Test that OTEL exporters from config.yaml and secrets.yaml are merged together."""
+
+ # Simulate config.yaml with one OTLP exporter (public endpoint)
+ config_data = {
+ "otel": {
+ "exporters": [
+ {
+ "otlp": {
+ "endpoint": "https://us.cloud.langfuse.com/api/public/otel/v1/traces",
+ "headers": {"Authorization": "Basic AUTH_STRING"},
+ }
+ }
+ ]
+ }
+ }
+
+ # Simulate secrets.yaml with another OTLP exporter (secret endpoint)
+ secrets_data = {
+ "otel": {
+ "enabled": True,
+ "exporters": [{"otlp": {"endpoint": "https://some-other-otel-exporter"}}],
+ }
+ }
+
+ # Manually perform deep merge as get_settings does internally
+ def deep_merge(base: dict, update: dict, path: tuple = ()) -> dict:
+ """Recursively merge two dictionaries, preserving nested structures.
+
+ Special handling for 'exporters' lists under 'otel' key:
+ - Concatenates lists instead of replacing them
+ - Allows combining exporters from config and secrets files
+ """
+ merged = base.copy()
+ for key, value in update.items():
+ current_path = path + (key,)
+ if (
+ key in merged
+ and isinstance(merged[key], dict)
+ and isinstance(value, dict)
+ ):
+ merged[key] = deep_merge(merged[key], value, current_path)
+ elif (
+ key in merged
+ and isinstance(merged[key], list)
+ and isinstance(value, list)
+ and current_path == ("otel", "exporters")
+ ):
+ # Concatenate exporters lists from config and secrets
+ merged[key] = merged[key] + value
+ else:
+ merged[key] = value
+ return merged
+
+ merged = deep_merge(config_data, secrets_data)
+ settings = Settings(**merged)
+
+ # Verify both exporters are present
+ assert settings.otel.enabled is True
+ assert len(settings.otel.exporters) == 2
+
+ # Verify first exporter (from config.yaml)
+ _assert_otlp_exporter(
+ settings.otel.exporters[0],
+ endpoint="https://us.cloud.langfuse.com/api/public/otel/v1/traces",
+ headers={"Authorization": "Basic AUTH_STRING"},
+ )
+
+ # Verify second exporter (from secrets.yaml)
+ _assert_otlp_exporter(
+ settings.otel.exporters[1], endpoint="https://some-other-otel-exporter"
+ )
+
+
+def test_merge_non_otel_lists_are_replaced_not_concatenated():
+ """Test that non-OTEL lists are replaced, not concatenated (default behavior)."""
+
+ # Manually perform deep merge as get_settings does internally
+ def deep_merge(base: dict, update: dict, path: tuple = ()) -> dict:
+ """Recursively merge two dictionaries, preserving nested structures.
+
+ Special handling for 'exporters' lists under 'otel' key:
+ - Concatenates lists instead of replacing them
+ - Allows combining exporters from config and secrets files
+ """
+ merged = base.copy()
+ for key, value in update.items():
+ current_path = path + (key,)
+ if (
+ key in merged
+ and isinstance(merged[key], dict)
+ and isinstance(value, dict)
+ ):
+ merged[key] = deep_merge(merged[key], value, current_path)
+ elif (
+ key in merged
+ and isinstance(merged[key], list)
+ and isinstance(value, list)
+ and current_path == ("otel", "exporters")
+ ):
+ # Concatenate exporters lists from config and secrets
+ merged[key] = merged[key] + value
+ else:
+ merged[key] = value
+ return merged
+
+ # Test with logger.transports (should be replaced, not concatenated)
+ config_data = {"logger": {"transports": ["console", "file"]}}
+ secrets_data = {"logger": {"transports": ["http"]}}
+ merged = deep_merge(config_data, secrets_data)
+ # Should be replaced, not concatenated
+ assert merged["logger"]["transports"] == ["http"]
+ assert len(merged["logger"]["transports"]) == 1
+
+ # Test with mcp.servers (dict, should be merged)
+ config_data = {
+ "mcp": {"servers": {"fetch": {"command": "uvx", "args": ["mcp-server-fetch"]}}}
+ }
+ secrets_data = {
+ "mcp": {
+ "servers": {
+ "filesystem": {
+ "command": "npx",
+ "args": ["-y", "@modelcontextprotocol/server-filesystem"],
+ }
+ }
+ }
+ }
+ merged = deep_merge(config_data, secrets_data)
+ # Both servers should be present (dicts are merged)
+ assert "fetch" in merged["mcp"]["servers"]
+ assert "filesystem" in merged["mcp"]["servers"]
+
+ # Test with a nested list that's NOT otel.exporters (should be replaced)
+ config_data = {"agents": {"search_paths": [".claude/agents", "~/.claude/agents"]}}
+ secrets_data = {"agents": {"search_paths": [".mcp-agent/agents"]}}
+ merged = deep_merge(config_data, secrets_data)
+ # Should be replaced, not concatenated
+ assert merged["agents"]["search_paths"] == [".mcp-agent/agents"]
+ assert len(merged["agents"]["search_paths"]) == 1