Skip to content

Commit dc77276

Browse files
authored
Merge pull request #45 from sipercai/agentscope
[ISSUE#44] Add support for AgentScope V1.
2 parents bc34cdb + b8c68f4 commit dc77276

35 files changed

+2861
-63
lines changed

instrumentation-genai/opentelemetry-instrumentation-agentscope/README.md

Lines changed: 163 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,168 @@ pip install ./instrumentation-genai/opentelemetry-instrumentation-agentscope
2424

2525
## RUN
2626

27+
## AgentScope V1
28+
29+
Follow the official [agentscope examples](https://github.com/agentscope-ai/agentscope/tree/main/examples) to instrument an example demo such as `examples/functionality/structured_output/main.py`
30+
31+
## Quick Start
32+
33+
You can automatically instrument your AgentScope application using the `opentelemetry-instrument` command:
34+
35+
```bash
36+
opentelemetry-instrument \
37+
--traces_exporter console \
38+
--metrics_exporter console \
39+
python examples/functionality/structured_output/main.py
40+
```
41+
If everything is working correctly, you should see logs similar to the following:
42+
```json
43+
{
44+
"name": "invoke_agent Friday",
45+
"context": {
46+
"trace_id": "0xfe4bc5bb9da01f6c3af93d16628e6f6c",
47+
"span_id": "0x5198826f9fc1b744",
48+
"trace_state": "[]"
49+
},
50+
"kind": "SpanKind.INTERNAL",
51+
"parent_id": null,
52+
"start_time": "2025-09-18T07:14:07.458406Z",
53+
"end_time": "2025-09-18T07:14:09.388674Z",
54+
"status": {
55+
"status_code": "OK"
56+
},
57+
"attributes": {
58+
"gen_ai.span.kind": "agent",
59+
"gen_ai.conversation.id": "cSqkWAVi54oaRU8Msg6uDh",
60+
"gen_ai.agent.id": "UzohJ5i8CMueXaQpdp4ReS",
61+
"gen_ai.agent.name": "Friday",
62+
"gen_ai.agent.description": "A ReAct agent implementation in AgentScope, which supports\n\n- Realtime steering\n- API-based (parallel) tool calling\n- Hooks around reasoning, acting, reply, observe and print functions\n- Structured output generation",
63+
"gen_ai.operation.name": "invoke_agent",
64+
"gen_ai.system_instructions": "You are a helpful assistant named Friday.",
65+
"gen_ai.request.model": "qwen-max",
66+
"gen_ai.input.messages": "[{\"role\": \"user\", \"parts\": [{\"type\": \"text\", \"content\": \"Choose one of your favorite fruit\"}], \"name\": \"user\"}]",
67+
"gen_ai.output.messages": "{\"role\": \"assistant\", \"parts\": [{\"type\": \"text\", \"content\": \"I choose apple as one of my favorite fruits. It's crisp, sweet, and very refreshing!\"}], \"name\": \"Friday\"}"
68+
},
69+
"events": [],
70+
"links": [],
71+
"resource": {
72+
"attributes": {
73+
"telemetry.sdk.language": "python",
74+
"telemetry.sdk.name": "opentelemetry",
75+
"telemetry.sdk.version": "1.37.0",
76+
"telemetry.auto.version": "0.57b0",
77+
"service.name": "unknown_service"
78+
},
79+
"schema_url": ""
80+
}
81+
}
82+
{
83+
"name": "chat qwen-max",
84+
"context": {
85+
"trace_id": "0xfe4bc5bb9da01f6c3af93d16628e6f6c",
86+
"span_id": "0x851e0d4ef1b184e9",
87+
"trace_state": "[]"
88+
},
89+
"kind": "SpanKind.INTERNAL",
90+
"parent_id": "0x5198826f9fc1b744",
91+
"start_time": "2025-09-18T07:14:07.459576Z",
92+
"end_time": "2025-09-18T07:14:09.387582Z",
93+
"status": {
94+
"status_code": "OK"
95+
},
96+
"attributes": {
97+
"gen_ai.span.kind": "llm",
98+
"gen_ai.conversation.id": "cSqkWAVi54oaRU8Msg6uDh",
99+
"gen_ai.provider.name": "dashscope",
100+
"gen_ai.operation.name": "chat",
101+
"gen_ai.request.model": "qwen-max",
102+
"gen_ai.input.messages": "[{\"role\": \"system\", \"parts\": [{\"type\": \"text\", \"content\": \"You are a helpful assistant named Friday.\"}]}, {\"role\": \"user\", \"parts\": [{\"type\": \"text\", \"content\": \"Please introduce Einstein\"}]}, {\"role\": \"assistant\", \"parts\": [{\"type\": \"tool_call\", \"name\": \"generate_response\", \"id\": \"call_cd1858cf64ad402f8f64e6\", \"arguments\": {\"name\": \"Albert Einstein\", \"age\": 76, \"intro\": \"Albert Einstein was a German-born theoretical physicist, widely acknowledged to be one of the greatest and most influential physicists of all time.\", \"honors\": [\"Nobel Prize in Physics (1921)\", \"Copley Medal (1925)\", \"Max Planck Medal (1929)\", \"Time Magazine's Person of the Century (1999)\"], \"response\": \"Albert Einstein, who lived to be 76 years old, was a renowned theoretical physicist. He is best known for developing the theory of relativity, but he also made significant contributions to the development of the philosophy of science. Einstein received numerous honors, including the Nobel Prize in Physics in 1921, the Copley Medal in 1925, the Max Planck Medal in 1929, and he was even named Time Magazine's Person of the Century in 1999.\"}}]}, {\"role\": \"tool\", \"parts\": [{\"type\": \"tool_call_response\", \"response\": \"Successfully generated response.\", \"id\": \"call_cd1858cf64ad402f8f64e6\"}]}, {\"role\": \"assistant\", \"parts\": [{\"type\": \"text\", \"content\": \"Albert Einstein, who lived to be 76 years old, was a renowned theoretical physicist. He is best known for developing the theory of relativity, but he also made significant contributions to the development of the philosophy of science. 
Einstein received numerous honors, including the Nobel Prize in Physics in 1921, the Copley Medal in 1925, the Max Planck Medal in 1929, and he was even named Time Magazine's Person of the Century in 1999.\"}]}, {\"role\": \"user\", \"parts\": [{\"type\": \"text\", \"content\": \"Choose one of your favorite fruit\"}]}]",
103+
"gen_ai.response.id": "2025-09-18 15:14:09.383_3641e0",
104+
"gen_ai.output.type": "text",
105+
"gen_ai.response.finish_reasons": "[\"stop\"]",
106+
"gen_ai.usage.input_tokens": 594,
107+
"gen_ai.usage.output_tokens": 45,
108+
"gen_ai.output.messages": "[{\"role\": \"assistant\", \"parts\": [{\"type\": \"tool_call\", \"id\": \"call_7f25ce38ce304f4eb20a0a\", \"name\": \"generate_response\", \"arguments\": {\"choice\": \"apple\", \"response\": \"I choose apple as one of my favorite fruits. It's crisp, sweet, and very refreshing!\"}}], \"finish_reason\": \"stop\"}]"
109+
},
110+
"events": [],
111+
"links": [],
112+
"resource": {
113+
"attributes": {
114+
"telemetry.sdk.language": "python",
115+
"telemetry.sdk.name": "opentelemetry",
116+
"telemetry.sdk.version": "1.37.0",
117+
"telemetry.auto.version": "0.57b0",
118+
"service.name": "unknown_service"
119+
},
120+
"schema_url": ""
121+
}
122+
}
123+
{
124+
"name": "execute_tool generate_response",
125+
"context": {
126+
"trace_id": "0xfe4bc5bb9da01f6c3af93d16628e6f6c",
127+
"span_id": "0xa5b483e5de02c128",
128+
"trace_state": "[]"
129+
},
130+
"kind": "SpanKind.INTERNAL",
131+
"parent_id": "0x5198826f9fc1b744",
132+
"start_time": "2025-09-18T07:14:09.388282Z",
133+
"end_time": "2025-09-18T07:14:09.388612Z",
134+
"status": {
135+
"status_code": "OK"
136+
},
137+
"attributes": {
138+
"gen_ai.span.kind": "tool",
139+
"gen_ai.conversation.id": "cSqkWAVi54oaRU8Msg6uDh",
140+
"gen_ai.tool.call.id": "call_7f25ce38ce304f4eb20a0a",
141+
"gen_ai.tool.name": "generate_response",
142+
"gen_ai.tool.description": "Generate a response. Note only the input argument `response` is\n\nvisible to the others, you should include all the necessary\ninformation in the `response` argument.",
143+
"gen_ai.tool.call.arguments": "{\"choice\": \"apple\", \"response\": \"I choose apple as one of my favorite fruits. It's crisp, sweet, and very refreshing!\"}",
144+
"gen_ai.operation.name": "execute_tool",
145+
"gen_ai.tool.call.result": "[{\"type\": \"text\", \"text\": \"Successfully generated response.\"}]"
146+
},
147+
"events": [],
148+
"links": [],
149+
"resource": {
150+
"attributes": {
151+
"telemetry.sdk.language": "python",
152+
"telemetry.sdk.name": "opentelemetry",
153+
"telemetry.sdk.version": "1.37.0",
154+
"telemetry.auto.version": "0.57b0",
155+
"service.name": "unknown_service"
156+
},
157+
"schema_url": ""
158+
}
159+
}
160+
161+
```
162+
163+
## Forwarding OTLP Data to the Backend
164+
```shell
165+
export OTEL_SERVICE_NAME=<service_name>
166+
export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
167+
export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=<trace_endpoint>
168+
export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=<metrics_endpoint>
169+
170+
opentelemetry-instrument <your_run_command>
171+
172+
```
173+
174+
### Results
175+
#### Export to aliyun xtrace
176+
177+
![image.png](_assets/img_v1/Aliyun/agent_description.png)
178+
![image.png](_assets/img_v1/Aliyun/llm_info.png)
179+
![image.png](_assets/img_v1/Aliyun/tool_exec.png)
180+
181+
#### Export to agentscope studio
182+
183+
This version supports OTel data ingestion — for integration, please use [agentscope studio](https://github.com/agentscope-ai/agentscope-studio/pull/7)
184+
![image.png](_assets/img_v1/AgentScopeStudio/dashboard.png)
185+
![image.png](_assets/img_v1/AgentScopeStudio/detail.png)
186+
187+
## AgentScope V0
188+
27189
### Build the Example
28190

29191
Follow the official [agentscope documentation](https://doc.agentscope.io/) to create a sample file named `demo.py`
@@ -214,4 +376,4 @@ python demo.py
214376

215377
Access the Jaeger UI to view the collected trace data. You should now see trace information being properly received.
216378

217-
![image.png](_assets/img/results.png)
379+
![image.png](_assets/img_v0/results.png)

instrumentation-genai/opentelemetry-instrumentation-agentscope/_assets/img/results.png renamed to instrumentation-genai/opentelemetry-instrumentation-agentscope/_assets/img_v0/results.png

File renamed without changes.
1.64 MB
Loading
1.21 MB
Loading
1.48 MB
Loading
1.17 MB
Loading
1.28 MB
Loading

instrumentation-genai/opentelemetry-instrumentation-agentscope/pyproject.toml

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,9 @@ classifiers = [
2626
"Programming Language :: Python :: 3.13",
2727
]
2828
dependencies = [
29-
# "opentelemetry-api~=1.25.0",
30-
# "opentelemetry-instrumentation==0.46b0",
29+
"opentelemetry-api>=1.37.0",
30+
"opentelemetry-sdk>=1.37.0",
31+
"opentelemetry-semantic-conventions>=0.58b0",
3132
"wrapt",
3233
]
3334

Lines changed: 24 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,45 +1,35 @@
1-
from typing import Any, Collection
2-
from wrapt import wrap_function_wrapper
3-
from opentelemetry.instrumentation.utils import unwrap
4-
from opentelemetry.instrumentation.agentscope.package import _instruments
5-
from opentelemetry.instrumentation.agentscope._wrapper import AgentscopeRequestWrapper, AgentscopeToolcallWrapper
6-
from opentelemetry import trace as trace_api
1+
# -*- coding: utf-8 -*-
2+
from typing import Any, Collection, Callable, Optional
73
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
8-
from opentelemetry.instrumentation.version import (
9-
__version__,
10-
)
11-
12-
"""OpenTelemetry exporters for agentscope instrumentation https://github.com/modelscope/agentscope"""
4+
from opentelemetry.instrumentation.agentscope.package import _instruments
5+
from opentelemetry.instrumentation.agentscope.utils import is_agentscope_v1
6+
from typing_extensions import Coroutine
137

14-
_MODULE = "agentscope.models.model"
15-
_TOOLKIT = "agentscope.service.service_toolkit"
168
__all__ = ["AgentScopeInstrumentor"]
179

1810
class AgentScopeInstrumentor(BaseInstrumentor): # type: ignore
19-
"""
20-
An instrumentor for agentscope.
21-
"""
11+
12+
def __init__(self,):
13+
self._meter = None
2214

2315
def instrumentation_dependencies(self) -> Collection[str]:
2416
return _instruments
2517

2618
def _instrument(self, **kwargs: Any) -> None:
27-
if not (tracer_provider := kwargs.get("tracer_provider")):
28-
tracer_provider = trace_api.get_tracer_provider()
29-
tracer = trace_api.get_tracer(__name__, __version__, tracer_provider)
30-
wrap_function_wrapper(
31-
module=_MODULE,
32-
name="ModelWrapperBase.__init__",
33-
wrapper=AgentscopeRequestWrapper(tracer=tracer),
34-
)
35-
wrap_function_wrapper(
36-
module=_TOOLKIT,
37-
name="ServiceToolkit._execute_func",
38-
wrapper=AgentscopeToolcallWrapper(tracer=tracer),
39-
)
19+
"""Enable AgentScope instrumentation."""
20+
if is_agentscope_v1():
21+
from opentelemetry.instrumentation.agentscope.v1 import AgentScopeV1Instrumentor
22+
AgentScopeV1Instrumentor().instrument(**kwargs)
23+
else:
24+
from opentelemetry.instrumentation.agentscope.v0 import AgentScopeV0Instrumentor
25+
AgentScopeV0Instrumentor().instrument(**kwargs)
26+
4027
def _uninstrument(self, **kwargs: Any) -> None:
41-
42-
import agentscope.models.model
43-
unwrap(agentscope.models.model.ModelWrapperBase, "__init__")
44-
import agentscope.service.service_toolkit
45-
unwrap(agentscope.service.service_toolkit.ServiceToolkit, "_execute_func")
28+
29+
if is_agentscope_v1():
30+
from opentelemetry.instrumentation.agentscope.v1 import AgentScopeV1Instrumentor
31+
AgentScopeV1Instrumentor().uninstrument(**kwargs)
32+
else:
33+
from opentelemetry.instrumentation.agentscope.v0 import AgentScopeV0Instrumentor
34+
AgentScopeV0Instrumentor().uninstrument(**kwargs)
35+
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,33 @@
1+
# -*- coding: utf-8 -*-
2+
"""
3+
OpenTelemetry AgentScope Instrumentation Shared Module
4+
5+
This module provides shared components for AgentScope instrumentation,
6+
including GenAI semantic conventions compliance, telemetry options,
7+
and common attribute definitions.
8+
"""
9+
10+
from .constants import *
11+
from .telemetry_options import GenAITelemetryOptions, get_telemetry_options, set_telemetry_options
12+
from .attributes import *
13+
14+
__all__ = [
15+
# Constants
16+
"OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT",
17+
"OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT_MAX_LENGTH",
18+
"OTEL_INSTRUMENTATION_GENAI_MESSAGE_STRATEGY",
19+
# Telemetry options
20+
"GenAITelemetryOptions",
21+
"get_telemetry_options",
22+
"set_telemetry_options",
23+
# Attributes
24+
"LLMRequestAttributes",
25+
"LLMResponseAttributes",
26+
"EmbeddingRequestAttributes",
27+
"AgentRequestAttributes",
28+
"ToolRequestAttributes",
29+
# Enums
30+
"GenAiSpanKind",
31+
# Attribute constants
32+
"CommonAttributes",
33+
]

0 commit comments

Comments
 (0)