Skip to content

Commit 60187a7

Browse files
authored
Merge pull request #34 from sipercai/liuyu/langchain
[ISSUE#16] Add support for LangChain.
2 parents cf3948d + 7960615 commit 60187a7

File tree

17 files changed

+2964
-0
lines changed

17 files changed

+2964
-0
lines changed
Lines changed: 127 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,127 @@
1+
# OpenTelemetry LangChain Instrumentation
2+
3+
This package provides OpenTelemetry instrumentation for LangChain applications, allowing you to automatically trace and monitor your LangChain workflows. For details on usage and installation of LoongSuite and Jaeger, please refer to [LoongSuite Documentation](https://github.com/alibaba/loongsuite-python-agent/blob/main/README.md).
4+
5+
## Installation
6+
7+
```bash
8+
git clone https://github.com/alibaba/loongsuite-python-agent.git
9+
cd loongsuite-python-agent
10+
pip install ./instrumentation-genai/opentelemetry-instrumentation-langchain
11+
```
12+
13+
## RUN
14+
15+
### Build the Example
16+
17+
Follow the official [LangChain Documentation](https://python.langchain.com/docs/introduction/) to create a sample file named `demo.py`. You can also try the Tongyi model, as in the example below (see https://python.langchain.com/docs/integrations/llms/tongyi/).
18+
19+
```python
20+
from langchain_core.messages import HumanMessage, SystemMessage
21+
from langchain_community.llms.tongyi import Tongyi
22+
23+
chatLLM = Tongyi(model="qwen-turbo")
24+
messages = [
25+
SystemMessage(
26+
content="You are a helpful assistant that translates English to French."
27+
),
28+
HumanMessage(
29+
content="Translate this sentence from English to French. I love programming."
30+
),
31+
]
32+
res = chatLLM.invoke(messages)
33+
print(res)
34+
```
35+
36+
## Quick Start
37+
38+
You can automatically instrument your LangChain application using the `opentelemetry-instrument` command:
39+
40+
```bash
41+
export OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true
42+
opentelemetry-instrument \
43+
--traces_exporter console \
44+
--metrics_exporter console \
45+
python your_langchain_app.py
46+
```
47+
If everything is working correctly, you should see output similar to the following:
48+
```json
49+
{
50+
"name": "Tongyi",
51+
"context": {
52+
"trace_id": "0x61d2c954558c3988f42770a946ea877e",
53+
"span_id": "0x7bb229d6f75e52ad",
54+
"trace_state": "[]"
55+
},
56+
"kind": "SpanKind.INTERNAL",
57+
"parent_id": null,
58+
"start_time": "2025-08-14T07:30:38.783413Z",
59+
"end_time": "2025-08-14T07:30:39.321573Z",
60+
"status": {
61+
"status_code": "OK"
62+
},
63+
"attributes": {
64+
"gen_ai.span.kind": "llm",
65+
"input.value": "{\"prompts\": [\"System: You are a helpful assistant that translates English to French.\\nHuman: Translate this sentence from English to French. I love programming.\"]}",
66+
"input.mime_type": "application/json",
67+
"output.value": "{\"generations\": [[{\"text\": \"J'adore la programmation.\", \"generation_info\": {\"finish_reason\": \"stop\", \"request_id\": \"463d2249-6424-9eef-8665-6ef88d4fcc7a\", \"token_usage\": {\"input_tokens\": 39, \"output_tokens\": 8, \"total_tokens\": 47, \"prompt_tokens_details\": {\"cached_tokens\": 0}}}, \"type\": \"Generation\"}]], \"llm_output\": {\"model_name\": \"qwen-turbo\"}, \"run\": null, \"type\": \"LLMResult\"}",
68+
"output.mime_type": "application/json",
69+
"gen_ai.prompt.0.content": "System: You are a helpful assistant that translates English to French.\nHuman: Translate this sentence from English to French. I love programming.",
70+
"gen_ai.response.finish_reasons": "stop",
71+
"gen_ai.usage.prompt_tokens": 39,
72+
"gen_ai.usage.completion_tokens": 8,
73+
"gen_ai.usage.total_tokens": 47,
74+
"gen_ai.completion": [
75+
"J'adore la programmation."
76+
],
77+
"gen_ai.response.model": "qwen-turbo",
78+
"gen_ai.request.model": "qwen-turbo",
79+
"metadata": "{\"ls_provider\": \"tongyi\", \"ls_model_type\": \"llm\", \"ls_model_name\": \"qwen-turbo\"}"
80+
},
81+
"events": [],
82+
"links": [],
83+
"resource": {
84+
"attributes": {
85+
"telemetry.sdk.language": "python",
86+
"telemetry.sdk.name": "opentelemetry",
87+
"telemetry.sdk.version": "1.35.0",
88+
"service.name": "langchain_loon",
89+
"telemetry.auto.version": "0.56b0"
90+
},
91+
"schema_url": ""
92+
}
93+
}
94+
95+
```
96+
97+
## Forwarding OTLP Data to the Backend
98+
```shell
99+
export OTEL_SERVICE_NAME=<service_name>
100+
export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf
101+
export OTEL_EXPORTER_OTLP_TRACES_ENDPOINT=<trace_endpoint>
102+
export OTEL_EXPORTER_OTLP_METRICS_ENDPOINT=<metrics_endpoint>
103+
104+
export OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true
105+
106+
opentelemetry-instrument <your_run_command>
107+
108+
```
109+
110+
111+
## Requirements
112+
113+
- Python >= 3.8
114+
- LangChain >= 0.1.0
115+
- OpenTelemetry >= 1.20.0
116+
117+
## Contributing
118+
119+
Contributions are welcome! Please feel free to submit a Pull Request.
120+
121+
## Acknowledgments
122+
123+
This instrumentation was inspired by and builds upon the excellent work done by the [OpenInference](https://github.com/Arize-ai/openinference) project. We acknowledge their contributions to the OpenTelemetry instrumentation ecosystem for AI/ML frameworks.
124+
125+
## License
126+
127+
This project is licensed under the Apache License 2.0.
815 KB
Loading
Lines changed: 68 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,68 @@
1+
[build-system]
2+
requires = ["hatchling"]
3+
build-backend = "hatchling.build"
4+
5+
[project]
6+
name = "opentelemetry-instrumentation-langchain"
7+
dynamic = ["version"]
8+
description = "OpenTelemetry LangChain Instrumentation"
9+
readme = "README.md"
10+
license = "Apache-2.0"
11+
requires-python = ">=3.8, <3.13"
12+
authors = [
13+
    { name = "LoongSuite Python Agent Authors" },
14+
]
15+
classifiers = [
16+
"Development Status :: 5 - Production/Stable",
17+
"Intended Audience :: Developers",
18+
"License :: OSI Approved :: Apache Software License",
19+
"Programming Language :: Python",
20+
"Programming Language :: Python :: 3",
21+
"Programming Language :: Python :: 3.8",
22+
"Programming Language :: Python :: 3.9",
23+
"Programming Language :: Python :: 3.10",
24+
"Programming Language :: Python :: 3.11",
25+
    "Programming Language :: Python :: 3.12",
27+
]
28+
dependencies = [
29+
"wrapt",
30+
"opentelemetry-api",
31+
"opentelemetry-instrumentation",
32+
"opentelemetry-semantic-conventions",
33+
]
34+
35+
[project.optional-dependencies]
36+
instruments = [
37+
"langchain_core >= 0.1.0",
38+
]
39+
test = [
40+
"pytest",
41+
"opentelemetry-sdk",
42+
"langchain_core == 0.1.8",
43+
"langchain == 0.1.0",
44+
"langchain_openai == 0.0.2",
45+
"langchain-community == 0.0.10",
46+
"respx",
47+
"numpy",
48+
"openai",
49+
"httpx",
50+
]
51+
type-check = [
52+
"langchain_core == 0.1.0",
53+
]
54+
55+
[tool.hatch.version]
56+
path = "src/opentelemetry/instrumentation/langchain/version.py"
57+
58+
[tool.hatch.build.targets.sdist]
59+
include = [
60+
"src",
61+
"tests",
62+
]
63+
64+
[tool.hatch.build.targets.wheel]
65+
packages = ["src/opentelemetry"]
66+
67+
[project.entry-points.opentelemetry_instrumentor]
68+
langchain = "opentelemetry.instrumentation.langchain:LangChainInstrumentor"
Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
from typing import TYPE_CHECKING, Any, Callable, Collection, Type
2+
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor # type: ignore
3+
from opentelemetry import trace as trace_api
4+
from wrapt import wrap_function_wrapper
5+
from opentelemetry.instrumentation.langchain.package import _instruments
6+
from opentelemetry.metrics import get_meter, Meter
7+
8+
if TYPE_CHECKING:
9+
from langchain_core.callbacks import BaseCallbackManager
10+
from opentelemetry.instrumentation.langchain.internal._tracer import LoongsuiteTracer
11+
12+
13+
class LangChainInstrumentor(BaseInstrumentor):  # type: ignore
    """OpenTelemetry instrumentor for LangChain.

    Wraps ``BaseCallbackManager.__init__`` so that every callback manager
    LangChain creates carries a tracing callback handler.
    """

    def instrumentation_dependencies(self) -> Collection[str]:
        """Return the package specifiers this instrumentation supports."""
        return _instruments

    def _instrument(self, **kwargs: Any) -> None:
        # Use the caller-supplied provider when given; otherwise fall back
        # to the globally registered one.
        tracer_provider = kwargs.get("tracer_provider")
        if not tracer_provider:
            tracer_provider = trace_api.get_tracer_provider()
        tracer = trace_api.get_tracer(__name__, '', tracer_provider)
        # Imported lazily so langchain_core is only required once
        # instrumentation is actually enabled.
        from opentelemetry.instrumentation.langchain.internal._tracer import LoongsuiteTracer
        meter = get_meter(
            __name__,
            meter_provider=kwargs.get("meter_provider"),
            schema_url="https://opentelemetry.io/schemas/1.11.0",
        )
        wrap_function_wrapper(
            module="langchain_core.callbacks",
            name="BaseCallbackManager.__init__",
            wrapper=_BaseCallbackManagerInit(tracer=tracer, meter=meter, cls=LoongsuiteTracer),
        )

    def _uninstrument(self, **kwargs: Any) -> None:
        # NOTE(review): the wrapper installed in _instrument is never removed
        # here, so uninstrumentation is currently a no-op — confirm intended.
        pass
40+
41+
42+
class _BaseCallbackManagerInit:
    """Callable wrapper for ``BaseCallbackManager.__init__``.

    Builds one handler instance from *cls* up front and attaches it to each
    callback manager after the manager's original ``__init__`` has run.
    """

    __slots__ = ("_tracer_instance",)

    def __init__(self, tracer: "trace_api.Tracer", meter: "Meter", cls: Type["LoongsuiteTracer"]):
        # A single shared handler for every manager created after
        # instrumentation is enabled.
        self._tracer_instance = cls(tracer=tracer, meter=meter)

    def __call__(
        self,
        wrapped: Callable[..., None],
        instance: "BaseCallbackManager",
        args: Any,
        kwargs: Any,
    ) -> None:
        # Let the real __init__ finish before touching the handler list.
        wrapped(*args, **kwargs)
        # Skip registration when a handler of the same type is already
        # inherited, so copied/merged managers do not get duplicates.
        duplicate = any(
            isinstance(handler, type(self._tracer_instance))
            for handler in instance.inheritable_handlers
        )
        if not duplicate:
            instance.add_handler(self._tracer_instance, True)
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
# Internal implementation details for OpenTelemetry LangChain instrumentation

0 commit comments

Comments
 (0)