Skip to content

Commit fd9baed

Browse files
committed
merge
2 parents 5f14b58 + 216609c commit fd9baed

File tree

15 files changed

+376
-26
lines changed

15 files changed

+376
-26
lines changed

pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ classifiers = [
1818
"Operating System :: OS Independent",
1919
]
2020
dependencies = [
21-
'trace-attributes==7.1.1',
21+
'trace-attributes==7.2.0',
2222
'opentelemetry-api>=1.25.0',
2323
'opentelemetry-sdk>=1.25.0',
2424
'opentelemetry-instrumentation>=0.47b0',
@@ -44,6 +44,7 @@ dev = [
4444
"anthropic",
4545
"chromadb",
4646
"qdrant-client",
47+
"graphlit-client",
4748
"python-dotenv",
4849
"pinecone-client",
4950
"langchain",

src/.DS_Store

6 KB
Binary file not shown.
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
import asyncio
2+
from examples.graphlit_example.conversation import complete
3+
from langtrace_python_sdk import with_langtrace_root_span
4+
5+
6+
class GraphlitRunner:
    """Example runner: executes the Graphlit conversation demo under a
    Langtrace root span."""

    @with_langtrace_root_span("GraphlitRun")
    def run(self):
        # Build the coroutine, then drive it to completion on a fresh loop.
        demo = complete()
        asyncio.run(demo)
Lines changed: 87 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,87 @@
1+
import asyncio
from dotenv import find_dotenv, load_dotenv
from langtrace_python_sdk import langtrace
from langtrace_python_sdk.utils.with_root_span import with_langtrace_root_span
from openai import OpenAI
from graphlit import Graphlit
from graphlit_api import input_types, enums, exceptions

# Load API credentials (Graphlit / OpenAI / Langtrace) from a .env file.
_ = load_dotenv(find_dotenv())

# Initialize tracing exactly once. The original called langtrace.init()
# twice (before and after constructing Graphlit), which re-runs SDK setup
# for no benefit; a single call is sufficient.
langtrace.init()

graphlit = Graphlit()
client = OpenAI()
18+
19+
20+
async def complete():
    """Run the end-to-end Graphlit RAG demo.

    Ingests a URI into Graphlit, creates a specification and conversation,
    asks Graphlit to format a RAG prompt, streams an OpenAI completion for
    it, and stores the completion back into the Graphlit conversation.

    Raises nothing to the caller: Graphlit API errors are caught and
    printed; any OpenAI error propagates.
    """
    uri = "https://themixchief.com"
    try:
        # Synchronous ingest so the content is queryable immediately.
        ingest_response = await graphlit.client.ingest_uri(uri=uri, is_synchronous=True)
        content_id = ingest_response.ingest_uri.id if ingest_response.ingest_uri is not None else None

        if content_id is not None:
            print(f'Ingested content [{content_id}]:')

            prompt = "In 1 sentence, what does mixchief do."

            model = "gpt-4o"

            specification_input = input_types.SpecificationInput(
                name=f"OpenAI [{str(enums.OpenAIModels.GPT4O_128K)}]",
                type=enums.SpecificationTypes.COMPLETION,
                serviceType=enums.ModelServiceTypes.OPEN_AI,
                openAI=input_types.OpenAIModelPropertiesInput(
                    model=enums.OpenAIModels.GPT4O_128K,
                )
            )

            specification_response = await graphlit.client.create_specification(specification_input)
            specification_id = specification_response.create_specification.id if specification_response.create_specification is not None else None

            if specification_id is not None:
                print(f'Created specification [{specification_id}].')

                conversation_input = input_types.ConversationInput(
                    name="Conversation",
                    specification=input_types.EntityReferenceInput(
                        id=specification_id
                    ),
                )

                conversation_response = await graphlit.client.create_conversation(conversation_input)
                conversation_id = conversation_response.create_conversation.id if conversation_response.create_conversation is not None else None

                if conversation_id is not None:
                    print(f'Created conversation [{conversation_id}].')

                    # Graphlit assembles the RAG-augmented prompt for us.
                    format_response = await graphlit.client.format_conversation(prompt, conversation_id)
                    formatted_message = format_response.format_conversation.message.message if format_response.format_conversation is not None and format_response.format_conversation.message is not None else None

                    if formatted_message is not None:
                        stream_response = client.chat.completions.create(
                            model=model,
                            messages=[{"role": "user", "content": formatted_message}],
                            temperature=0.1,
                            top_p=0.2,
                            stream=True
                        )

                        completion = ""

                        for chunk in stream_response:
                            delta = chunk.choices[0].delta.content

                            if delta is not None:
                                completion += delta

                        # Fixed: the original tested `completion is not None`,
                        # which is always true since completion starts as "".
                        # Truthiness skips storing an empty completion.
                        if completion:
                            # NOTE: stores completion back into conversation
                            complete_response = await graphlit.client.complete_conversation(completion, conversation_id)

                            print(complete_response.complete_conversation.message.message if complete_response.complete_conversation is not None and complete_response.complete_conversation.message is not None else "None")
    except exceptions.GraphQLClientError as e:
        print(f"Graphlit API error: {e}")

src/langtrace_python_sdk/constants/instrumentation/common.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@
3030
"QDRANT": "Qdrant",
3131
"WEAVIATE": "Weaviate",
3232
"OLLAMA": "Ollama",
33-
"VERTEXAI": "VertexAI",
33+
"VERTEXAI": "Vertex AI",
3434
"GEMINI": "Gemini",
3535
"MISTRAL": "Mistral",
3636
"EMBEDCHAIN": "Embedchain",
@@ -40,6 +40,7 @@
4040
"AWS_BEDROCK": "AWS Bedrock",
4141
"CEREBRAS": "Cerebras",
4242
"MILVUS": "Milvus",
43+
"GRAPHLIT": "Graphlit",
4344
}
4445

4546
LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY = "langtrace_additional_attributes"

src/langtrace_python_sdk/constants/instrumentation/vertexai.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -39,4 +39,24 @@
3939
"method": "ChatSession",
4040
"operation": "send_message_streaming",
4141
},
42+
"PREDICTION_SERVICE_BETA_GENERATE_CONTENT": {
43+
"module": "google.cloud.aiplatform_v1beta1.services.prediction_service.client",
44+
"method": "PredictionServiceClient",
45+
"operation": "generate_content",
46+
},
47+
"PREDICTION_SERVICE_GENERATE_CONTENT": {
48+
"module": "google.cloud.aiplatform_v1.services.prediction_service.client",
49+
"method": "PredictionServiceClient",
50+
"operation": "generate_content",
51+
},
52+
"PREDICTION_SERVICE_BETA_STREAM_GENERATE_CONTENT": {
53+
"module": "google.cloud.aiplatform_v1beta1.services.prediction_service.client",
54+
"method": "PredictionServiceClient",
55+
"operation": "stream_generate_content",
56+
},
57+
"PREDICTION_SERVICE_STREAM_GENERATE_CONTENT": {
58+
"module": "google.cloud.aiplatform_v1.services.prediction_service.client",
59+
"method": "PredictionServiceClient",
60+
"operation": "stream_generate_content",
61+
},
4262
}

src/langtrace_python_sdk/instrumentation/__init__.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,10 @@
2626
from .qdrant import QdrantInstrumentation
2727
from .vertexai import VertexAIInstrumentation
2828
from .weaviate import WeaviateInstrumentation
29+
from .cerebras import CerebrasInstrumentation
30+
from .milvus import MilvusInstrumentation
31+
from .google_genai import GoogleGenaiInstrumentation
32+
from .graphlit import GraphlitInstrumentation
2933

3034
__all__ = [
3135
"AnthropicInstrumentation",
@@ -56,4 +60,5 @@
5660
"MilvusInstrumentation",
5761
"GoogleGenaiInstrumentation",
5862
"CrewaiToolsInstrumentation",
63+
"GraphlitInstrumentation",
5964
]
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# Package entry point for the Graphlit instrumentation: re-exports the
# instrumentor class so callers can import it from the package root.
from .instrumentation import GraphlitInstrumentation

__all__ = ["GraphlitInstrumentation"]
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
2+
from opentelemetry.trace import get_tracer
3+
from wrapt import wrap_function_wrapper as _W
4+
from typing import Collection
5+
from importlib_metadata import version as v
6+
from .patch import patch_graphlit_operation
7+
8+
class GraphlitInstrumentation(BaseInstrumentor):
    """Instruments the graphlit-client SDK.

    Wraps the async operations on ``graphlit.graphlit.Client`` with
    :func:`patch_graphlit_operation` so each call emits a tracing span
    named ``graphlit.<operation>``.
    """

    # Client coroutine methods to wrap. The original listed seven
    # near-identical wrap_function_wrapper calls; driving them from this
    # table keeps the wrapped set in one place.
    _OPERATIONS = (
        "ingest_uri",
        "create_feed",
        "create_specification",
        "create_conversation",
        "format_conversation",
        "complete_conversation",
        "prompt_conversation",
    )

    def instrumentation_dependencies(self) -> Collection[str]:
        """Declare the target package and supported version range."""
        return ["graphlit-client >= 1.0.0"]

    def _instrument(self, **kwargs):
        """Wrap each Graphlit client operation with a tracing patch.

        Accepts an optional ``tracer_provider`` kwarg, per the
        BaseInstrumentor contract.
        """
        tracer_provider = kwargs.get("tracer_provider")
        tracer = get_tracer(__name__, "", tracer_provider)
        version = v("graphlit-client")
        try:
            for operation in self._OPERATIONS:
                _W(
                    "graphlit.graphlit",
                    f"Client.{operation}",
                    patch_graphlit_operation(operation, version, tracer),
                )
        except Exception:
            # Deliberate best-effort: instrumentation failures (e.g. an
            # incompatible graphlit-client layout) must never break the
            # host application.
            pass

    def _uninstrument(self, **kwargs):
        # Wrapping is not undone; uninstrumentation is a no-op.
        pass
Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,69 @@
1+
import json
2+
from importlib_metadata import version as v
3+
from langtrace.trace_attributes import FrameworkSpanAttributes
4+
from opentelemetry import baggage
5+
from opentelemetry.trace import Span, SpanKind, Tracer
6+
from opentelemetry.trace.status import Status, StatusCode
7+
8+
from langtrace_python_sdk.constants import LANGTRACE_SDK_NAME
9+
from langtrace_python_sdk.constants.instrumentation.common import (
10+
LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY,
11+
SERVICE_PROVIDERS,
12+
)
13+
from langtrace_python_sdk.utils.llm import set_span_attributes
14+
from langtrace_python_sdk.utils.misc import serialize_args, serialize_kwargs
15+
16+
17+
def patch_graphlit_operation(operation_name, version, tracer: Tracer):
    # Factory: returns an async wrapt-style wrapper (wrapped, instance,
    # args, kwargs) that traces a single Graphlit client operation as a
    # CLIENT span named "graphlit.<operation_name>".
    async def traced_method(wrapped, instance, args, kwargs):
        service_provider = SERVICE_PROVIDERS["GRAPHLIT"]
        # Extra attributes propagated via OTel baggage by callers upstream.
        extra_attributes = baggage.get_baggage(LANGTRACE_ADDITIONAL_SPAN_ATTRIBUTES_KEY)
        span_attributes = {
            "langtrace.sdk.name": "langtrace-python-sdk",
            "langtrace.service.name": service_provider,
            "langtrace.service.type": "framework",
            "langtrace.service.version": version,
            "langtrace.version": v(LANGTRACE_SDK_NAME),
            **(extra_attributes if extra_attributes is not None else {}),
        }

        # NOTE(review): the "langchain.*" keys look copied from another
        # instrumentation — confirm FrameworkSpanAttributes expects these
        # aliases for metadata/inputs before renaming them.
        span_attributes["langchain.metadata"] = serialize_kwargs(**kwargs)
        span_attributes["langchain.inputs"] = serialize_args(*args)

        attributes = FrameworkSpanAttributes(**span_attributes)

        with tracer.start_as_current_span(
            name=f"graphlit.{operation_name}", kind=SpanKind.CLIENT
        ) as span:
            try:
                set_span_attributes(span, attributes)
                result = await wrapped(*args, **kwargs)

                if result:
                    # Pydantic result -> plain dict; the payload of interest
                    # sits under the operation's own key.
                    operation_result = json.loads(result.model_dump_json())[operation_name]
                    # Conversation-shaped responses get structured handling;
                    # everything else is flattened to stringified attributes.
                    if operation_name == "complete_conversation" or operation_name == "prompt_conversation" or operation_name == "format_conversation":
                        set_graphlit_conversation_attributes(span, operation_result)
                    else:
                        for key, value in operation_result.items():
                            span.set_attribute(f"graphlit.{operation_name}.{key}", str(value))

                span.set_status(Status(StatusCode.OK))

                return result

            except Exception as err:
                # Record and re-raise so callers still see the failure.
                span.record_exception(err)
                span.set_status(Status(StatusCode.ERROR, str(err)))
                raise

    return traced_method
60+
61+
62+
def set_graphlit_conversation_attributes(span: "Span", response):
    """Record conversation id and message fields onto *span*.

    *response* is the dict form of a ``*_conversation`` operation result.
    Does nothing when *response* is falsy or lacks a ``"message"`` key.

    Fixed: the original indexed ``response['conversation']['id']``
    unconditionally and raised ``KeyError`` when the conversation
    reference was absent; it is now guarded.
    """
    if not response or "message" not in response:
        return

    conversation = response.get("conversation")
    if conversation is not None and "id" in conversation:
        span.set_attribute("graphlit.conversation.id", conversation['id'])

    for key, value in response['message'].items():
        span.set_attribute(f"graphlit.conversation.{key}", str(value))

0 commit comments

Comments
 (0)