6 changes: 1 addition & 5 deletions src/examples/litellm_example/basic.py
@@ -1,4 +1,3 @@
from langtrace_python_sdk import with_langtrace_root_span, langtrace
from dotenv import load_dotenv
from litellm import completion, acompletion
import litellm
@@ -8,11 +7,9 @@


litellm.success_callback = ["langtrace"]
langtrace.init()
litellm.set_verbose = False


@with_langtrace_root_span("Litellm Example OpenAI")
def openAI(streaming=False):
    response = completion(
        model="gpt-3.5-turbo",
@@ -56,7 +53,6 @@ def anthropic(streaming=False):
print("ERORRRR", e)


# @with_langtrace_root_span("Litellm Example OpenAI Async Streaming")
async def async_anthropic(streaming=False):
    response = await acompletion(
        model="claude-2.1",
@@ -93,6 +89,6 @@ def cohere(streaming=False):

if __name__ == "__main__":
    # openAI()
    anthropic(streaming=False)
    # anthropic(streaming=False)
    cohere(streaming=True)
    # asyncio.run(async_anthropic(streaming=True))
10 changes: 10 additions & 0 deletions src/examples/litellm_example/config.yaml
@@ -0,0 +1,10 @@
model_list:
  - model_name: "gpt-4" # all requests where model not in your config go to this deployment
    litellm_params:
      model: openai/gpt-4 # set `openai/` to use the openai route

litellm_settings:
  success_callback: ["langtrace"]

environment_variables:
  LANGTRACE_API_KEY: "fake-api-key"
16 changes: 16 additions & 0 deletions src/examples/litellm_example/proxy_basic.py
@@ -0,0 +1,16 @@
import openai
from dotenv import load_dotenv

load_dotenv()

client = openai.OpenAI(base_url="http://0.0.0.0:4000")

# request sent to model set on litellm proxy, `litellm --model`
response = client.chat.completions.create(
model="gpt-4",
messages=[
{"role": "user", "content": "this is a test request, write a short poem"}
],
)

print(response)
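The same proxy request also works with streaming; a hedged sketch reusing the client above (assumes the proxy is already running on port 4000, e.g. started with `litellm --config config.yaml`, and that the upstream model supports streaming):

stream = client.chat.completions.create(
    model="gpt-4",
    messages=[
        {"role": "user", "content": "this is a test request, write a short poem"}
    ],
    stream=True,
)
for chunk in stream:
    delta = chunk.choices[0].delta.content
    if delta:
        print(delta, end="")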
69 changes: 58 additions & 11 deletions src/langtrace_python_sdk/langtrace.py
@@ -31,10 +31,15 @@
    SimpleSpanProcessor,
)

from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
    OTLPSpanExporter as GRPCExporter,
)
from opentelemetry.exporter.otlp.proto.http.trace_exporter import (
    OTLPSpanExporter as HTTPExporter,
)
from langtrace_python_sdk.constants.exporter.langtrace_exporter import (
    LANGTRACE_REMOTE_URL,
)
from langtrace_python_sdk.extensions.langtrace_exporter import LangTraceExporter
from langtrace_python_sdk.instrumentation import (
    AnthropicInstrumentation,
    ChromaInstrumentation,
@@ -59,6 +64,8 @@
    VertexAIInstrumentation,
    WeaviateInstrumentation,
)
from opentelemetry.util.re import parse_env_headers

from langtrace_python_sdk.types import DisableInstrumentations, InstrumentationMethods
from langtrace_python_sdk.utils import (
    check_if_sdk_is_outdated,
@@ -74,7 +81,7 @@

class LangtraceConfig:
    def __init__(self, **kwargs):
        self.api_key = kwargs.get("api_key")
        self.api_key = kwargs.get("api_key") or os.environ.get("LANGTRACE_API_KEY")
        self.batch = kwargs.get("batch", True)
        self.write_spans_to_console = kwargs.get("write_spans_to_console", False)
        self.custom_remote_exporter = kwargs.get("custom_remote_exporter")
@@ -83,7 +90,11 @@ def __init__(self, **kwargs):
        self.disable_tracing_for_functions = kwargs.get("disable_tracing_for_functions")
        self.service_name = kwargs.get("service_name")
        self.disable_logging = kwargs.get("disable_logging", False)
        self.headers = kwargs.get("headers", {})
        self.headers = (
            kwargs.get("headers")
            or os.environ.get("LANGTRACE_HEADERS")
            or os.environ.get("OTEL_EXPORTER_OTLP_HEADERS")
        )


def get_host(config: LangtraceConfig) -> str:
@@ -96,23 +107,50 @@ def get_host(config: LangtraceConfig) -> str:
    )


def get_service_name(config: LangtraceConfig):
    service_name = os.environ.get("OTEL_SERVICE_NAME")
    if service_name:
        return service_name

    resource_attributes = os.environ.get("OTEL_RESOURCE_ATTRIBUTES")
    if resource_attributes:
        attrs = dict(attr.split("=") for attr in resource_attributes.split(","))
        if "service.name" in attrs:
            return attrs["service.name"]

    if config.service_name:
        return config.service_name

    return sys.argv[0]


def setup_tracer_provider(config: LangtraceConfig, host: str) -> TracerProvider:
    sampler = LangtraceSampler(disabled_methods=config.disable_tracing_for_functions)
    resource = Resource.create(
        attributes={
            SERVICE_NAME: os.environ.get("OTEL_SERVICE_NAME")
            or config.service_name
            or sys.argv[0]
        }
    )
    resource = Resource.create(attributes={SERVICE_NAME: get_service_name(config)})
    return TracerProvider(resource=resource, sampler=sampler)


def get_headers(config: LangtraceConfig):
    if not config.headers:
        return {
            "x-api-key": config.api_key,
        }

    if isinstance(config.headers, str):
        return parse_env_headers(config.headers, liberal=True)

    return config.headers


def get_exporter(config: LangtraceConfig, host: str):
    if config.custom_remote_exporter:
        return config.custom_remote_exporter

    return LangTraceExporter(host, config.api_key, config.disable_logging)
    headers = get_headers(config)
    if "http" in host.lower() or "https" in host.lower():
        return HTTPExporter(endpoint=host, headers=headers)
    else:
        return GRPCExporter(endpoint=host, headers=headers)


def add_span_processor(provider: TracerProvider, config: LangtraceConfig, exporter):
@@ -200,6 +238,15 @@ def init(
        + Fore.RESET
    )

    if host == LANGTRACE_REMOTE_URL and not config.api_key:
        print(Fore.RED)
        print(
            "Missing Langtrace API key, proceed to https://langtrace.ai to create one"
        )
        print("Set the API key as an environment variable LANGTRACE_API_KEY")
        print(Fore.RESET)
        return

    provider = setup_tracer_provider(config, host)
    exporter = get_exporter(config, host)

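Taken together, the changes to langtrace.py let the exporter endpoint, headers, and service name be driven from the environment. A minimal sketch of how a caller might exercise them (the endpoint, header values, and service name are illustrative placeholders; the api_host parameter is assumed from the existing init signature that get_host consults):

import os
from langtrace_python_sdk import langtrace

# get_service_name(): OTEL_SERVICE_NAME wins, then service.name parsed out of
# OTEL_RESOURCE_ATTRIBUTES, then the service_name kwarg, then sys.argv[0].
os.environ["OTEL_RESOURCE_ATTRIBUTES"] = "service.name=checkout-service,deployment.environment=dev"

# get_headers(): a string from LANGTRACE_HEADERS or OTEL_EXPORTER_OTLP_HEADERS
# is parsed as comma-separated key=value pairs via parse_env_headers.
os.environ["LANGTRACE_HEADERS"] = "x-api-key=placeholder-key,x-tenant=acme"

# get_exporter(): an "http"/"https" host selects the OTLP HTTP exporter,
# anything else falls back to gRPC. With the default Langtrace cloud host,
# a missing LANGTRACE_API_KEY now aborts init() with a warning instead.
langtrace.init(api_host="http://localhost:4318/v1/traces")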
2 changes: 1 addition & 1 deletion src/langtrace_python_sdk/version.py
@@ -1 +1 @@
__version__ = "3.0.2"
__version__ = "3.1.0"