Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 27 additions & 0 deletions .github/workflows/pr_qc.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
name: Quality Control

on:
  workflow_dispatch:
  # Run quality checks on every pull request.
  pull_request:

jobs:
  quality_control:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          make install-dev

      - name: Run Type Checks
        run: |
          make type

      - name: Run Linter
        run: |
          make lint
120 changes: 120 additions & 0 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
name: Publish to PyPI

permissions:
  id-token: write
  contents: write

on:
  workflow_dispatch:
  # push:
  #   branches:
  #     - main

jobs:
  setup:
    runs-on: ubuntu-latest
    env:
      GUARDRAILS_TOKEN: ${{ secrets.PRIV_PYPI_PUBLISH_TOKEN }}
      PYPI_REPOSITORY_URL: 'https://pypi.guardrailsai.com'
    steps:
      - name: Checkout Repository
        uses: actions/checkout@v4
        with:
          # Tags and full history are needed so the version-bump check can
          # see previously released tags.
          fetch-tags: true
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.11'

      - name: Check for version bump
        id: version
        continue-on-error: false
        # `bash {0}` disables the default fail-fast flags so we can exit
        # with an explicit status after printing diagnostics.
        shell: bash {0}
        run: |
          PYPROJECT_TOML="pyproject.toml"

          # Extract the version using grep and sed
          version=$(grep -m 1 "version" "$PYPROJECT_TOML" | sed -E 's/.*version[[:space:]]*=[[:space:]]*"([^"]*)".*/\1/')

          echo "Project version: $version"

          if [ -z "$version" ]
          then
            echo "Version is missing from pyproject.toml!"
            exit 1
          fi

          echo "Checking if $version already exists..."
          # `--verify --quiet` exits non-zero and prints nothing when the tag
          # is absent, unlike bare `git rev-parse` which echoes its argument
          # back on failure and required comparing the output to $version.
          version_commit="$(git rev-parse --verify --quiet "refs/tags/$version")"
          if [ -n "$version_commit" ]
          then
            echo "Version $version already exists on commit $version_commit!"
            echo "Abandoning build..."
            echo "To complete this release update the version field in the pyproject.toml with an appropriate semantic version."
            exit 1
          else
            echo "version=$version" >> "$GITHUB_OUTPUT"
            exit 0
          fi

      - name: Install Twine & Build
        shell: bash
        run: |
          python -m pip install --upgrade pip
          pip install twine build toml

      - name: Create .pypirc
        shell: bash
        run: |
          touch ~/.pypirc
          echo "[distutils]" >> ~/.pypirc
          echo "index-servers =" >> ~/.pypirc
          echo "    private-repository" >> ~/.pypirc
          echo "" >> ~/.pypirc
          echo "[private-repository]" >> ~/.pypirc
          echo "repository = $PYPI_REPOSITORY_URL" >> ~/.pypirc
          echo "username = __token__" >> ~/.pypirc
          echo "password = $GUARDRAILS_TOKEN" >> ~/.pypirc

      - name: Build & Upload
        shell: bash
        run: |
          python -m build
          twine upload dist/* -u __token__ -p $GUARDRAILS_TOKEN -r private-repository

      - name: Create .pypirc for PyPI.org
        shell: bash
        env:
          PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          echo "[distutils]" > ~/.pypirc
          echo "index-servers =" >> ~/.pypirc
          echo "    pypi" >> ~/.pypirc
          echo "" >> ~/.pypirc
          echo "[pypi]" >> ~/.pypirc
          echo "repository = https://upload.pypi.org/legacy/" >> ~/.pypirc
          echo "username = __token__" >> ~/.pypirc
          echo "password = $PYPI_PASSWORD" >> ~/.pypirc

      - name: Upload to PyPI.org
        shell: bash
        env:
          PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
        run: |
          twine upload dist/* -u __token__ -p $PYPI_PASSWORD -r pypi

      - name: Tag
        id: tag
        continue-on-error: false
        run: |
          version="${{ steps.version.outputs.version }}"
          echo "Configuring github bot"
          git config user.name "github-actions[bot]"
          # Comes from https://api.github.com/users/github-actions%5Bbot%5D
          git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
          echo "Creating github tag: $version"
          git tag "$version"
          echo "Pushing tags"
          git push --tags
4 changes: 4 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Python build artifacts and packaging metadata
build
*.egg-info
# Local virtual environment
.venv
# Ruff linter/formatter cache
.ruff_cache
10 changes: 10 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    # Ruff version.
    rev: v0.9.4
    hooks:
      # Run the linter.
      - id: ruff
        args: [--fix]
      # Run the formatter.
      - id: ruff-format
21 changes: 21 additions & 0 deletions LICENSE
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2024 Guardrails AI

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
34 changes: 34 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
.PHONY: install install-dev lint lint-fix type qa test test-cov test-cov-ci

# Installs production dependencies
install:
	pip install .

# Installs development dependencies
install-dev:
	pip install ".[dev]"

# Check lint and formatting without modifying files? No: `ruff format .`
# rewrites files; use `lint` before committing.
lint:
	ruff check .
	ruff format .

# Auto-fix lint violations, then format
lint-fix:
	ruff check . --fix
	ruff format .

# Static type checking
type:
	pyright

# Full local quality gate: deps + lint + types
qa:
	make install-dev
	make lint
	make type

# Run the unit-test suite, stopping at the first failure
test:
	python -m unittest discover --start-directory tests --buffer --failfast

# Tests with a coverage report printed locally
test-cov:
	coverage run -m unittest discover --start-directory tests --buffer --failfast
	coverage report -m

# CI variant: collect coverage data only (reporting handled by the pipeline)
test-cov-ci:
	coverage run -m unittest discover --start-directory tests --buffer --failfast
160 changes: 158 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,2 +1,158 @@
# snowglobe-telemetry-openinference
Telemetry instrumentation for the snowglobe client using OpenInference semantics.
# Snowglobe Telemetry Instrumentation for OpenInference

Instrument your Snowglobe connected app with OpenInference and start sending traces to popular OpenInference compatible sinks like Arize or Arize Phoenix.

## Installation

```
pip install snowglobe-telemetry-openinference
```

If using uv, set the `--prerelease=allow` flag
```
uv pip install --prerelease=allow snowglobe-telemetry-openinference
```


## Add the OpenInferenceInstrumentor to your agent file

Reminder: Each agent wrapper file resides in the root directory of your project, and is named after the agent (e.g. `My Agent Name` becomes `my_agent_name.py`).

```python
from snowglobe.client import CompletionRequest, CompletionFunctionOutputs
from openai import OpenAI
import os

### Add these two lines to your agent file and watch context rich traces come in!
from snowglobe.telemetry.openinference import OpenInferenceInstrumentor
OpenInferenceInstrumentor().instrument()


client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))

def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs:
"""
Process a scenario request from Snowglobe.

This function is called by the Snowglobe client to process requests. It should return a
CompletionFunctionOutputs object with the response content.

Example CompletionRequest:
CompletionRequest(
messages=[
SnowglobeMessage(role="user", content="Hello, how are you?", snowglobe_data=None),
]
)

Example CompletionFunctionOutputs:
CompletionFunctionOutputs(response="This is a string response from your application")

Args:
request (CompletionRequest): The request object containing the messages.

Returns:
CompletionFunctionOutputs: The response object with the generated content.
"""

# Process the request using the messages. Example:
messages = request.to_openai_messages()
response = client.chat.completions.create(
model="gpt-4o-mini",
messages=messages
)
return CompletionFunctionOutputs(response=response.choices[0].message.content)
```



## Enhancing Snowglobe Connect SDK's Traces with OpenInference Integrations
You can add more rich context to the traces the Snowglobe Connect SDK captures by installing additional OpenInference instrumentors and registering the appropriate tracer provider in your agent wrapper file.

The examples below show how to add OpenAI instrumentation for either Arize or Arize Phoenix in addition to Snowglobe's OpenInference instrumentation:

### Arize

Install the Arize OpenTelemetry package and the OpenAI-specific instrumentor.
```sh
pip install openinference-instrumentation-openai arize-otel
```

Then register the tracer provider and use the OpenAI instrumentor in your agent file:
```py
import os
from openai import OpenAI
from snowglobe.client import CompletionRequest, CompletionFunctionOutputs
from arize.otel import register

client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))

# Setup OTel via our convenience function
tracer_provider = register(
space_id = "your-space-id", # in app space settings page
api_key = "your-api-key", # in app space settings page
project_name = "your-project-name", # name this to whatever you would like
)

# Import the OpenAI instrumentor from OpenInference
from openinference.instrumentation.openai import OpenAIInstrumentor

# Instrument OpenAI
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)

# Import the OpenInference instrumentor from Snowglobe
from snowglobe.telemetry.openinference import OpenInferenceInstrumentor

# Instrument the Snowglobe client
OpenInferenceInstrumentor().instrument(tracer_provider=tracer_provider)


def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs:
messages = request.to_openai_messages()
response = client.chat.completions.create(
model="gpt-4o-mini",
messages=messages
)
return CompletionFunctionOutputs(response=response.choices[0].message.content)
```


### Arize Phoenix

Install the Arize Phoenix OpenTelemetry package and the OpenAI-specific instrumentor.
```sh
pip install openinference-instrumentation-openai arize-phoenix-otel
```

Then register the tracer provider and use the OpenAI instrumentor in your agent file:
```py
import os
from openai import OpenAI
from snowglobe.client import CompletionRequest, CompletionFunctionOutputs
from phoenix.otel import register

client = OpenAI(api_key=os.getenv("SNOWGLOBE_API_KEY"))

os.environ["PHOENIX_COLLECTOR_ENDPOINT"] = "http://localhost:6006"

# configure the Phoenix tracer
tracer_provider = register(
project_name="my-llm-app", # Default is 'default'
)

# Import the OpenAI instrumentor from OpenInference
from openinference.instrumentation.openai import OpenAIInstrumentor

# Instrument OpenAI
OpenAIInstrumentor().instrument(tracer_provider=tracer_provider)

# Import the OpenInference instrumentor from Snowglobe
from snowglobe.telemetry.openinference import OpenInferenceInstrumentor

# Instrument the Snowglobe client
OpenInferenceInstrumentor().instrument(tracer_provider=tracer_provider)


def completion_fn(request: CompletionRequest) -> CompletionFunctionOutputs:
messages = request.to_openai_messages()
response = client.chat.completions.create(
model="gpt-4o-mini",
messages=messages
)
return CompletionFunctionOutputs(response=response.choices[0].message.content)
```
Loading