4 changes: 4 additions & 0 deletions .env.example
@@ -25,3 +25,7 @@ CARTESIA_API_KEY=your_cartesia_api_key_here

# Anthropic API credentials
ANTHROPIC_API_KEY=your_anthropic_api_key_here

# Baseten API credentials
BASETEN_API_KEY=your_baseten_api_key_here
BASETEN_BASE_URL=your_baseten_base_url_here
10 changes: 8 additions & 2 deletions agents-core/vision_agents/core/llm/llm.py
@@ -50,7 +50,6 @@ class LLM(abc.ABC):
before_response_listener: BeforeCb
after_response_listener: AfterCb
agent: Optional["Agent"]
_conversation: Optional["Conversation"]
function_registry: FunctionRegistry

def __init__(self):
@@ -59,6 +58,8 @@ def __init__(self):
self.events = EventManager()
self.events.register_events_from_module(events)
self.function_registry = FunctionRegistry()
self.instructions: Optional[str] = None
self.parsed_instructions: Optional[str] = None

async def warmup(self) -> None:
"""
@@ -78,6 +79,12 @@ async def simple_response(
) -> LLMResponseEvent[Any]:
raise NotImplementedError

@property
def _conversation(self) -> Optional[Conversation]:
if not self.agent:
return None
return self.agent.conversation

def _build_enhanced_instructions(self) -> Optional[str]:
"""
Build enhanced instructions by combining the original instructions with markdown file contents.
@@ -187,7 +194,6 @@ def _attach_agent(self, agent: Agent):
Attach agent to the llm
"""
self.agent = agent
self._conversation = agent.conversation
self._set_instructions(agent.instructions)

def _set_instructions(self, instructions: str):
26 changes: 26 additions & 0 deletions plugins/baseten/README.md
@@ -0,0 +1,26 @@
# Baseten Plugin for Vision Agents

LLM integrations for models hosted on Baseten, for use with the Vision Agents framework.

TODO

## Installation

```bash
pip install vision-agents-plugins-baseten
```

## Usage
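A minimal usage sketch, assuming the plugin is configured through the `BASETEN_API_KEY` and `BASETEN_BASE_URL` environment variables from `.env.example`; the constructor arguments shown below are illustrative and may not match the final `BasetenVLM` signature.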

```python
from vision_agents.plugins import baseten

# Illustrative only: the constructor arguments are assumptions until
# BasetenVLM's public API is finalized. BASETEN_API_KEY and
# BASETEN_BASE_URL are expected to be set in the environment.
llm = baseten.VLM(model="your-baseten-model-slug")
```


## Requirements
- Python 3.10+
- `openai`
- GetStream SDK

## License
MIT
Empty file added plugins/baseten/py.typed
Empty file.
36 changes: 36 additions & 0 deletions plugins/baseten/pyproject.toml
@@ -0,0 +1,36 @@
[build-system]
requires = ["hatchling", "hatch-vcs"]
build-backend = "hatchling.build"

[project]
name = "vision-agents-plugins-baseten"
dynamic = ["version"]
description = "Baseten plugin for vision agents"
readme = "README.md"
requires-python = ">=3.10"
license = "MIT"
dependencies = [
"vision-agents",
"openai>=2.5.0",
]

[project.urls]
Documentation = "https://visionagents.ai/"
Website = "https://visionagents.ai/"
Source = "https://github.com/GetStream/Vision-Agents"

[tool.hatch.version]
source = "vcs"
raw-options = { root = "..", search_parent_directories = true, fallback_version = "0.0.0" }

[tool.hatch.build.targets.wheel]
packages = ["."]

[tool.uv.sources]
vision-agents = { workspace = true }

[dependency-groups]
dev = [
"pytest>=8.4.1",
"pytest-asyncio>=1.0.0",
]
4 changes: 4 additions & 0 deletions plugins/baseten/vision_agents/plugins/baseten/__init__.py
@@ -0,0 +1,4 @@
from .baseten_vlm import BasetenVLM as VLM


__all__ = ["VLM"]