Skip to content

Commit 4cb93c8

Browse files
committed
Added anthropic connector. Made ollama, openai, and anthropic optional dependencies.
1 parent 9d9466e commit 4cb93c8

File tree

8 files changed

+230
-16
lines changed

8 files changed

+230
-16
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
## 0.9.2 (in development)
44
### Improvements
55
- Added ability for sources and segments to have multiple names in chatterlang.
6+
- Removed signature segments
67

78
## 0.9.1
89
Fixed a missing import of the lancedb module in talkpipe/__init__.py, which prevented its segments from being registered.

Dockerfile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ COPY --chown=builder:builder tests/ tests/
3737
RUN python3 -m pip install --user --upgrade pip setuptools wheel build
3838
RUN python3 -m pip install --user numpy pandas matplotlib scikit-learn scipy
3939
ENV SETUPTOOLS_SCM_PRETEND_VERSION_FOR_TALKPIPE=0.1.0
40-
RUN python3 -m pip install --user -e .[dev]
40+
RUN python3 -m pip install --user -e .[dev,all]
4141
RUN python3 -m pytest --log-cli-level=DEBUG
4242
RUN python3 -m build --wheel
4343

pyproject.toml

Lines changed: 14 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ dependencies = [
2424
'prompt_toolkit',
2525
'parsy',
2626
'pydantic',
27-
'ollama',
2827
'requests',
2928
'numpy',
3029
'numba==0.62.0',
@@ -34,7 +33,6 @@ dependencies = [
3433
'readability-lxml',
3534
'lxml',
3635
'lxml_html_clean',
37-
'openai',
3836
'fastapi[standard]',
3937
'ipywidgets',
4038
'pymongo',
@@ -43,7 +41,8 @@ dependencies = [
4341
'uvicorn',
4442
'whoosh',
4543
'lancedb',
46-
'deprecated'
44+
'deprecated',
45+
'pyyaml'
4746
]
4847
dynamic = ["version"]
4948

@@ -54,6 +53,18 @@ dev = [
5453
'pytest-cov',
5554
'mongomock'
5655
]
56+
ollama = [
57+
'ollama'
58+
]
59+
openai = [
60+
'openai'
61+
]
62+
anthropic = [
63+
'anthropic'
64+
]
65+
all = [
66+
'talkpipe[openai,ollama,anthropic]'
67+
]
5768

5869
[project.scripts]
5970
chatterlang_workbench = "talkpipe.app.chatterlang_workbench:main"

src/talkpipe/llm/config.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
from typing import Dict, Type, TypeVar, List
2-
from .prompt_adapters import AbstractLLMPromptAdapter, OllamaPromptAdapter, OpenAIPromptAdapter
2+
from .prompt_adapters import AbstractLLMPromptAdapter, OllamaPromptAdapter, OpenAIPromptAdapter, AnthropicPromptAdapter
33
from .embedding_adapters import AbstractEmbeddingAdapter, OllamaEmbedderAdapter
44

55
T_PROMPTADAPTER = TypeVar("T_PROMPTADAPTER", bound=AbstractLLMPromptAdapter)
66
T_EMBEDDINGADAPTER = TypeVar("T_EMBEDDINGADAPTER", bound=AbstractEmbeddingAdapter)
77

88
_promptAdapter:Dict[str, Type[T_PROMPTADAPTER]] = {
99
"ollama": OllamaPromptAdapter,
10-
"openai": OpenAIPromptAdapter
10+
"openai": OpenAIPromptAdapter,
11+
"anthropic": AnthropicPromptAdapter
1112
}
1213

1314
def registerPromptAdapter(name:str, promptAdapter:Type[T_PROMPTADAPTER]):

src/talkpipe/llm/embedding_adapters.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
from typing import List
22

33
import numpy as np
4-
import ollama
54

65
class AbstractEmbeddingAdapter:
76
"""Abstract class for embedding text.
@@ -49,6 +48,13 @@ def __init__(self, model: str):
4948
super().__init__(model, "ollama")
5049

5150
def execute(self, text: str) -> List[float]:
51+
try:
52+
import ollama
53+
except ImportError:
54+
raise ImportError(
55+
"Ollama is not installed. Please install it with: pip install talkpipe[ollama]"
56+
)
57+
5258
response = ollama.embed(
5359
model=self.model_name,
5460
input=text

src/talkpipe/llm/prompt_adapters.py

Lines changed: 139 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
1+
import json
12
from pydantic import BaseModel
23
import logging
3-
import ollama
4-
import openai
54

65
from abc import ABC, abstractmethod
76

@@ -87,6 +86,13 @@ def execute(self, prompt: str) -> str:
8786
8887
Handles its own multi-turn conversation state.
8988
"""
89+
try:
90+
import ollama
91+
except ImportError:
92+
raise ImportError(
93+
"Ollama is not installed. Please install it with: pip install talkpipe[ollama]"
94+
)
95+
9096
logger.debug(f"Adding user message to chat history: {prompt}")
9197
self._messages.append({"role": "user", "content": prompt})
9298

@@ -117,6 +123,13 @@ def is_available(self) -> bool:
117123
Returns:
118124
bool: True if the model is available, False otherwise.
119125
"""
126+
try:
127+
import ollama
128+
except ImportError:
129+
raise ImportError(
130+
"Ollama is not installed. Please install it with: pip install talkpipe[ollama]"
131+
)
132+
120133
try:
121134
# Check if the model is available
122135
response = ollama.chat(self._model_name, messages=[self._system_message], options={"temperature": self._temperature})
@@ -126,12 +139,128 @@ def is_available(self) -> bool:
126139
return False
127140

128141

142+
class AnthropicPromptAdapter(AbstractLLMPromptAdapter):
143+
"""Prompt adapter for Anthropic Claude
144+
145+
"""
146+
147+
def __init__(self, model: str, system_prompt: str = "You are a helpful assistant.", multi_turn: bool = True, temperature: float = None, output_format: BaseModel = None):
148+
try:
149+
import anthropic
150+
except ImportError:
151+
raise ImportError(
152+
"Anthropic is not installed. Please install it with: pip install talkpipe[anthropic]"
153+
)
154+
155+
if output_format:
156+
self.pydantic_json_schema = json.dumps(output_format.model_json_schema())
157+
system_prompt += f"\nThe output should be in the following JSON format:\n{self.pydantic_json_schema}"
158+
else:
159+
self.pydantic_json_schema = None
160+
161+
super().__init__(model, "anthropic", system_prompt, multi_turn, temperature, output_format)
162+
self.client = anthropic.Anthropic()
163+
self._max_tokens = 4096 # Default max tokens for response
164+
165+
def execute(self, prompt: str) -> str:
166+
"""Execute the chat model.
167+
168+
Handles its own multi-turn conversation state.
169+
"""
170+
try:
171+
import anthropic
172+
except ImportError:
173+
raise ImportError(
174+
"Anthropic is not installed. Please install it with: pip install talkpipe[anthropic]"
175+
)
176+
177+
logger.debug(f"Adding user message to chat history: {prompt}")
178+
self._messages.append({"role": "user", "content": prompt})
179+
180+
logger.debug(f"Sending chat request to Anthropic model {self._model_name}")
181+
182+
# Build request parameters
183+
request_params = {
184+
"model": self._model_name,
185+
"messages": self._messages,
186+
"system": self._system_message["content"],
187+
"max_tokens": self._max_tokens
188+
}
189+
190+
if self._temperature_explicit:
191+
request_params["temperature"] = self._temperature
192+
193+
response = self.client.messages.create(**request_params)
194+
195+
# Extract text content from response
196+
response_text = ""
197+
for block in response.content:
198+
if hasattr(block, 'text'):
199+
response_text += block.text
200+
201+
if self._multi_turn:
202+
logger.debug("Multi-turn enabled, appending assistant response to chat history")
203+
self._messages.append({"role": "assistant", "content": response_text})
204+
else:
205+
logger.debug("Single-turn mode, clearing message history")
206+
self._messages = []
207+
208+
# Handle output format if specified
209+
if self._output_format:
210+
result = self._output_format.model_validate_json(response_text)
211+
else:
212+
result = response_text
213+
214+
logger.debug(f"Returning response: {result}")
215+
return result
216+
217+
def is_available(self) -> bool:
218+
"""Check if the chat model is available.
219+
220+
This method should be implemented in each subclass to check if
221+
the chat model is available.
222+
Returns:
223+
bool: True if the model is available, False otherwise.
224+
"""
225+
try:
226+
import anthropic
227+
except ImportError:
228+
raise ImportError(
229+
"Anthropic is not installed. Please install it with: pip install talkpipe[anthropic]"
230+
)
231+
232+
try:
233+
# Check if the model is available by making a minimal request
234+
request_params = {
235+
"model": self._model_name,
236+
"messages": [{"role": "user", "content": "test"}],
237+
"system": self._system_message["content"],
238+
"max_tokens": 1
239+
}
240+
241+
if self._temperature_explicit:
242+
request_params["temperature"] = self._temperature
243+
244+
response = self.client.messages.create(**request_params)
245+
return True
246+
except Exception as e:
247+
logger.error(f"Model {self._model_name} is not available: {e}")
248+
return False
249+
250+
129251
class OpenAIPromptAdapter(AbstractLLMPromptAdapter):
130252
"""Prompt adapter for OpenAI
131253
132254
"""
133255

134256
def __init__(self, model: str, system_prompt: str = "You are a helpful assistant.", multi_turn: bool = True, temperature: float = None, output_format: BaseModel = None):
257+
try:
258+
import openai
259+
except ImportError:
260+
raise ImportError(
261+
"OpenAI is not installed. Please install it with: pip install talkpipe[openai]"
262+
)
263+
135264
super().__init__(model, "openai", system_prompt, multi_turn, temperature, output_format)
136265
self.client = openai.OpenAI()
137266

@@ -140,11 +269,18 @@ def execute(self, prompt: str) -> str:
140269
141270
Handles its own multi-turn conversation state.
142271
"""
272+
try:
273+
import openai
274+
except ImportError:
275+
raise ImportError(
276+
"OpenAI is not installed. Please install it with: pip install talkpipe[openai]"
277+
)
278+
143279
logger.debug(f"Adding user message to chat history: {prompt}")
144280
self._messages.append({"role": "user", "content": prompt})
145281

146282
logger.debug(f"Sending chat request to OpenAI model {self._model_name}")
147-
283+
148284
# Build request parameters, only including temperature if explicitly set
149285
request_params = {
150286
"model": self._model_name,

tests/conftest.py

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,20 @@ def pytest_configure(config):
5858
config.is_openai_available = False
5959
logger.warning(f"OpenAI check failed: {e}. Skipping tests that require it.")
6060

61+
# Check if Anthropic is available (if needed in future)
62+
try:
63+
from talkpipe.llm.prompt_adapters import AnthropicPromptAdapter
64+
anthropic_adapter = AnthropicPromptAdapter("claude-3-5-haiku-latest", temperature=0.0)
65+
if anthropic_adapter.is_available():
66+
config.is_anthropic_available = True
67+
logger.warning("Anthropic is available.")
68+
else:
69+
config.is_anthropic_available = False
70+
logger.warning("Anthropic is not available. Skipping tests that require it.")
71+
except Exception as e:
72+
config.is_anthropic_available = False
73+
logger.warning(f"Anthropic check failed: {e}. Skipping tests that require it.")
74+
6175
@pytest.fixture
6276
def requires_mongodb(request):
6377
"""
@@ -87,6 +101,20 @@ def test_something(requires_ollama):
87101
pytest.skip("Test requires Ollama with llama3.2, but this model or the server is not available")
88102
return True
89103

104+
@pytest.fixture
105+
def requires_anthropic(request):
106+
"""
107+
Fixture that skips tests if Anthropic is not available.
108+
109+
Usage:
110+
def test_something(requires_anthropic):
111+
# This test will be skipped if Anthropic is not available
112+
...
113+
"""
114+
if not request.config.is_anthropic_available:
115+
pytest.skip("Test requires Anthropic, but Anthropic is not available")
116+
return True
117+
90118
@pytest.fixture
91119
def requires_openai(request):
92120
"""

0 commit comments

Comments
 (0)