Skip to content

Commit 7a3c52e

Browse files
authored
feat: support VertexAIAnthropic Provider (#776)
1 parent 699dd54 commit 7a3c52e

File tree

10 files changed

+326
-81
lines changed

10 files changed

+326
-81
lines changed

pyproject.toml

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ dependencies = [
1818
[project.optional-dependencies]
1919

2020
all = [
21-
"any-llm-sdk[mistral,anthropic,huggingface,gemini,vertexai,cohere,cerebras,fireworks,groq,bedrock,azure,azureopenai,watsonx,together,sambanova,ollama,moonshot,nebius,xai,databricks,deepseek,inception,openai,openrouter,portkey,lmstudio,llama,voyage,perplexity,platform,llamafile,llamacpp,sagemaker,gateway,zai,minimax,vllm]"
21+
"any-llm-sdk[mistral,anthropic,huggingface,gemini,vertexai,vertexaianthropic,cohere,cerebras,fireworks,groq,bedrock,azure,azureopenai,watsonx,together,sambanova,ollama,moonshot,nebius,xai,databricks,deepseek,inception,openai,openrouter,portkey,lmstudio,llama,voyage,perplexity,platform,llamafile,llamacpp,sagemaker,gateway,zai,minimax,vllm]"
2222
]
2323

2424
platform = [
@@ -43,6 +43,10 @@ vertexai = [
4343
"google-genai",
4444
]
4545

46+
vertexaianthropic = [
47+
"anthropic[vertex]",
48+
]
49+
4650
huggingface = [
4751
"huggingface-hub",
4852
]

src/any_llm/constants.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,7 @@ class LLMProvider(StrEnum):
4545
SAGEMAKER = "sagemaker"
4646
TOGETHER = "together"
4747
VERTEXAI = "vertexai"
48+
VERTEXAIANTHROPIC = "vertexaianthropic"
4849
VLLM = "vllm"
4950
VOYAGE = "voyage"
5051
WATSONX = "watsonx"
Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
11
from .anthropic import AnthropicProvider
from .base import BaseAnthropicProvider

# Re-export both the concrete Anthropic provider and the shared base class so
# other providers (e.g. the VertexAI variant) can subclass BaseAnthropicProvider.
__all__ = ["AnthropicProvider", "BaseAnthropicProvider"]

src/any_llm/providers/anthropic/anthropic.py

Lines changed: 3 additions & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -2,32 +2,21 @@
22

33
from typing import TYPE_CHECKING, Any
44

5-
from any_llm.any_llm import AnyLLM
5+
from .base import BaseAnthropicProvider
66

77
MISSING_PACKAGES_ERROR = None
88
try:
99
from anthropic import AsyncAnthropic
10-
11-
from .utils import (
12-
_convert_models_list,
13-
_convert_params,
14-
_convert_response,
15-
_create_openai_chunk_from_anthropic_chunk,
16-
)
1710
except ImportError as e:
1811
MISSING_PACKAGES_ERROR = e
1912

2013
if TYPE_CHECKING:
21-
from collections.abc import AsyncIterator, Sequence
22-
23-
from anthropic.types import Message
24-
from anthropic.types.model_info import ModelInfo as AnthropicModelInfo
14+
from collections.abc import Sequence
2515

26-
from any_llm.types.completion import ChatCompletion, ChatCompletionChunk, CompletionParams, CreateEmbeddingResponse
2716
from any_llm.types.model import Model
2817

2918

30-
class AnthropicProvider(AnyLLM):
19+
class AnthropicProvider(BaseAnthropicProvider):
3120
"""
3221
Anthropic Provider using enhanced Provider framework.
3322
@@ -38,15 +27,7 @@ class AnthropicProvider(AnyLLM):
3827
ENV_API_KEY_NAME = "ANTHROPIC_API_KEY"
3928
PROVIDER_DOCUMENTATION_URL = "https://docs.anthropic.com/en/home"
4029

41-
SUPPORTS_COMPLETION_STREAMING = True
42-
SUPPORTS_COMPLETION = True
43-
SUPPORTS_RESPONSES = False
44-
SUPPORTS_COMPLETION_REASONING = True
45-
SUPPORTS_COMPLETION_IMAGE = True
46-
SUPPORTS_COMPLETION_PDF = False
47-
SUPPORTS_EMBEDDING = False
4830
SUPPORTS_LIST_MODELS = True
49-
SUPPORTS_BATCH = False
5031

5132
MISSING_PACKAGES_ERROR = MISSING_PACKAGES_ERROR
5233

@@ -59,62 +40,6 @@ def _init_client(self, api_key: str | None = None, api_base: str | None = None,
5940
**kwargs,
6041
)
6142

62-
@staticmethod
63-
def _convert_completion_params(params: CompletionParams, **kwargs: Any) -> dict[str, Any]:
64-
"""Convert CompletionParams to kwargs for Anthropic API."""
65-
return _convert_params(params, **kwargs)
66-
67-
@staticmethod
68-
def _convert_completion_response(response: Message) -> ChatCompletion:
69-
"""Convert Anthropic Message to OpenAI ChatCompletion format."""
70-
return _convert_response(response)
71-
72-
@staticmethod
73-
def _convert_completion_chunk_response(response: Any, **kwargs: Any) -> ChatCompletionChunk:
74-
"""Convert Anthropic streaming chunk to OpenAI ChatCompletionChunk format."""
75-
model_id = kwargs.get("model_id", "unknown")
76-
return _create_openai_chunk_from_anthropic_chunk(response, model_id)
77-
78-
@staticmethod
79-
def _convert_embedding_params(params: Any, **kwargs: Any) -> dict[str, Any]:
80-
"""Anthropic does not support embeddings."""
81-
msg = "Anthropic does not support embeddings"
82-
raise NotImplementedError(msg)
83-
84-
@staticmethod
85-
def _convert_embedding_response(response: Any) -> CreateEmbeddingResponse:
86-
"""Anthropic does not support embeddings."""
87-
msg = "Anthropic does not support embeddings"
88-
raise NotImplementedError(msg)
89-
90-
@staticmethod
91-
def _convert_list_models_response(response: list[AnthropicModelInfo]) -> Sequence[Model]:
92-
"""Convert Anthropic models list to OpenAI format."""
93-
return _convert_models_list(response)
94-
95-
async def _stream_completion_async(self, **kwargs: Any) -> AsyncIterator[ChatCompletionChunk]:
96-
"""Handle streaming completion - extracted to avoid generator issues."""
97-
async with self.client.messages.stream(
98-
**kwargs,
99-
) as anthropic_stream:
100-
async for event in anthropic_stream:
101-
yield self._convert_completion_chunk_response(event, model_id=kwargs.get("model", "unknown"))
102-
103-
async def _acompletion(
104-
self,
105-
params: CompletionParams,
106-
**kwargs: Any,
107-
) -> ChatCompletion | AsyncIterator[ChatCompletionChunk]:
108-
kwargs["provider_name"] = self.PROVIDER_NAME
109-
converted_kwargs = self._convert_completion_params(params, **kwargs)
110-
111-
if converted_kwargs.pop("stream", False):
112-
return self._stream_completion_async(**converted_kwargs)
113-
114-
message = await self.client.messages.create(**converted_kwargs)
115-
116-
return self._convert_completion_response(message)
117-
11843
async def _alist_models(self, **kwargs: Any) -> Sequence[Model]:
11944
models_list = await self.client.models.list(**kwargs)
12045
return self._convert_list_models_response(models_list.data)
Lines changed: 111 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,111 @@
1+
from __future__ import annotations

from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any

from any_llm.any_llm import AnyLLM

# Record the ImportError (if any) instead of failing at module import time, so
# the framework can report the missing optional `anthropic` dependency lazily.
MISSING_PACKAGES_ERROR = None
try:
    from anthropic import AsyncAnthropic

    from .utils import (
        _convert_models_list,
        _convert_params,
        _convert_response,
        _create_openai_chunk_from_anthropic_chunk,
    )
except ImportError as e:
    MISSING_PACKAGES_ERROR = e

if TYPE_CHECKING:
    from collections.abc import AsyncIterator, Sequence

    from anthropic import AsyncAnthropic, AsyncAnthropicVertex
    from anthropic.types import Message
    from anthropic.types.model_info import ModelInfo as AnthropicModelInfo

    from any_llm.types.completion import ChatCompletion, ChatCompletionChunk, CompletionParams, CreateEmbeddingResponse
    from any_llm.types.model import Model
30+
31+
32+
class BaseAnthropicProvider(AnyLLM, ABC):
    """
    Base provider for Anthropic-compatible services.

    This class provides a common foundation for providers that use Anthropic-compatible APIs.
    Subclasses need to override `_init_client()` for provider-specific client initialization.
    """

    # Capability flags consumed by the AnyLLM framework; subclasses may override
    # individual flags (e.g. AnthropicProvider re-enables SUPPORTS_LIST_MODELS).
    SUPPORTS_COMPLETION_STREAMING = True
    SUPPORTS_COMPLETION = True
    SUPPORTS_RESPONSES = False
    SUPPORTS_COMPLETION_REASONING = True
    SUPPORTS_COMPLETION_IMAGE = True
    SUPPORTS_COMPLETION_PDF = False
    SUPPORTS_EMBEDDING = False
    SUPPORTS_LIST_MODELS = False
    SUPPORTS_BATCH = False

    # Captured at module import; the framework surfaces this when the optional
    # `anthropic` package is missing.
    MISSING_PACKAGES_ERROR = MISSING_PACKAGES_ERROR

    # Set by subclasses in _init_client(); either a plain Anthropic client or
    # the VertexAI-backed variant.
    client: AsyncAnthropic | AsyncAnthropicVertex

    @abstractmethod
    def _init_client(self, api_key: str | None = None, api_base: str | None = None, **kwargs: Any) -> None:
        msg = "Subclasses must implement this method"
        raise NotImplementedError(msg)

    @staticmethod
    def _convert_completion_params(params: CompletionParams, **kwargs: Any) -> dict[str, Any]:
        """Convert CompletionParams to kwargs for Anthropic API."""
        return _convert_params(params, **kwargs)

    @staticmethod
    def _convert_completion_response(response: Message) -> ChatCompletion:
        """Convert Anthropic Message to OpenAI ChatCompletion format."""
        return _convert_response(response)

    @staticmethod
    def _convert_completion_chunk_response(response: Any, **kwargs: Any) -> ChatCompletionChunk:
        """Convert Anthropic streaming chunk to OpenAI ChatCompletionChunk format."""
        model_id = kwargs.get("model_id", "unknown")
        return _create_openai_chunk_from_anthropic_chunk(response, model_id)

    @staticmethod
    def _convert_embedding_params(params: Any, **kwargs: Any) -> dict[str, Any]:
        """Anthropic does not support embeddings."""
        msg = "Anthropic does not support embeddings"
        raise NotImplementedError(msg)

    @staticmethod
    def _convert_embedding_response(response: Any) -> CreateEmbeddingResponse:
        """Anthropic does not support embeddings."""
        msg = "Anthropic does not support embeddings"
        raise NotImplementedError(msg)

    @staticmethod
    def _convert_list_models_response(response: list[AnthropicModelInfo]) -> Sequence[Model]:
        """Convert Anthropic models list to OpenAI format."""
        return _convert_models_list(response)

    async def _stream_completion_async(self, **kwargs: Any) -> AsyncIterator[ChatCompletionChunk]:
        """Handle streaming completion - extracted to avoid generator issues."""
        async with self.client.messages.stream(**kwargs) as anthropic_stream:
            async for event in anthropic_stream:
                # "model" here is the Anthropic request kwarg; used as the chunk's model id.
                yield self._convert_completion_chunk_response(event, model_id=kwargs.get("model", "unknown"))

    async def _acompletion(
        self,
        params: CompletionParams,
        **kwargs: Any,
    ) -> ChatCompletion | AsyncIterator[ChatCompletionChunk]:
        # provider_name is threaded through so parameter conversion can attach
        # provider-specific metadata — TODO confirm against _convert_params.
        kwargs["provider_name"] = self.PROVIDER_NAME
        converted_kwargs = self._convert_completion_params(params, **kwargs)

        # Streaming returns the async iterator directly; pop "stream" so it is
        # not forwarded to messages.create()/messages.stream().
        if converted_kwargs.pop("stream", False):
            return self._stream_completion_async(**converted_kwargs)

        message = await self.client.messages.create(**converted_kwargs)

        return self._convert_completion_response(message)
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
from .vertexaianthropic import VertexaianthropicProvider

# Public API of the vertexaianthropic provider package.
__all__ = ["VertexaianthropicProvider"]
Lines changed: 54 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
from __future__ import annotations

import os
from typing import Any

from any_llm.exceptions import MissingApiKeyError
from any_llm.providers.anthropic.base import BaseAnthropicProvider

# Capture the ImportError so the provider can report that the optional
# `anthropic[vertex]` extra is not installed, instead of failing at import time.
MISSING_PACKAGES_ERROR = None
try:
    from anthropic import AsyncAnthropicVertex
except ImportError as e:
    MISSING_PACKAGES_ERROR = e
14+
15+
16+
class VertexaianthropicProvider(BaseAnthropicProvider):
    """
    VertexAI Anthropic Provider for Claude models through Google Cloud's Model Garden.

    Uses Anthropic's `AsyncAnthropicVertex` client from `anthropic[vertex]` to access
    Claude models deployed on Google Cloud VertexAI.

    Authentication is handled via Google Cloud Application Default Credentials (ADC),
    not an API key. Requires `GOOGLE_CLOUD_PROJECT` environment variable or `project_id`
    constructor argument.
    """

    PROVIDER_NAME = "vertexaianthropic"
    ENV_API_KEY_NAME = ""  # VertexAI uses GCP ADC, not an API key
    PROVIDER_DOCUMENTATION_URL = "https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/use-claude"

    # This client exposes no model-listing endpoint here, so listing is disabled.
    SUPPORTS_LIST_MODELS = False

    MISSING_PACKAGES_ERROR = MISSING_PACKAGES_ERROR

    # Narrow the base class's client annotation to the Vertex-backed client.
    client: AsyncAnthropicVertex

    def _verify_and_set_api_key(self, api_key: str | None = None) -> str | None:
        # VertexAI uses Google Cloud ADC, not an API key
        # We don't require an API key, but we do require project_id
        return api_key

    def _init_client(self, api_key: str | None = None, api_base: str | None = None, **kwargs: Any) -> None:
        """Create the AsyncAnthropicVertex client.

        `project_id`/`region` kwargs take precedence over the
        GOOGLE_CLOUD_PROJECT / GOOGLE_CLOUD_LOCATION environment variables;
        region defaults to "us-central1".
        """
        project_id = kwargs.pop("project_id", None) or os.getenv("GOOGLE_CLOUD_PROJECT")
        region = kwargs.pop("region", None) or os.getenv("GOOGLE_CLOUD_LOCATION", "us-central1")

        if not project_id:
            # MissingApiKeyError is reused here to signal missing GCP project
            # configuration rather than a literal API key.
            raise MissingApiKeyError(self.PROVIDER_NAME, "GOOGLE_CLOUD_PROJECT")

        self.client = AsyncAnthropicVertex(
            project_id=project_id,
            region=region,
            **kwargs,
        )

tests/constants.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212

1313
# Providers that should never run in CI (only for local development)
1414
CI_EXCLUDED_PROVIDERS = [
    # Needs Google Cloud ADC credentials, which are not set up in CI (presumed).
    LLMProvider.VERTEXAIANTHROPIC,
    LLMProvider.VLLM,
]
1718

tests/unit/providers/test_anthropic_provider.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
def mock_anthropic_provider(): # type: ignore[no-untyped-def]
1616
with (
1717
patch("any_llm.providers.anthropic.anthropic.AsyncAnthropic") as mock_anthropic,
18-
patch("any_llm.providers.anthropic.anthropic._convert_response"),
18+
patch("any_llm.providers.anthropic.base._convert_response"),
1919
):
2020
mock_client = Mock()
2121
mock_anthropic.return_value = mock_client

0 commit comments

Comments
 (0)