
Commit 2289879

Add Ollama API Key Configuration Support (#566)
Parent: 06580b0

File tree

1 file changed: 4 additions, 1 deletion


pydantic_ai_slim/pydantic_ai/models/ollama.py

Lines changed: 4 additions & 1 deletion
@@ -71,6 +71,7 @@ def __init__(
         model_name: OllamaModelName,
         *,
         base_url: str | None = 'http://localhost:11434/v1/',
+        api_key: str = 'ollama',
         openai_client: AsyncOpenAI | None = None,
         http_client: AsyncHTTPClient | None = None,
     ):
@@ -83,6 +84,8 @@ def __init__(
             model_name: The name of the Ollama model to use. List of models available [here](https://ollama.com/library)
                 You must first download the model (`ollama pull <MODEL-NAME>`) in order to use the model
             base_url: The base url for the ollama requests. The default value is the ollama default
+            api_key: The API key to use for authentication. Defaults to 'ollama' for local instances,
+                but can be customized for proxy setups that require authentication
             openai_client: An existing
                 [`AsyncOpenAI`](https://github.com/openai/openai-python?tab=readme-ov-file#async-usage)
                 client to use, if provided, `base_url` and `http_client` must be `None`.
@@ -96,7 +99,7 @@ def __init__(
         else:
             # API key is not required for ollama but a value is required to create the client
             http_client_ = http_client or cached_async_http_client()
-            oai_client = AsyncOpenAI(base_url=base_url, api_key='ollama', http_client=http_client_)
+            oai_client = AsyncOpenAI(base_url=base_url, api_key=api_key, http_client=http_client_)
             self.openai_model = OpenAIModel(model_name=model_name, openai_client=oai_client)

     async def agent_model(
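Usage sketch (not part of the commit): with this change, a caller can pass a custom key when the Ollama instance sits behind an authenticating proxy, while local usage keeps the 'ollama' default. The model name, proxy URL, and token below are hypothetical placeholders, and the Agent construction follows pydantic-ai's usual pattern.

from pydantic_ai import Agent
from pydantic_ai.models.ollama import OllamaModel

# Local Ollama: the default api_key='ollama' is used, same behaviour as before this commit.
local_model = OllamaModel('llama3.2')

# Ollama behind an authenticating proxy: supply the proxy's token explicitly.
# Both the base_url and api_key values here are placeholders, not real endpoints.
proxy_model = OllamaModel(
    'llama3.2',
    base_url='https://ollama.example.com/v1/',
    api_key='my-proxy-token',
)

agent = Agent(proxy_model)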
