@@ -71,6 +71,7 @@ def __init__(
71
71
model_name: OllamaModelName,
72
72
*,
73
73
base_url: str | None = 'http://localhost:11434/v1/',
74
+ api_key: str = 'ollama',
74
75
openai_client: AsyncOpenAI | None = None,
75
76
http_client: AsyncHTTPClient | None = None,
76
77
):
@@ -83,6 +84,8 @@ def __init__(
83
84
model_name: The name of the Ollama model to use. List of models available [here](https://ollama.com/library)
84
85
You must first download the model (`ollama pull <MODEL-NAME>`) in order to use the model
85
86
base_url: The base url for the ollama requests. The default value is the ollama default
87
+ api_key: The API key to use for authentication. Defaults to 'ollama' for local instances,
88
+ but can be customized for proxy setups that require authentication.
86
89
openai_client: An existing
87
90
[`AsyncOpenAI`](https://github.com/openai/openai-python?tab=readme-ov-file#async-usage)
88
91
client to use, if provided, `base_url` and `http_client` must be `None`.
@@ -96,7 +99,7 @@ def __init__(
96
99
else:
97
100
# API key is not required for ollama but a value is required to create the client
98
101
http_client_ = http_client or cached_async_http_client()
99
- oai_client = AsyncOpenAI(base_url=base_url, api_key='ollama', http_client=http_client_)
102
+ oai_client = AsyncOpenAI(base_url=base_url, api_key=api_key, http_client=http_client_)
100
103
self.openai_model = OpenAIModel(model_name=model_name, openai_client=oai_client)
101
104
102
105
async def agent_model (
0 commit comments