@@ -22,11 +22,16 @@ const DEEPSEEK_DEFAULT_MODEL: &str = "deepseek-chat";
 const ANTHROPIC_API_ENDPOINT: &str = "https://api.anthropic.com/v1";
 const ANTHROPIC_DEFAULT_MODEL: &str = "claude-3-5-sonnet-latest";
 
+// Ollama
+const OLLAMA_DEFAULT_API_ENDPOINT: &str = "http://localhost:11434/v1";
+const OLLAMA_DEFAULT_MODEL: &str = "llama3";
+
 #[derive(Debug, Clone, Deserialize)]
 pub struct AiConfig {
     pub openai: Option<ModelConfig>,
     pub anthropic: Option<ModelConfig>,
     pub deepseek: Option<ModelConfig>,
+    pub ollama: Option<ModelConfig>,
 }
 
 impl Default for AiConfig {
@@ -47,6 +52,17 @@ impl Default for AiConfig {
                 api_endpoint: Some(DEEPSEEK_API_ENDPOINT.into()),
                 model: Some(DEEPSEEK_DEFAULT_MODEL.into()),
             }),
+            ollama: env::var("OLLAMA_API_ENDPOINT")
+                .ok()
+                .map(|endpoint| ModelConfig {
+                    api_key: EnvString(String::default()), // Ollama does not require an API key
+                    api_endpoint: endpoint
+                        .parse()
+                        .ok()
+                        .map(|url: String| url.into())
+                        .or(Some(OLLAMA_DEFAULT_API_ENDPOINT.into())),
+                    model: Some(OLLAMA_DEFAULT_MODEL.into()),
+                }),
         }
     }
 }
@@ -64,9 +80,7 @@ impl Default for ModelConfig {
             #[cfg(feature = "ai_test")]
             api_key: "".into(),
             #[cfg(not(feature = "ai_test"))]
-            api_key: env::var("OPENAI_API_KEY")
-                .expect("Expect `OPENAI_API_KEY` environment variable.")
-                .into(),
+            api_key: EnvString(env::var("OPENAI_API_KEY").unwrap_or_default()),
             api_endpoint: Some(OPENAI_API_ENDPOINT.into()),
             model: Some(OPENAI_DEFAULT_MODEL.into()),
         }
@@ -78,6 +92,12 @@ impl AiConfig {
         &self,
         model_name: Option<String>,
     ) -> Result<ModelConfig, String> {
+        if let Some(ollama) = self.ollama.as_ref() {
+            let model = model_name.as_deref().unwrap_or(OLLAMA_DEFAULT_MODEL);
+            let mut config = ollama.clone();
+            config.model = Some(EnvString(model.to_string()));
+            return Ok(config);
+        }
         if let Some(model) = model_name {
             match model {
                 m if m.starts_with("gpt") => {
@@ -121,6 +141,9 @@ impl AiConfig {
             }
             m => Err(format!("Unsupported model '{m}'.")),
         }
+        } else if let Some(ollama) = self.ollama.as_ref() {
+            // No model name was given; fall back to the configured Ollama settings.
+            Ok(ollama.clone())
         } else {
             // Default is OpenAI model
             Ok(ModelConfig::default())
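
Outside the diff itself, the environment-variable fallback used for the new `ollama` field can be sketched in isolation. This is only an illustrative, self-contained example: `EnvString` and `ModelConfig` below are simplified local stand-ins rather than the crate's real types, and `ollama_from_env` is a hypothetical helper name.

```rust
use std::env;

// Illustration only: local stand-ins, not the crate's real EnvString/ModelConfig.
#[derive(Debug, Clone)]
struct EnvString(String);

#[derive(Debug, Clone)]
struct ModelConfig {
    api_key: EnvString,
    api_endpoint: Option<String>,
    model: Option<String>,
}

const OLLAMA_DEFAULT_API_ENDPOINT: &str = "http://localhost:11434/v1";
const OLLAMA_DEFAULT_MODEL: &str = "llama3";

// Build an Ollama config only when OLLAMA_API_ENDPOINT is set, falling back
// to the default endpoint if the variable is present but empty.
fn ollama_from_env() -> Option<ModelConfig> {
    env::var("OLLAMA_API_ENDPOINT").ok().map(|endpoint| ModelConfig {
        api_key: EnvString(String::new()), // Ollama does not require an API key
        api_endpoint: Some(if endpoint.is_empty() {
            OLLAMA_DEFAULT_API_ENDPOINT.to_string()
        } else {
            endpoint
        }),
        model: Some(OLLAMA_DEFAULT_MODEL.to_string()),
    })
}

fn main() {
    // Prints None when OLLAMA_API_ENDPOINT is unset; otherwise a config
    // pointing at the given (or default) local endpoint.
    println!("{:?}", ollama_from_env());
}
```

Run with `OLLAMA_API_ENDPOINT` set and the helper returns a populated config; unset, it returns `None`, mirroring how the `ollama` field stays `None` unless the variable is present.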