
Commit 73bc7f8

feat: support local models integration with Ollama
1 parent: f20e08f

4 files changed: +61 −4 lines


README.md

Lines changed: 21 additions & 1 deletion
@@ -117,9 +117,10 @@ Check out the [examples](./examples) directory for more sample code.
 
 AIScript supports the following AI models:
 
-- [x] OpenAI ((uses `OPENAI_API_KEY` environment variable by default))
+- [x] OpenAI (uses `OPENAI_API_KEY` environment variable by default or local models with Ollama)
 - [x] DeepSeek
 - [x] Anthropic
+- [x] Ollama (100+ local models from various providers)
 
 Configuration by `project.toml`:
 
@@ -138,8 +139,27 @@ model = "deepseek-chat"
 [ai.anthropic]
 api_key = "YOUR_API_KEY"
 model = "claude-3-5-sonnet-latest"
+
+# or use Ollama (local models)
+[ai.ollama]
+api_endpoint = "http://localhost:11434/v1" # Default Ollama endpoint
+model = "llama3.2" # or any other model installed in your Ollama instance
 ```
 
+### Using Ollama
+
+[Ollama](https://ollama.ai/) allows you to run local AI models on your own hardware. To use Ollama with AIScript:
+
+1. Install Ollama from [ollama.ai](https://ollama.ai/)
+2. Pull your desired models (e.g., `ollama pull llama3.2`)
+3. Make sure Ollama is running locally
+4. Configure AIScript to use Ollama as shown above or by setting the `OLLAMA_API_ENDPOINT` environment variable
+
+Ollama provides access to 100+ models ranging from small 135M-parameter models to massive 671B-parameter models, including:
+- Llama family (llama4, llama3.2, codellama)
+- DeepSeek models (deepseek-r1, deepseek-v3)
+- And [many more specialized models](https://ollama.com/search)
+
 ## Roadmap
 
 See our [roadmap](https://aiscript.dev/guide/contribution/roadmap) for upcoming features and improvements.
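
Step 3 in the list above ("make sure Ollama is running locally") is easy to verify programmatically: Ollama's OpenAI-compatible API serves `GET /v1/models` with the list of installed models. A minimal Rust sketch, not part of this commit, assuming the default endpoint and the `reqwest` blocking client:

```rust
// Quick liveness check for a local Ollama instance (illustrative only).
// Assumes Cargo.toml has: reqwest = { version = "0.12", features = ["blocking"] }
fn ollama_is_running(endpoint: &str) -> bool {
    // GET {endpoint}/models succeeds only if the Ollama server is up.
    reqwest::blocking::get(format!("{endpoint}/models"))
        .map(|resp| resp.status().is_success())
        .unwrap_or(false)
}

fn main() {
    // Mirror the config resolution: env var first, then the documented default.
    let endpoint = std::env::var("OLLAMA_API_ENDPOINT")
        .unwrap_or_else(|_| "http://localhost:11434/v1".to_string());
    println!("Ollama reachable: {}", ollama_is_running(&endpoint));
}
```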

aiscript-vm/src/ai/mod.rs

Lines changed: 31 additions & 3 deletions
@@ -22,11 +22,16 @@ const DEEPSEEK_DEFAULT_MODEL: &str = "deepseek-chat";
 const ANTHROPIC_API_ENDPOINT: &str = "https://api.anthropic.com/v1";
 const ANTHROPIC_DEFAULT_MODEL: &str = "claude-3-5-sonnet-latest";
 
+// Ollama
+const OLLAMA_DEFAULT_API_ENDPOINT: &str = "http://localhost:11434/v1";
+const OLLAMA_DEFAULT_MODEL: &str = "llama3";
+
 #[derive(Debug, Clone, Deserialize)]
 pub struct AiConfig {
     pub openai: Option<ModelConfig>,
     pub anthropic: Option<ModelConfig>,
     pub deepseek: Option<ModelConfig>,
+    pub ollama: Option<ModelConfig>,
 }
 
 impl Default for AiConfig {
@@ -47,6 +52,17 @@ impl Default for AiConfig {
                 api_endpoint: Some(DEEPSEEK_API_ENDPOINT.into()),
                 model: Some(DEEPSEEK_DEFAULT_MODEL.into()),
             }),
+            ollama: env::var("OLLAMA_API_ENDPOINT")
+                .ok()
+                .map(|endpoint| ModelConfig {
+                    api_key: EnvString(String::default()), // Ollama does not require an API key
+                    api_endpoint: endpoint
+                        .parse()
+                        .ok()
+                        .map(|url: String| url.into())
+                        .or(Some(OLLAMA_DEFAULT_API_ENDPOINT.into())),
+                    model: Some(OLLAMA_DEFAULT_MODEL.into()),
+                }),
         }
     }
 }
@@ -64,9 +80,7 @@ impl Default for ModelConfig {
             #[cfg(feature = "ai_test")]
             api_key: "".into(),
             #[cfg(not(feature = "ai_test"))]
-            api_key: env::var("OPENAI_API_KEY")
-                .expect("Expect `OPENAI_API_KEY` environment variable.")
-                .into(),
+            api_key: EnvString(env::var("OPENAI_API_KEY").unwrap_or_default()),
             api_endpoint: Some(OPENAI_API_ENDPOINT.into()),
             model: Some(OPENAI_DEFAULT_MODEL.into()),
         }
@@ -78,6 +92,12 @@ impl AiConfig {
         &self,
         model_name: Option<String>,
     ) -> Result<ModelConfig, String> {
+        if let Some(ollama) = self.ollama.as_ref() {
+            let model = model_name.as_deref().unwrap_or(OLLAMA_DEFAULT_MODEL);
+            let mut config = ollama.clone();
+            config.model = Some(EnvString(model.to_string()));
+            return Ok(config);
+        }
         if let Some(model) = model_name {
             match model {
                 m if m.starts_with("gpt") => {
@@ -121,6 +141,14 @@ impl AiConfig {
                 }
                 m => Err(format!("Unsupported model '{m}'.")),
             }
+        } else if let Some(ollama) = self.ollama.as_ref() {
+            if let Some(model) = model_name {
+                let mut config = ollama.clone();
+                config.model = Some(EnvString(model));
+                return Ok(config);
+            } else {
+                return Ok(ollama.clone());
+            }
         } else {
             // Default is OpenAI model
             Ok(ModelConfig::default())
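
The two additions to the resolution method (its name falls outside the hunk context shown) interact: the early `return` in the first hunk fires whenever `[ai.ollama]` is configured, which makes the `else if let Some(ollama)` branch in the last hunk unreachable, since both test the same `self.ollama`. A condensed, self-contained sketch of the resulting precedence, with a simplified `Config` standing in for `ModelConfig`/`EnvString` and a placeholder OpenAI default:

```rust
// Condensed illustration of the model-resolution precedence this commit
// introduces. `Config` is a simplified stand-in for the real `ModelConfig`;
// the OpenAI default model below is a placeholder, not the crate's value.
const OLLAMA_DEFAULT_MODEL: &str = "llama3";
const OPENAI_DEFAULT_MODEL: &str = "gpt-4o"; // placeholder

#[derive(Clone, Debug)]
struct Config {
    api_endpoint: String,
    model: String,
}

fn resolve(ollama: Option<&Config>, model_name: Option<String>) -> Config {
    // New early return: a configured [ai.ollama] wins for every request,
    // whether or not an explicit model name was given.
    if let Some(ollama) = ollama {
        return Config {
            model: model_name.unwrap_or_else(|| OLLAMA_DEFAULT_MODEL.into()),
            ..ollama.clone()
        };
    }
    // Otherwise fall back to name-based routing ("gpt*" -> OpenAI, etc.)
    // or the OpenAI default when no model name is supplied.
    Config {
        api_endpoint: "https://api.openai.com/v1".into(),
        model: model_name.unwrap_or_else(|| OPENAI_DEFAULT_MODEL.into()),
    }
}

fn main() {
    let ollama = Config {
        api_endpoint: "http://localhost:11434/v1".into(),
        model: OLLAMA_DEFAULT_MODEL.into(),
    };
    // Even a GPT-style name is routed to the local endpoint once Ollama is set:
    let config = resolve(Some(&ollama), Some("gpt-4".into()));
    assert_eq!(config.api_endpoint, "http://localhost:11434/v1");
    assert_eq!(config.model, "gpt-4");
}
```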

examples/ollama.ai

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+let a = prompt {
+    input: "What is rust?",
+    model: "llama3.2"
+};
+print(a);
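
With the `[ai.ollama]` config above, this prompt resolves to the local endpoint, and the VM's request becomes an OpenAI-style chat completion against Ollama's compatibility API. A rough Rust sketch of the equivalent raw request (illustrative; the real client lives in aiscript-vm, and the exact payload may differ):

```rust
use serde_json::json;

// Approximate raw equivalent of `prompt { input: ..., model: "llama3.2" }`
// against Ollama's OpenAI-compatible endpoint. Illustrative only; assumes
// reqwest = { version = "0.12", features = ["blocking", "json"] } and serde_json.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let response: serde_json::Value = reqwest::blocking::Client::new()
        .post("http://localhost:11434/v1/chat/completions")
        // No Authorization header: Ollama does not require an API key.
        .json(&json!({
            "model": "llama3.2",
            "messages": [{ "role": "user", "content": "What is rust?" }]
        }))
        .send()?
        .json()?;
    println!("{}", response["choices"][0]["message"]["content"]);
    Ok(())
}
```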

examples/project.toml

Lines changed: 4 additions & 0 deletions
@@ -27,3 +27,7 @@ scopes = ["email"]
 [ai.anthropic]
 api_key = "$CLAUDE_API_KEY"
 model = "claude-3-5-sonnet-latest"
+
+[ai.ollama]
+api_endpoint = "http://localhost:11434/v1"
+model = "llama3.2"
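
Since `AiConfig` derives `Deserialize` (see mod.rs above), the new `[ai.ollama]` table parses like the existing provider tables. A minimal round-trip sketch with simplified structs (the real `ModelConfig` also carries an `api_key: EnvString`, omitted here):

```rust
use serde::Deserialize;

// Simplified mirrors of the aiscript-vm config structs, just enough to show
// how the [ai.ollama] table above deserializes. Assumes the `toml` and
// `serde` (with the "derive" feature) crates.
#[derive(Debug, Deserialize)]
struct ProjectConfig {
    ai: AiConfig,
}

#[derive(Debug, Deserialize)]
struct AiConfig {
    ollama: Option<ModelConfig>,
}

#[derive(Debug, Deserialize)]
struct ModelConfig {
    api_endpoint: Option<String>,
    model: Option<String>,
}

fn main() {
    let source = r#"
        [ai.ollama]
        api_endpoint = "http://localhost:11434/v1"
        model = "llama3.2"
    "#;
    let config: ProjectConfig = toml::from_str(source).expect("valid TOML");
    let ollama = config.ai.ollama.expect("[ai.ollama] table present");
    assert_eq!(ollama.model.as_deref(), Some("llama3.2"));
}
```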
