Skip to content

Commit 8d816ef

Browse files
committed
Support deepseek
1 parent 7b74dde commit 8d816ef

File tree

4 files changed

+47
-14
lines changed

4 files changed

+47
-14
lines changed

README.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,15 @@ AIScript isn't just a language—it's a complete web development solution:
5353
## How AIScript works
5454

5555
```javascript
56+
// use OpenAI
5657
$ export OPENAI_API_KEY=<your-openai-api-key>
5758

59+
// or use Deepseek
60+
$ export DEEPSEEK_API_KEY=<your-deepseek-api-key>
61+
62+
// optional, default is https://api.deepseek.com
63+
$ export DEEPSEEK_API_ENDPOINT=<your-deepseek-api-endpoint>
64+
5865
$ cat web.ai
5966
get / {
6067
"""An api to ask LLM"""

aiscript-vm/src/ai/agent.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ use openai_api_rs::v1::{
88
ChatCompletionMessage, ChatCompletionMessageForResponse, ChatCompletionRequest, Content,
99
MessageRole, Tool, ToolCall, ToolChoiceType, ToolType,
1010
},
11-
common::GPT3_5_TURBO,
1211
types::{self, FunctionParameters, JSONSchemaDefine},
1312
};
1413
use tokio::runtime::Handle;
@@ -278,6 +277,8 @@ pub async fn _run_agent<'gc>(
278277
mut agent: Gc<'gc, Agent<'gc>>,
279278
args: Vec<Value<'gc>>,
280279
) -> Value<'gc> {
280+
use super::default_model;
281+
281282
let message = args[0];
282283
let debug = args[1].as_boolean();
283284
let mut history = Vec::new();
@@ -292,7 +293,7 @@ pub async fn _run_agent<'gc>(
292293
loop {
293294
let mut messages = vec![agent.get_instruction_message()];
294295
messages.extend(history.clone());
295-
let mut req = ChatCompletionRequest::new(GPT3_5_TURBO.to_string(), messages);
296+
let mut req = ChatCompletionRequest::new(default_model().to_string(), messages);
296297
let tools = agent.get_tools();
297298
if !tools.is_empty() {
298299
req = req

aiscript-vm/src/ai/mod.rs

Lines changed: 32 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,40 @@ mod prompt;
44
use std::env;
55

66
pub use agent::{Agent, run_agent};
7-
use openai_api_rs::v1::api::OpenAIClient;
7+
use openai_api_rs::v1::{api::OpenAIClient, common::GPT3_5_TURBO};
88
pub use prompt::{PromptConfig, prompt_with_config};
99

10+
// deepseek-chat
11+
const DEEPSEEK_CHAT: &str = "deepseek-chat";
12+
13+
/// We use OPENAI_API_KEY by default,
14+
/// but if it is not set, we fall back to DEEPSEEK_API_KEY (and, optionally, DEEPSEEK_API_ENDPOINT)
1015
#[allow(unused)]
1116
pub(crate) fn openai_client() -> OpenAIClient {
12-
OpenAIClient::builder()
13-
.with_api_key(env::var("OPENAI_API_KEY").unwrap().to_string())
14-
.build()
15-
.unwrap()
17+
if let Ok(api_key) = env::var("OPENAI_API_KEY") {
18+
return OpenAIClient::builder()
19+
.with_api_key(api_key)
20+
.build()
21+
.unwrap();
22+
}
23+
if let Ok(api_key) = env::var("DEEPSEEK_API_KEY") {
24+
let api_endpoint =
25+
env::var("DEEPSEEK_API_ENDPOINT").unwrap_or("https://api.deepseek.com".to_string());
26+
return OpenAIClient::builder()
27+
.with_api_key(api_key)
28+
.with_endpoint(api_endpoint)
29+
.build()
30+
.unwrap();
31+
}
32+
panic!("No API key or endpoint found.");
33+
}
34+
35+
pub(crate) fn default_model() -> &'static str {
36+
if env::var("OPENAI_API_KEY").is_ok() {
37+
return GPT3_5_TURBO;
38+
} else if env::var("DEEPSEEK_API_KEY").is_ok() {
39+
return DEEPSEEK_CHAT;
40+
} else {
41+
panic!("No API key found.");
42+
}
1643
}

aiscript-vm/src/ai/prompt.rs

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
1-
use openai_api_rs::v1::common::GPT3_5_TURBO;
21
use tokio::runtime::Handle;
32

3+
use super::default_model;
4+
45
pub struct PromptConfig {
56
pub input: String,
67
pub model: Option<String>,
@@ -13,7 +14,7 @@ impl Default for PromptConfig {
1314
fn default() -> Self {
1415
Self {
1516
input: String::new(),
16-
model: Some(GPT3_5_TURBO.to_string()),
17+
model: Some(default_model().to_string()),
1718
max_tokens: Default::default(),
1819
temperature: Default::default(),
1920
system_prompt: Default::default(),
@@ -28,10 +29,7 @@ async fn _prompt_with_config(config: PromptConfig) -> String {
2829

2930
#[cfg(not(feature = "ai_test"))]
3031
async fn _prompt_with_config(mut config: PromptConfig) -> String {
31-
use openai_api_rs::v1::{
32-
chat_completion::{self, ChatCompletionRequest},
33-
common::GPT3_5_TURBO,
34-
};
32+
use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
3533

3634
let mut client = super::openai_client();
3735

@@ -61,7 +59,7 @@ async fn _prompt_with_config(mut config: PromptConfig) -> String {
6159
config
6260
.model
6361
.take()
64-
.unwrap_or_else(|| GPT3_5_TURBO.to_string()),
62+
.unwrap_or_else(|| default_model().to_string()),
6563
messages,
6664
);
6765

0 commit comments

Comments
 (0)