Skip to content

Commit ed1ef78

Browse files
Uchiha007Folyd
authored and committed
Support deepseek
1 parent 608e446 commit ed1ef78

File tree

4 files changed

+47
-14
lines changed

4 files changed

+47
-14
lines changed

README.md

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -53,8 +53,15 @@ AIScript isn't just a language—it's a complete web development solution:
5353
## How AIScript works
5454

5555
```javascript
56+
// use OpenAI
5657
$ export OPENAI_API_KEY=<your-openai-api-key>
5758

59+
// or use Deepseek
60+
$ export DEEPSEEK_API_KEY=<your-deepseek-api-key>
61+
62+
// optional, default is https://api.deepseek.com
63+
$ export DEEPSEEK_API_ENDPOINT=<your-deepseek-api-endpoint>
64+
5865
$ cat web.ai
5966
get / {
6067
"""An api to ask LLM"""

aiscript-vm/src/ai/agent.rs

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ use openai_api_rs::v1::{
88
ChatCompletionMessage, ChatCompletionMessageForResponse, ChatCompletionRequest, Content,
99
MessageRole, Tool, ToolCall, ToolChoiceType, ToolType,
1010
},
11-
common::GPT3_5_TURBO,
1211
types::{self, FunctionParameters, JSONSchemaDefine},
1312
};
1413
use tokio::runtime::Handle;
@@ -278,6 +277,8 @@ pub async fn _run_agent<'gc>(
278277
mut agent: Gc<'gc, Agent<'gc>>,
279278
args: Vec<Value<'gc>>,
280279
) -> Value<'gc> {
280+
use super::default_model;
281+
281282
let message = args[0];
282283
let debug = args[1].as_boolean();
283284
let mut history = Vec::new();
@@ -292,7 +293,7 @@ pub async fn _run_agent<'gc>(
292293
loop {
293294
let mut messages = vec![agent.get_instruction_message()];
294295
messages.extend(history.clone());
295-
let mut req = ChatCompletionRequest::new(GPT3_5_TURBO.to_string(), messages);
296+
let mut req = ChatCompletionRequest::new(default_model().to_string(), messages);
296297
let tools = agent.get_tools();
297298
if !tools.is_empty() {
298299
req = req

aiscript-vm/src/ai/mod.rs

Lines changed: 32 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ mod prompt;
44
use std::env;
55

66
pub use agent::{Agent, run_agent};
7-
use openai_api_rs::v1::api::OpenAIClient;
7+
use openai_api_rs::v1::{api::OpenAIClient, common::GPT3_5_TURBO};
88
pub use prompt::{PromptConfig, prompt_with_config};
99

1010
use serde::Deserialize;
@@ -22,10 +22,37 @@ pub struct ModelConfig {
2222
pub model: Option<String>,
2323
}
2424

25+
// Model identifier for Deepseek's OpenAI-compatible chat API.
const DEEPSEEK_CHAT: &str = "deepseek-chat";
27+
28+
/// We use OPENAI_API_KEY as default,
29+
/// buf if don't have OPENAI_API_KEY, we use DEEPSEEK_API_KEY and DEEPSEEK_API_ENDPOINT both
2530
#[allow(unused)]
2631
pub(crate) fn openai_client() -> OpenAIClient {
27-
OpenAIClient::builder()
28-
.with_api_key(env::var("OPENAI_API_KEY").unwrap().to_string())
29-
.build()
30-
.unwrap()
32+
if let Ok(api_key) = env::var("OPENAI_API_KEY") {
33+
return OpenAIClient::builder()
34+
.with_api_key(api_key)
35+
.build()
36+
.unwrap();
37+
}
38+
if let Ok(api_key) = env::var("DEEPSEEK_API_KEY") {
39+
let api_endpoint =
40+
env::var("DEEPSEEK_API_ENDPOINT").unwrap_or("https://api.deepseek.com".to_string());
41+
return OpenAIClient::builder()
42+
.with_api_key(api_key)
43+
.with_endpoint(api_endpoint)
44+
.build()
45+
.unwrap();
46+
}
47+
panic!("No API key or endpoint found.");
48+
}
49+
50+
pub(crate) fn default_model() -> &'static str {
51+
if env::var("OPENAI_API_KEY").is_ok() {
52+
return GPT3_5_TURBO;
53+
} else if env::var("DEEPSEEK_API_KEY").is_ok() {
54+
return DEEPSEEK_CHAT;
55+
} else {
56+
panic!("No API key found.");
57+
}
3158
}

aiscript-vm/src/ai/prompt.rs

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
1-
use openai_api_rs::v1::common::GPT3_5_TURBO;
21
use tokio::runtime::Handle;
32

3+
use super::default_model;
4+
45
pub struct PromptConfig {
56
pub input: String,
67
pub model: Option<String>,
@@ -13,7 +14,7 @@ impl Default for PromptConfig {
1314
fn default() -> Self {
1415
Self {
1516
input: String::new(),
16-
model: Some(GPT3_5_TURBO.to_string()),
17+
model: Some(default_model().to_string()),
1718
max_tokens: Default::default(),
1819
temperature: Default::default(),
1920
system_prompt: Default::default(),
@@ -28,10 +29,7 @@ async fn _prompt_with_config(config: PromptConfig) -> String {
2829

2930
#[cfg(not(feature = "ai_test"))]
3031
async fn _prompt_with_config(mut config: PromptConfig) -> String {
31-
use openai_api_rs::v1::{
32-
chat_completion::{self, ChatCompletionRequest},
33-
common::GPT3_5_TURBO,
34-
};
32+
use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};
3533

3634
let mut client = super::openai_client();
3735

@@ -61,7 +59,7 @@ async fn _prompt_with_config(mut config: PromptConfig) -> String {
6159
config
6260
.model
6361
.take()
64-
.unwrap_or_else(|| GPT3_5_TURBO.to_string()),
62+
.unwrap_or_else(|| default_model().to_string()),
6563
messages,
6664
);
6765

0 commit comments

Comments
 (0)