
Commit 6eca2ff

Bump openai-api-rs version
1 parent: 83159d5

File tree

4 files changed (+13, -12 lines)

Cargo.lock

Lines changed: 3 additions & 2 deletions
Some generated files are not rendered by default.

aiscript-vm/Cargo.toml

Lines changed: 2 additions & 1 deletion
@@ -21,9 +21,10 @@ aiscript-arena = { path = "../aiscript-arena", version = "0.1.0", features = [
 # "allocator-api2",
 # "hashbrown",
 # ] }
+# openai-api-rs = { git = "https://github.com/aiscriptdev/openai-api-rs.git", branch = "main" }
+openai-api-rs = "6.0"
 ahash = "0.8"
 hashbrown = { version = "0.14", features = ["raw"] }
-openai-api-rs = { git = "https://github.com/aiscriptdev/openai-api-rs.git", branch = "main" }
 num_enum = "0.7.3"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"

aiscript-vm/src/ai/agent.rs

Lines changed: 7 additions & 8 deletions
@@ -280,7 +280,6 @@ pub async fn _run_agent<'gc>(
 ) -> Value<'gc> {
     let message = args[0];
     let debug = args[1].as_boolean();
-    println!("debug: {debug}");
     let mut history = Vec::new();
     history.push(ChatCompletionMessage {
         role: MessageRole::user,
@@ -289,7 +288,7 @@ pub async fn _run_agent<'gc>(
         tool_calls: None,
         tool_call_id: None,
     });
-    let client = super::openai_client();
+    let mut client = super::openai_client();
     loop {
         let mut messages = vec![agent.get_instruction_message()];
         messages.extend(history.clone());
@@ -309,7 +308,7 @@ pub async fn _run_agent<'gc>(
         if debug {
             println!("Response: {}", serde_json::to_string(&response).unwrap());
         }
-        history.push(convert_chat_response_message(response.clone()));
+        history.push(convert_chat_response_message(response));
         if response.tool_calls.is_none() {
             return make_response_object(
                 state,
@@ -359,12 +358,12 @@ impl From<PrimitiveType> for JSONSchemaType {
 }
 
 #[cfg(not(feature = "ai_test"))]
-fn convert_chat_response_message(m: ChatCompletionMessageForResponse) -> ChatCompletionMessage {
+fn convert_chat_response_message(m: &ChatCompletionMessageForResponse) -> ChatCompletionMessage {
     ChatCompletionMessage {
-        role: m.role,
-        content: Content::Text(m.content.unwrap_or_default()),
-        name: m.name,
-        tool_calls: m.tool_calls,
+        role: m.role.clone(),
+        content: Content::Text(m.content.clone().unwrap_or_default()),
+        name: m.name.clone(),
+        tool_calls: m.tool_calls.clone(),
         tool_call_id: None,
     }
 }
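
Note: the signature change above switches convert_chat_response_message from consuming the response message to borrowing it, which is why each field is now cloned explicitly. A minimal sketch of the same pattern, using stand-in types rather than the real openai-api-rs ones (the actual call site may already hold a reference):

// Stand-in types; the real code uses openai-api-rs's
// ChatCompletionMessageForResponse and ChatCompletionMessage.
struct ResponseMsg {
    role: String,
    content: Option<String>,
    name: Option<String>,
}

struct HistoryMsg {
    role: String,
    content: String,
    name: Option<String>,
}

// Borrowing keeps ownership with the caller (which still needs the
// response afterwards, e.g. to inspect tool_calls), at the cost of
// cloning each field while building the owned history entry.
fn convert(m: &ResponseMsg) -> HistoryMsg {
    HistoryMsg {
        role: m.role.clone(),
        content: m.content.clone().unwrap_or_default(),
        name: m.name.clone(),
    }
}

fn main() {
    let response = ResponseMsg {
        role: "assistant".into(),
        content: Some("hello".into()),
        name: None,
    };
    let mut history = Vec::new();
    history.push(convert(&response));
    // `response` is still usable after the conversion.
    assert!(response.content.is_some());
    assert_eq!(history.len(), 1);
}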

aiscript-vm/src/ai/prompt.rs

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ async fn _prompt_with_config(mut config: PromptConfig) -> String {
         common::GPT3_5_TURBO,
     };
 
-    let client = super::openai_client();
+    let mut client = super::openai_client();
 
     // Create system message if provided
     let mut messages = Vec::new();
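
Note: the let client → let mut client change here (and in agent.rs above) suggests that the 6.0 client's request methods take &mut self; a method with a &mut self receiver can only be called through a mutable binding. A tiny illustration of that rule with a made-up Client type, not the real openai-api-rs API:

struct Client {
    requests_sent: u32,
}

impl Client {
    // A &mut self receiver: the caller must have a mutable binding.
    fn chat_completion(&mut self, prompt: &str) -> String {
        self.requests_sent += 1;
        format!("echo: {prompt}")
    }
}

fn main() {
    // With `let client = ...` the call below would not compile:
    // "cannot borrow `client` as mutable, as it is not declared as mutable".
    let mut client = Client { requests_sent: 0 };
    let reply = client.chat_completion("hello");
    println!("{reply} ({} request sent)", client.requests_sent);
}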
