Skip to content

Commit eed96f5

Browse files
authored
Merge pull request #58 from jpmcb/openai-api-rs-2.0.0
feat: Upgrade to openai-api-rs 2.0.0
2 parents d454958 + 2d8b8ce commit eed96f5

File tree

4 files changed

+11
-23
lines changed

4 files changed

+11
-23
lines changed

Cargo.lock

Lines changed: 2 additions & 2 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ rayon = "1"
1818
reqwest = { version = "0.11", default-features = false, features = ["rustls-tls", "json"] }
1919
serde = "1"
2020
tokenizers = "0.14"
21-
openai-api-rs = "1.0"
21+
openai-api-rs = "2.0"
2222
zip = "0.6"
2323
rust-fuzzy-search = "0.1"
2424
text-splitter = "0.4"

src/conversation/mod.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,8 @@ impl<D: RepositoryEmbeddingsDB, M: EmbeddingsModel> Conversation<D, M> {
9393
#[allow(unused_labels)]
9494
'conversation: loop {
9595
//Generate a request with the message history and functions
96-
let request = generate_completion_request(self.messages.clone(), FunctionCallType::Auto);
96+
let request =
97+
generate_completion_request(self.messages.clone(), FunctionCallType::Auto);
9798

9899
match self.send_request(request) {
99100
Ok(response) => {

src/conversation/prompts.rs

Lines changed: 6 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use openai_api_rs::v1::chat_completion::{
2-
ChatCompletionMessage, ChatCompletionRequest, Function as F, FunctionParameters,
3-
JSONSchemaDefine, JSONSchemaType, FunctionCallType,
2+
ChatCompletionMessage, ChatCompletionRequest, Function as F, FunctionCallType,
3+
FunctionParameters, JSONSchemaDefine, JSONSchemaType,
44
};
55
use std::collections::HashMap;
66

@@ -17,23 +17,10 @@ pub fn generate_completion_request(
1717
messages: Vec<ChatCompletionMessage>,
1818
function_call: FunctionCallType,
1919
) -> ChatCompletionRequest {
20-
21-
ChatCompletionRequest {
22-
model: CHAT_COMPLETION_MODEL.into(),
23-
messages,
24-
functions: Some(functions()),
25-
function_call: Some(function_call),
26-
temperature: Some(CHAT_COMPLETION_TEMPERATURE),
27-
top_p: None,
28-
n: None,
29-
stream: None,
30-
stop: None,
31-
max_tokens: None,
32-
presence_penalty: None,
33-
frequency_penalty: None,
34-
logit_bias: None,
35-
user: None,
36-
}
20+
ChatCompletionRequest::new(CHAT_COMPLETION_MODEL.to_string(), messages)
21+
.functions(functions())
22+
.function_call(function_call)
23+
.temperature(CHAT_COMPLETION_TEMPERATURE)
3724
}
3825

3926
pub fn functions() -> Vec<F> {

0 commit comments

Comments (0)