Commit f0d336d

fix: handle OpenAI API error variants 🐛
- Improve error handling for various OpenAI API errors.
- Enhance user feedback for network, JSON, and other API errors.
- Refactor Result type for consistency across the `openai` module.
1 parent 70da054 commit f0d336d
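
The commit message above describes mapping OpenAI API error variants to short, user-facing messages. As a minimal sketch (not code from the repository), the same mapping could be factored into a standalone helper; the function name friendly_message is hypothetical, and the sketch assumes the async_openai version used by this commit, where these are the only OpenAIError variants. The actual handling in this commit lives inline in src/main.rs, shown in the diff below.

use async_openai::error::OpenAIError;

// Hypothetical helper illustrating the variant-to-message mapping used at the
// call site in src/main.rs. Assumes OpenAIError has exactly these variants
// (as in the diff); otherwise a catch-all arm would be required.
fn friendly_message(err: OpenAIError) -> String {
    match err {
        OpenAIError::Reqwest(_) | OpenAIError::StreamError(_) => "network request error".to_string(),
        OpenAIError::JSONDeserialize(_) => "json deserialization error".to_string(),
        OpenAIError::InvalidArgument(_) => "invalid argument".to_string(),
        OpenAIError::FileSaveError(_) | OpenAIError::FileReadError(_) => "io error".to_string(),
        OpenAIError::ApiError(e) => format!("api error {:?}", e),
    }
}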

2 files changed: 47 additions & 26 deletions


src/main.rs

Lines changed: 40 additions & 22 deletions
@@ -9,13 +9,14 @@
  * File Created: 2025-03-01 17:17:30
  *
  * Modified By: mingcheng ([email protected])
- * Last Modified: 2025-03-04 16:30:20
+ * Last Modified: 2025-03-16 23:11:04
  */
 
 use aigitcommit::cli::Cli;
 use aigitcommit::openai::OpenAI;
 use aigitcommit::{git, openai};
 use arboard::Clipboard;
+use async_openai::error::OpenAIError;
 use async_openai::types::{
     ChatCompletionRequestSystemMessageArgs, ChatCompletionRequestUserMessageArgs,
 };
@@ -33,17 +34,17 @@ async fn main() -> std::result::Result<(), Box<dyn Error>> {
     let cli = Cli::parse();
 
     // Initialize logging
-    tracing_subscriber::fmt()
-        .with_max_level(if cli.verbose {
-            trace!("Verbose mode enabled, set the log level to TRACE. It will makes a little bit noise.");
-            Level::TRACE
-        } else {
-            debug!("Verbose mode disabled, set the default log level to WARN");
-            Level::WARN
-        })
-        .without_time()
-        .with_target(false)
-        .init();
+    if cli.verbose {
+        tracing_subscriber::fmt()
+            .with_max_level(Level::TRACE)
+            .without_time()
+            .with_target(false)
+            .init();
+
+        trace!(
+            "Verbose mode enabled, set the log level to TRACE. It will makes a little bit noise."
+        );
+    }
 
     // Check if the specified path is a valid directory
     let repo_dir = fs::canonicalize(&cli.repo_path)?;
@@ -67,11 +68,11 @@ async fn main() -> std::result::Result<(), Box<dyn Error>> {
     // Get the last 5 commit logs
     // if the repository has less than 5 commits, it will return all logs
     let logs = repository.get_logs(5)?;
-    debug!("Got logs size is {}", logs.len());
+    debug!("got logs size is {}", logs.len());
 
     // If git commit log is empty, return error
     if logs.is_empty() {
-        return Err("No commit logs found".into());
+        return Err("no commit logs found".into());
     }
 
     // Instantiate OpenAI client, ready to send requests to the OpenAI API
@@ -106,7 +107,24 @@ async fn main() -> std::result::Result<(), Box<dyn Error>> {
     ];
 
     // Send the request to OpenAI API and get the response
-    let result = client.chat(&model_name.to_string(), messages).await?;
+    let result = match client.chat(&model_name.to_string(), messages).await {
+        Ok(s) => s,
+        Err(e) => {
+            let message = match e {
+                OpenAIError::Reqwest(_) | OpenAIError::StreamError(_) => {
+                    "network request error".to_string()
+                }
+                OpenAIError::JSONDeserialize(_err) => "json deserialization error".to_string(),
+                OpenAIError::InvalidArgument(_) => "invalid argument".to_string(),
+                OpenAIError::FileSaveError(_) | OpenAIError::FileReadError(_) => {
+                    "io error".to_string()
+                }
+                OpenAIError::ApiError(e) => format!("api error {:?}", e),
+            };
+
+            return Err(message.into());
+        }
+    };
 
     trace!("write to stdout, and finish the process");
     writeln!(std::io::stdout(), "{}", result)?;
@@ -123,36 +141,36 @@ async fn main() -> std::result::Result<(), Box<dyn Error>> {
 
     // directly commit the changes to the repository if the --commit option is enabled
     if cli.commit {
-        trace!("Commit option is enabled, will commit the changes to the repository");
+        trace!("commit option is enabled, will commit the changes to the repository");
         let mut confirm = Confirm::new();
         confirm
-            .with_prompt("Do you want to commit the changes with the generated commit message?")
+            .with_prompt("do you want to commit the changes with the generated commit message?")
             .default(false);
 
         // Prompt the user for confirmation if --yes option is not enabled
         if cli.yes || confirm.interact()? {
             match repository.commit(&result) {
                 Ok(_) => {
-                    writeln!(std::io::stdout(), "Commit successful!")?;
+                    writeln!(std::io::stdout(), "commit successful!")?;
                 }
                 Err(e) => {
-                    writeln!(std::io::stderr(), "Commit failed: {}", e)?;
+                    writeln!(std::io::stderr(), "commit failed: {}", e)?;
                 }
             }
         }
     }
 
     // If the --save option is enabled, save the commit message to a file
     if !cli.save.is_empty() {
-        trace!("Save option is enabled, will save the commit message to a file");
+        trace!("save option is enabled, will save the commit message to a file");
        let save_path = &cli.save;
-        debug!("The save file path is {:?}", &save_path);
+        debug!("the save file path is {:?}", &save_path);
 
         let mut file = File::create(save_path)?;
         file.write_all(result.as_bytes())?;
         file.flush()?;
 
-        writeln!(std::io::stdout(), "Commit message saved to {}", &save_path)?;
+        writeln!(std::io::stdout(), "commit message saved to {}", &save_path)?;
     }
 
     Ok(())
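
In the new call-site handling, return Err(message.into()) type-checks because main returns std::result::Result<(), Box<dyn Error>> and the standard library provides a From<String> conversion into Box<dyn Error>. A minimal standalone sketch of that conversion (not code from the repository; run and error text are illustrative only):

use std::error::Error;

// Standalone illustration: a String converts into Box<dyn Error> via std's From impl,
// which is the same conversion `message.into()` relies on in the diff above.
fn run() -> Result<(), Box<dyn Error>> {
    let message = String::from("network request error");
    Err(message.into())
}

fn main() {
    if let Err(e) = run() {
        eprintln!("{}", e);
    }
}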

src/openai.rs

Lines changed: 7 additions & 4 deletions
@@ -11,9 +11,9 @@
  * Modified By: mingcheng ([email protected])
  * Last Modified: 2025-03-03 23:58:02
  */
-
 use askama::Template;
 use async_openai::config::OPENAI_API_BASE;
+use async_openai::error::OpenAIError;
 use async_openai::{
     config::OpenAIConfig,
     types::{ChatCompletionRequestMessage, CreateChatCompletionRequestArgs},
@@ -73,13 +73,16 @@ impl OpenAI {
         &self,
         model_name: &str,
         message: Vec<ChatCompletionRequestMessage>,
-    ) -> Result<String, Box<dyn Error>> {
+    ) -> Result<String, OpenAIError> {
         let request = CreateChatCompletionRequestArgs::default()
             .model(model_name)
             .messages(message)
             .build()?;
 
-        let response = self.client.chat().create(request).await?;
+        let response = match self.client.chat().create(request).await {
+            Ok(s) => s,
+            Err(e) => return Err(e),
+        };
 
         let mut result = vec![];
         response.choices.iter().for_each(|choice| {
@@ -88,7 +91,7 @@ impl OpenAI {
 
         if let Option::Some(usage) = response.usage {
             debug!(
-                "Usage: completion_tokens: {}, prompt_tokens: {}, total_tokens: {}",
+                "usage: completion_tokens: {}, prompt_tokens: {}, total_tokens: {}",
                 usage.completion_tokens, usage.prompt_tokens, usage.total_tokens
            );
         }
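
Since chat now returns Result<String, OpenAIError> and create already yields the same error type, the explicit match in the hunk above behaves the same as propagating the error with the ? operator. A minimal sketch of that type alignment, shown only as an illustration (the propagate function is hypothetical, not code from the repository):

use async_openai::error::OpenAIError;

// Illustration of what the Result-type refactor buys: when a function's error type
// is OpenAIError, a Result<_, OpenAIError> from a lower-level call can be forwarded
// with `?`, which behaves the same as `match res { Ok(s) => s, Err(e) => return Err(e) }`.
fn propagate(res: Result<String, OpenAIError>) -> Result<String, OpenAIError> {
    let value = res?;
    Ok(value)
}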
