@@ -50,11 +50,11 @@ use fig_settings::Settings;
 use fig_util::CLI_BINARY_NAME;
 use input_source::InputSource;
 use parser::{
-    RecvError,
     RecvErrorKind,
     ResponseParser,
     ToolUse,
 };
+use regex::Regex;
 use serde_json::Map;
 use spinners::{
     Spinner,
@@ -65,6 +65,7 @@ use tokio::signal::unix::{
     SignalKind,
     signal,
 };
+use tools::gh_issue::GhIssueContext;
 use tools::{
     Tool,
     ToolSpec,
@@ -260,6 +261,8 @@ pub struct ChatContext<W: Write> {
     /// State used to keep track of tool use relation
     tool_use_status: ToolUseStatus,
     accept_all: bool,
+    /// Any failed requests that could be useful for error report/debugging
+    failed_request_ids: Vec<String>,
 }
 
 impl<W: Write> ChatContext<W> {
@@ -291,6 +294,7 @@ impl<W: Write> ChatContext<W> {
             tool_use_telemetry_events: HashMap::new(),
             tool_use_status: ToolUseStatus::Idle,
             accept_all,
+            failed_request_ids: Vec::new(),
         })
     }
 }
@@ -388,6 +392,9 @@ where
             });
         }
 
+        // Remove non-ASCII and ANSI characters.
+        let re = Regex::new(r"((\x9B|\x1B\[)[0-?]*[ -\/]*[@-~])|([^\x00-\x7F]+)").unwrap();
+
         loop {
             debug_assert!(next_state.is_some());
             let chat_state = next_state.take().unwrap_or_default();
@@ -428,28 +435,37 @@ where
             match result {
                 Ok(state) => next_state = Some(state),
                 Err(e) => {
-                    fn print_error<W: Write>(
-                        output: &mut W,
-                        prepend_msg: &str,
-                        report: Option<eyre::Report>,
-                    ) -> Result<(), std::io::Error> {
+                    let mut print_error = |output: &mut W,
+                                           prepend_msg: &str,
+                                           report: Option<eyre::Report>|
+                     -> Result<(), std::io::Error> {
                         queue!(
                             output,
                             style::SetAttribute(Attribute::Bold),
                             style::SetForegroundColor(Color::Red),
                         )?;
 
                         match report {
-                            Some(report) => queue!(output, style::Print(format!("{}: {:?}\n", prepend_msg, report)),)?,
-                            None => queue!(output, style::Print(prepend_msg), style::Print("\n"))?,
+                            Some(report) => {
+                                let text = re
+                                    .replace_all(&format!("{}: {:?}\n", prepend_msg, report), "")
+                                    .into_owned();
+
+                                queue!(output, style::Print(&text),)?;
+                                self.conversation_state.append_transcript(text);
+                            },
+                            None => {
+                                queue!(output, style::Print(prepend_msg), style::Print("\n"))?;
+                                self.conversation_state.append_transcript(prepend_msg.to_string());
+                            },
                         }
 
                         execute!(
                             output,
                             style::SetAttribute(Attribute::Reset),
                             style::SetForegroundColor(Color::Reset),
                         )
-                    }
+                    };
 
                     error!(?e, "An error occurred processing the current state");
                     if self.interactive && self.spinner.is_some() {
@@ -987,7 +1003,13 @@ where
                 style::Print(format!("{}\n", "▔".repeat(terminal_width))),
                 style::SetForegroundColor(Color::Reset),
             )?;
-            let invoke_result = tool.1.invoke(&self.ctx, &mut self.output).await;
+            let invoke_result = tool
+                .1
+                .invoke(&self.ctx, &mut self.output, GhIssueContext {
+                    conversation_state: &self.conversation_state,
+                    failed_request_ids: &self.failed_request_ids,
+                })
+                .await;
 
             if self.interactive && self.spinner.is_some() {
                 queue!(
@@ -1105,78 +1127,81 @@ where
                         },
                     }
                 },
-                Err(RecvError {
-                    request_id,
-                    source: RecvErrorKind::StreamTimeout { source, duration },
-                }) => {
-                    error!(
-                        request_id,
-                        ?source,
-                        "Encountered a stream timeout after waiting for {}s",
-                        duration.as_secs()
-                    );
-                    if self.interactive {
-                        execute!(self.output, cursor::Hide)?;
-                        self.spinner = Some(Spinner::new(Spinners::Dots, "Dividing up the work...".to_string()));
-                    }
-                    // For stream timeouts, we'll tell the model to try and split its response into
-                    // smaller chunks.
-                    self.conversation_state
-                        .push_assistant_message(AssistantResponseMessage {
-                            message_id: None,
-                            content: "Response timed out - message took too long to generate".to_string(),
-                            tool_uses: None,
-                        });
-                    self.conversation_state
-                        .append_new_user_message(
-                            "You took too long to respond - try to split up the work into smaller steps.".to_string(),
-                        )
-                        .await;
-                    self.send_tool_use_telemetry().await;
-                    return Ok(ChatState::HandleResponseStream(
-                        self.client
-                            .send_message(self.conversation_state.as_sendable_conversation_state().await)
-                            .await?,
-                    ));
-                },
-                Err(RecvError {
-                    request_id,
-                    source:
+                Err(recv_error) => {
+                    if let Some(request_id) = &recv_error.request_id {
+                        self.failed_request_ids.push(request_id.clone());
+                    };
+
+                    match recv_error.source {
+                        RecvErrorKind::StreamTimeout { source, duration } => {
+                            error!(
+                                recv_error.request_id,
+                                ?source,
+                                "Encountered a stream timeout after waiting for {}s",
+                                duration.as_secs()
+                            );
+                            if self.interactive {
+                                execute!(self.output, cursor::Hide)?;
+                                self.spinner =
+                                    Some(Spinner::new(Spinners::Dots, "Dividing up the work...".to_string()));
+                            }
+                            // For stream timeouts, we'll tell the model to try and split its response into
+                            // smaller chunks.
+                            self.conversation_state
+                                .push_assistant_message(AssistantResponseMessage {
+                                    message_id: None,
+                                    content: "Response timed out - message took too long to generate".to_string(),
+                                    tool_uses: None,
+                                });
+                            self.conversation_state
+                                .append_new_user_message(
+                                    "You took too long to respond - try to split up the work into smaller steps."
+                                        .to_string(),
+                                )
+                                .await;
+                            self.send_tool_use_telemetry().await;
+                            return Ok(ChatState::HandleResponseStream(
+                                self.client
+                                    .send_message(self.conversation_state.as_sendable_conversation_state().await)
+                                    .await?,
+                            ));
+                        },
                         RecvErrorKind::UnexpectedToolUseEos {
                             tool_use_id,
                             name,
                             message,
+                        } => {
+                            error!(
+                                recv_error.request_id,
+                                tool_use_id, name, "The response stream ended before the entire tool use was received"
+                            );
+                            if self.interactive {
+                                execute!(self.output, cursor::Hide)?;
+                                self.spinner = Some(Spinner::new(
+                                    Spinners::Dots,
+                                    "The generated tool use was too large, trying to divide up the work...".to_string(),
+                                ));
+                            }
+
+                            self.conversation_state.push_assistant_message(*message);
+                            let tool_results = vec![ToolResult {
+                                tool_use_id,
+                                content: vec![ToolResultContentBlock::Text(
+                                    "The generated tool was too large, try again but this time split up the work between multiple tool uses".to_string(),
+                                )],
+                                status: ToolResultStatus::Error,
+                            }];
+                            self.conversation_state.add_tool_results(tool_results);
+                            self.send_tool_use_telemetry().await;
+                            return Ok(ChatState::HandleResponseStream(
+                                self.client
+                                    .send_message(self.conversation_state.as_sendable_conversation_state().await)
+                                    .await?,
+                            ));
+                        },
                         },
-                }) => {
-                    error!(
-                        request_id,
-                        tool_use_id, name, "The response stream ended before the entire tool use was received"
-                    );
-                    if self.interactive {
-                        execute!(self.output, cursor::Hide)?;
-                        self.spinner = Some(Spinner::new(
-                            Spinners::Dots,
-                            "The generated tool use was too large, trying to divide up the work...".to_string(),
-                        ));
+                        _ => return Err(recv_error.into()),
                     }
-
-                    self.conversation_state.push_assistant_message(*message);
-                    let tool_results = vec![ToolResult {
-                        tool_use_id,
-                        content: vec![ToolResultContentBlock::Text(
-                            "The generated tool was too large, try again but this time split up the work between multiple tool uses".to_string(),
-                        )],
-                        status: ToolResultStatus::Error,
-                    }];
-                    self.conversation_state.add_tool_results(tool_results);
-                    self.send_tool_use_telemetry().await;
-                    return Ok(ChatState::HandleResponseStream(
-                        self.client
-                            .send_message(self.conversation_state.as_sendable_conversation_state().await)
-                            .await?,
-                    ));
                 },
-                Err(err) => return Err(err.into()),
             }
 
             // Fix for the markdown parser copied over from q chat:
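
Note: the `Regex` added above strips both ANSI CSI escape sequences and non-ASCII runs from error text before it is printed and appended to the transcript. A minimal standalone sketch of that behaviour, using the same pattern (the input string below is made up for illustration, not taken from this commit):

use regex::Regex;

fn main() {
    // Same pattern as in the diff: CSI escape sequences, or runs of non-ASCII characters.
    let re = Regex::new(r"((\x9B|\x1B\[)[0-?]*[ -\/]*[@-~])|([^\x00-\x7F]+)").unwrap();

    // Hypothetical colored error output containing ANSI styling and a non-ASCII symbol.
    let colored = "\x1b[1m\x1b[31mAmazon Q is having trouble responding right now ✗\x1b[0m";

    // Everything the pattern matches is removed, leaving plain ASCII text that is
    // safe to store in the conversation transcript.
    let plain = re.replace_all(colored, "").into_owned();
    println!("{plain}"); // prints: Amazon Q is having trouble responding right now
}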