@@ -140,8 +140,7 @@ func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.
140140 AltPlaceholder : `Use """ to end multi-line input` ,
141141 })
142142 if err != nil {
143- // Fall back to basic input mode if readline initialization fails
144- return generateInteractiveBasic (cmd , desktopClient , model )
143+ return err
145144 }
146145
147146 // Disable history if the environment variable is set
@@ -154,6 +153,7 @@ func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.
154153
155154 var sb strings.Builder
156155 var multiline bool
156+ var conversationHistory []desktop.OpenAIChatMessage
157157
158158 // Add a helper function to handle file inclusion when @ is pressed
159159 // We'll implement a basic version here that shows a message when @ is pressed
@@ -245,7 +245,7 @@ func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.
245245 }
246246 }()
247247
248- err := chatWithMarkdownContext (chatCtx , cmd , desktopClient , model , userInput )
248+ assistantResponse , err := chatWithMarkdownContext (chatCtx , cmd , desktopClient , model , userInput , conversationHistory )
249249
250250 // Clean up signal handler
251251 signal .Stop (sigChan )
@@ -263,70 +263,22 @@ func generateInteractiveWithReadline(cmd *cobra.Command, desktopClient *desktop.
263263 continue
264264 }
265265
266+ // Add the user message and assistant response to conversation history
267+ conversationHistory = append (conversationHistory , desktop.OpenAIChatMessage {
268+ Role : "user" ,
269+ Content : userInput ,
270+ })
271+ conversationHistory = append (conversationHistory , desktop.OpenAIChatMessage {
272+ Role : "assistant" ,
273+ Content : assistantResponse ,
274+ })
275+
266276 cmd .Println ()
267277 sb .Reset ()
268278 }
269279 }
270280}
271281
272- // generateInteractiveBasic provides a basic interactive mode (fallback)
273- func generateInteractiveBasic (cmd * cobra.Command , desktopClient * desktop.Client , model string ) error {
274- scanner := bufio .NewScanner (os .Stdin )
275- for {
276- userInput , err := readMultilineInput (cmd , scanner )
277- if err != nil {
278- if err .Error () == "EOF" {
279- break
280- }
281- return fmt .Errorf ("Error reading input: %w" , err )
282- }
283-
284- if strings .ToLower (strings .TrimSpace (userInput )) == "/bye" {
285- break
286- }
287-
288- if strings .TrimSpace (userInput ) == "" {
289- continue
290- }
291-
292- // Create a cancellable context for the chat request
293- // This allows us to cancel the request if the user presses Ctrl+C during response generation
294- chatCtx , cancelChat := context .WithCancel (cmd .Context ())
295-
296- // Set up signal handler to cancel the context on Ctrl+C
297- sigChan := make (chan os.Signal , 1 )
298- signal .Notify (sigChan , syscall .SIGINT )
299- go func () {
300- select {
301- case <- sigChan :
302- cancelChat ()
303- case <- chatCtx .Done ():
304- // Context cancelled, exit goroutine
305- // Context cancelled, exit goroutine
306- }
307- }()
308-
309- err = chatWithMarkdownContext (chatCtx , cmd , desktopClient , model , userInput )
310-
311- cancelChat ()
312- signal .Stop (sigChan )
313- cancelChat ()
314-
315- if err != nil {
316- // Check if the error is due to context cancellation (Ctrl+C during response)
317- if errors .Is (err , context .Canceled ) {
318- fmt .Println ("\n Use Ctrl + d or /bye to exit." )
319- } else {
320- cmd .PrintErrln (handleClientError (err , "Failed to generate a response" ))
321- }
322- continue
323- }
324-
325- cmd .Println ()
326- }
327- return nil
328- }
329-
330282var (
331283 markdownRenderer * glamour.TermRenderer
332284 lastWidth int
@@ -509,40 +461,42 @@ func renderMarkdown(content string) (string, error) {
509461
510462// chatWithMarkdown performs chat and streams the response with selective markdown rendering.
511463func chatWithMarkdown (cmd * cobra.Command , client * desktop.Client , model , prompt string ) error {
512- return chatWithMarkdownContext (cmd .Context (), cmd , client , model , prompt )
464+ _ , err := chatWithMarkdownContext (cmd .Context (), cmd , client , model , prompt , nil )
465+ return err
513466}
514467
515468// chatWithMarkdownContext performs chat with context support and streams the response with selective markdown rendering.
516- func chatWithMarkdownContext (ctx context.Context , cmd * cobra.Command , client * desktop.Client , model , prompt string ) error {
469+ // It accepts an optional conversation history and returns the assistant's response for history tracking.
470+ func chatWithMarkdownContext (ctx context.Context , cmd * cobra.Command , client * desktop.Client , model , prompt string , conversationHistory []desktop.OpenAIChatMessage ) (string , error ) {
517471 colorMode , _ := cmd .Flags ().GetString ("color" )
518472 useMarkdown := shouldUseMarkdown (colorMode )
519473 debug , _ := cmd .Flags ().GetBool ("debug" )
520474
521475 // Process file inclusions first (files referenced with @ symbol)
522476 prompt , err := processFileInclusions (prompt )
523477 if err != nil {
524- return fmt .Errorf ("failed to process file inclusions: %w" , err )
478+ return "" , fmt .Errorf ("failed to process file inclusions: %w" , err )
525479 }
526480
527481 var imageURLs []string
528482 cleanedPrompt , imgs , err := processImagesInPrompt (prompt )
529483 if err != nil {
530- return fmt .Errorf ("failed to process images: %w" , err )
484+ return "" , fmt .Errorf ("failed to process images: %w" , err )
531485 }
532486 prompt = cleanedPrompt
533487 imageURLs = imgs
534488
535489 if ! useMarkdown {
536490 // Simple case: just stream as plain text
537- return client .ChatWithContext (ctx , model , prompt , imageURLs , func (content string ) {
491+ return client .ChatWithMessagesContext (ctx , model , conversationHistory , prompt , imageURLs , func (content string ) {
538492 cmd .Print (content )
539493 }, false )
540494 }
541495
542496 // For markdown: use streaming buffer to render code blocks as they complete
543497 markdownBuffer := NewStreamingMarkdownBuffer ()
544498
545- err = client .ChatWithContext (ctx , model , prompt , imageURLs , func (content string ) {
 499+ assistantResponse , err := client .ChatWithMessagesContext (ctx , model , conversationHistory , prompt , imageURLs , func (content string ) {
546500 // Use the streaming markdown buffer to intelligently render content
547501 rendered , err := markdownBuffer .AddContent (content , true )
548502 if err != nil {
@@ -556,15 +510,15 @@ func chatWithMarkdownContext(ctx context.Context, cmd *cobra.Command, client *de
556510 }
557511 }, true )
558512 if err != nil {
559- return err
513+ return assistantResponse , err
560514 }
561515
562516 // Flush any remaining content from the markdown buffer
563517 if remaining , flushErr := markdownBuffer .Flush (true ); flushErr == nil && remaining != "" {
564518 cmd .Print (remaining )
565519 }
566520
567- return nil
521+ return assistantResponse , nil
568522}
569523
570524func newRunCmd () * cobra.Command {
@@ -641,14 +595,10 @@ func newRunCmd() *cobra.Command {
641595 return nil
642596 }
643597
644- // Interactive mode for external OpenAI endpoint
645- if term .IsTerminal (int (os .Stdin .Fd ())) {
646- termenv .SetDefaultOutput (
647- termenv .NewOutput (asPrinter (cmd ), termenv .WithColorCache (true )),
648- )
649- return generateInteractiveWithReadline (cmd , openaiClient , model )
650- }
651- return generateInteractiveBasic (cmd , openaiClient , model )
598+ termenv .SetDefaultOutput (
599+ termenv .NewOutput (asPrinter (cmd ), termenv .WithColorCache (true )),
600+ )
601+ return generateInteractiveWithReadline (cmd , openaiClient , model )
652602 }
653603
654604 if _ , err := ensureStandaloneRunnerAvailable (cmd .Context (), asPrinter (cmd ), debug ); err != nil {
@@ -746,19 +696,15 @@ func newRunCmd() *cobra.Command {
746696 return nil
747697 }
748698
749- // Use enhanced readline-based interactive mode when terminal is available
750- if term .IsTerminal (int (os .Stdin .Fd ())) {
751- // Initialize termenv with color caching before starting interactive session.
752- // This queries the terminal background color once and caches it, preventing
753- // OSC response sequences from appearing in stdin during the interactive loop.
754- termenv .SetDefaultOutput (
755- termenv .NewOutput (asPrinter (cmd ), termenv .WithColorCache (true )),
756- )
757- return generateInteractiveWithReadline (cmd , desktopClient , model )
758- }
699+ // Initialize termenv with color caching before starting interactive session.
700+ // This queries the terminal background color once and caches it, preventing
701+ // OSC response sequences from appearing in stdin during the interactive loop.
702+ termenv .SetDefaultOutput (
703+ termenv .NewOutput (asPrinter (cmd ), termenv .WithColorCache (true )),
704+ )
705+
706+ return generateInteractiveWithReadline (cmd , desktopClient , model )
759707
760- // Fall back to basic mode if not a terminal
761- return generateInteractiveBasic (cmd , desktopClient , model )
762708 },
763709 ValidArgsFunction : completion .ModelNames (getDesktopClient , 1 ),
764710 }
0 commit comments