Skip to content

Commit ce19f2a

Browse files
committed
Update blox.go
1 parent 8d1d71d commit ce19f2a

File tree

1 file changed

+8
-8
lines changed

1 file changed

+8
-8
lines changed

blockchain/blox.go

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -565,22 +565,25 @@ func (bl *FxBlockchain) handleFetchContainerLogs(ctx context.Context, from peer.
565565
func (bl *FxBlockchain) handleChatWithAI(ctx context.Context, from peer.ID, w http.ResponseWriter, r *http.Request) {
566566
log := log.With("action", actionChatWithAI, "from", from)
567567

568+
// Decode the incoming request
568569
var req wifi.ChatWithAIRequest
569570
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
570571
log.Error("failed to decode request: %v", err)
571572
http.Error(w, "failed to decode request", http.StatusBadRequest)
572573
return
573574
}
574575

576+
// Set up headers for streaming response
575577
w.Header().Set("Content-Type", "application/json")
576-
w.WriteHeader(http.StatusAccepted) // Use StatusAccepted for consistency
578+
w.WriteHeader(http.StatusAccepted)
577579

578580
flusher, ok := w.(http.Flusher)
579581
if !ok {
580582
http.Error(w, "Streaming not supported", http.StatusInternalServerError)
581583
return
582584
}
583585

586+
// Fetch AI response using FetchAIResponse
584587
chunks, err := wifi.FetchAIResponse(ctx, req.AIModel, req.UserMessage)
585588
if err != nil {
586589
log.Error("error in fetchAIResponse: %v", err)
@@ -591,6 +594,7 @@ func (bl *FxBlockchain) handleChatWithAI(ctx context.Context, from peer.ID, w ht
591594
log.Debugw("Streaming AI response started", "ai_model", req.AIModel)
592595
defer log.Debugw("Streaming AI response ended", "ai_model", req.AIModel)
593596

597+
// Stream chunks to the client
594598
for {
595599
select {
596600
case <-ctx.Done(): // Handle client disconnect or cancellation
@@ -600,22 +604,18 @@ func (bl *FxBlockchain) handleChatWithAI(ctx context.Context, from peer.ID, w ht
600604
if !ok {
601605
return // Channel closed
602606
}
607+
603608
response := wifi.ChatWithAIResponse{
604609
Status: true,
605610
Msg: chunk,
606611
}
607612

608613
if err := json.NewEncoder(w).Encode(response); err != nil {
609614
log.Error("failed to write response: %v", err)
610-
errorResponse := wifi.ChatWithAIResponse{
611-
Status: false,
612-
Msg: fmt.Sprintf("Error writing response: %v", err),
613-
}
614-
json.NewEncoder(w).Encode(errorResponse) // Send error as part of stream
615-
flusher.Flush()
615+
http.Error(w, fmt.Sprintf("Error writing response: %v", err), http.StatusInternalServerError)
616616
return
617617
}
618-
flusher.Flush()
618+
flusher.Flush() // Flush each chunk to ensure real-time streaming
619619
}
620620
}
621621
}

0 commit comments

Comments (0)