@@ -954,14 +954,10 @@ func handleFinalStream(c *gin.Context, info *relaycommon.RelayInfo, resp *dto.Ch
 	return nil
 }
 
-func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
-	// responseText := ""
-	id := helper.GetResponseID(c)
-	createAt := common.GetTimestamp()
-	responseText := strings.Builder{}
+func geminiStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response, callback func(data string, geminiResponse *dto.GeminiChatResponse) bool) (*dto.Usage, *types.NewAPIError) {
 	var usage = &dto.Usage{}
 	var imageCount int
-	finishReason := constant.FinishReasonStop
+	responseText := strings.Builder{}
 
 	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
 		var geminiResponse dto.GeminiChatResponse
@@ -971,6 +967,7 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 			return false
 		}
 
+		// count the number of images in the response
 		for _, candidate := range geminiResponse.Candidates {
 			for _, part := range candidate.Content.Parts {
 				if part.InlineData != nil && part.InlineData.MimeType != "" {
@@ -982,14 +979,10 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 			}
 		}
 
-		response, isStop := streamResponseGeminiChat2OpenAI(&geminiResponse)
-
-		response.Id = id
-		response.Created = createAt
-		response.Model = info.UpstreamModelName
+		// update the usage statistics
 		if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
 			usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
-			usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount
+			usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount + geminiResponse.UsageMetadata.ThoughtsTokenCount
 			usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
 			usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
 			for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
@@ -1000,6 +993,45 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 				}
 			}
 		}
+
+		return callback(data, &geminiResponse)
+	})
+
+	if imageCount != 0 {
+		if usage.CompletionTokens == 0 {
+			usage.CompletionTokens = imageCount * 1400
+		}
+	}
+
+	usage.PromptTokensDetails.TextTokens = usage.PromptTokens
+	if usage.TotalTokens > 0 {
+		usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
+	}
+
+	if usage.CompletionTokens <= 0 {
+		str := responseText.String()
+		if len(str) > 0 {
+			usage = service.ResponseText2Usage(c, responseText.String(), info.UpstreamModelName, info.PromptTokens)
+		} else {
+			usage = &dto.Usage{}
+		}
+	}
+
+	return usage, nil
+}
+
+func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
+	id := helper.GetResponseID(c)
+	createAt := common.GetTimestamp()
+	finishReason := constant.FinishReasonStop
+
+	usage, err := geminiStreamHandler(c, info, resp, func(data string, geminiResponse *dto.GeminiChatResponse) bool {
+		response, isStop := streamResponseGeminiChat2OpenAI(geminiResponse)
+
+		response.Id = id
+		response.Created = createAt
+		response.Model = info.UpstreamModelName
+
 		logger.LogDebug(c, fmt.Sprintf("info.SendResponseCount = %d", info.SendResponseCount))
 		if info.SendResponseCount == 0 {
 			// send first response
@@ -1015,7 +1047,7 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 					emptyResponse.Choices[0].Delta.ToolCalls = copiedToolCalls
 				}
 				finishReason = constant.FinishReasonToolCalls
-				err = handleStream(c, info, emptyResponse)
+				err := handleStream(c, info, emptyResponse)
 				if err != nil {
 					logger.LogError(c, err.Error())
 				}
@@ -1025,14 +1057,14 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 					response.Choices[0].FinishReason = nil
 				}
 			} else {
-				err = handleStream(c, info, emptyResponse)
+				err := handleStream(c, info, emptyResponse)
 				if err != nil {
 					logger.LogError(c, err.Error())
 				}
 			}
 		}
 
-		err = handleStream(c, info, response)
+		err := handleStream(c, info, response)
 		if err != nil {
 			logger.LogError(c, err.Error())
 		}
@@ -1042,40 +1074,15 @@ func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *
 		return true
 	})
 
-	if info.SendResponseCount == 0 {
-		// empty completion: return an error, do not bill
-		// empty response, throw an error
-		return nil, types.NewOpenAIError(errors.New("no response received from Gemini API"), types.ErrorCodeEmptyResponse, http.StatusInternalServerError)
-	}
-
-	if imageCount != 0 {
-		if usage.CompletionTokens == 0 {
-			usage.CompletionTokens = imageCount * 258
-		}
-	}
-
-	usage.PromptTokensDetails.TextTokens = usage.PromptTokens
-	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
-
-	if usage.CompletionTokens == 0 {
-		str := responseText.String()
-		if len(str) > 0 {
-			usage = service.ResponseText2Usage(responseText.String(), info.UpstreamModelName, info.PromptTokens)
-		} else {
-			// empty completion, no usage needed
-			usage = &dto.Usage{}
-		}
+	if err != nil {
+		return usage, err
 	}
 
 	response := helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
-	err := handleFinalStream(c, info, response)
-	if err != nil {
-		common.SysLog("send final response failed: " + err.Error())
+	handleErr := handleFinalStream(c, info, response)
+	if handleErr != nil {
+		common.SysLog("send final response failed: " + handleErr.Error())
 	}
-	//if info.RelayFormat == relaycommon.RelayFormatOpenAI {
-	//	helper.Done(c)
-	//}
-	//resp.Body.Close()
 	return usage, nil
 }
 
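The commit splits the old monolithic GeminiChatStreamHandler into a generic geminiStreamHandler that owns scanning, JSON decoding, image counting, and usage accounting, plus a thin wrapper that passes a per-chunk callback for the OpenAI-format conversion. Below is a minimal, self-contained sketch of that callback pattern; the chunk and usage types and the scanStream helper are simplified, hypothetical stand-ins invented for illustration, not the repository's dto, helper, or service APIs.

// Minimal illustrative sketch (not the repository's real types): a generic
// scanner decodes each streamed chunk and tracks usage, while format-specific
// handling lives in a callback supplied by the caller.
package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"strings"
)

// chunk is a simplified, hypothetical stand-in for dto.GeminiChatResponse.
type chunk struct {
	Text             string `json:"text"`
	PromptTokens     int    `json:"promptTokens"`
	CandidatesTokens int    `json:"candidatesTokens"`
	ThoughtsTokens   int    `json:"thoughtsTokens"`
	TotalTokens      int    `json:"totalTokens"`
}

// usage is a simplified, hypothetical stand-in for dto.Usage.
type usage struct {
	PromptTokens     int
	CompletionTokens int
	TotalTokens      int
}

// scanStream owns the generic work: reading lines, decoding JSON, and keeping
// usage counters up to date, while delegating each decoded chunk to cb.
func scanStream(sc *bufio.Scanner, cb func(c *chunk) bool) (*usage, error) {
	u := &usage{}
	var text strings.Builder
	for sc.Scan() {
		var c chunk
		if err := json.Unmarshal(sc.Bytes(), &c); err != nil {
			return nil, err
		}
		text.WriteString(c.Text)
		if c.TotalTokens != 0 {
			u.PromptTokens = c.PromptTokens
			// Thought tokens count toward completion tokens, as in the commit.
			u.CompletionTokens = c.CandidatesTokens + c.ThoughtsTokens
			u.TotalTokens = c.TotalTokens
		}
		if !cb(&c) {
			break
		}
	}
	// Prefer the reported totals when present, mirroring the post-scan fixups.
	if u.TotalTokens > 0 {
		u.CompletionTokens = u.TotalTokens - u.PromptTokens
	}
	if u.CompletionTokens <= 0 && text.Len() > 0 {
		// Rough fallback from the streamed text, standing in for ResponseText2Usage.
		u.CompletionTokens = text.Len() / 4
	}
	return u, sc.Err()
}

func main() {
	stream := `{"text":"Hello ","promptTokens":3,"candidatesTokens":2,"thoughtsTokens":1,"totalTokens":6}
{"text":"world","promptTokens":3,"candidatesTokens":4,"thoughtsTokens":1,"totalTokens":8}`
	sc := bufio.NewScanner(strings.NewReader(stream))

	// Format-specific handling (the OpenAI-style conversion in the real
	// GeminiChatStreamHandler) is reduced here to printing each text delta.
	u, err := scanStream(sc, func(c *chunk) bool {
		fmt.Printf("delta: %q\n", c.Text)
		return true
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("usage: %+v\n", *u)
}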