-
Notifications
You must be signed in to change notification settings - Fork 191
Expand file tree
/
Copy pathgemini_openai.go
More file actions
99 lines (82 loc) · 2.43 KB
/
gemini_openai.go
File metadata and controls
99 lines (82 loc) · 2.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
package adapter
import (
google_gemini "simple-one-api/pkg/llm/google-gemini"
myopenai "simple-one-api/pkg/openai"
"strings"
"time"
)
// GeminiResponseToOpenAIResponse converts a Gemini API response into an
// OpenAI-compatible chat-completion response ("chat.completion").
// It returns nil when qfResp is nil, mirroring
// GeminiResponseToOpenAIStreamResponse.
func GeminiResponseToOpenAIResponse(qfResp *google_gemini.GeminiResponse) *myopenai.OpenAIResponse {
	if qfResp == nil {
		return nil
	}

	openAIResp := &myopenai.OpenAIResponse{
		Object: "chat.completion",
		Usage: &myopenai.Usage{
			PromptTokens:     qfResp.UsageMetadata.PromptTokenCount,
			CompletionTokens: qfResp.UsageMetadata.CandidatesTokenCount,
			TotalTokens:      qfResp.UsageMetadata.TotalTokenCount,
		},
		Choices: make([]myopenai.Choice, len(qfResp.Candidates)),
	}

	// Map every Gemini candidate onto an OpenAI choice.
	for i, candidate := range qfResp.Candidates {
		// Gemini labels the model's turn "model"; OpenAI expects "assistant".
		role := candidate.Content.Role
		if strings.EqualFold(role, "model") {
			role = "assistant"
		}

		// Only the first part is mapped; any additional parts are dropped.
		// NOTE(review): confirm whether multi-part candidates should be
		// concatenated instead.
		var content string
		if len(candidate.Content.Parts) > 0 {
			content = candidate.Content.Parts[0].Text
		}

		openAIResp.Choices[i] = myopenai.Choice{
			Index: candidate.Index,
			Message: myopenai.ResponseMessage{
				Role:    role,
				Content: content,
			},
			FinishReason: candidate.FinishReason,
		}
		// LogProbs are intentionally not populated for Gemini responses.
	}
	return openAIResp
}
// GeminiResponseToOpenAIStreamResponse converts a Gemini API response into an
// OpenAI-compatible streaming chunk ("chat.completion.chunk").
// It returns nil when qfResp is nil.
func GeminiResponseToOpenAIStreamResponse(qfResp *google_gemini.GeminiResponse) *myopenai.OpenAIStreamResponse {
	if qfResp == nil {
		return nil
	}

	choices := make([]myopenai.OpenAIStreamResponseChoice, 0, len(qfResp.Candidates))
	for _, candidate := range qfResp.Candidates {
		// Gemini labels the model's turn "model"; OpenAI expects "assistant".
		role := candidate.Content.Role
		if strings.EqualFold(role, "model") {
			role = "assistant"
		}

		// Only the first part is mapped; any additional parts are dropped.
		var content string
		if len(candidate.Content.Parts) > 0 {
			content = candidate.Content.Parts[0].Text
		}

		choices = append(choices, myopenai.OpenAIStreamResponseChoice{
			// Use the candidate's own index so this stays consistent with
			// GeminiResponseToOpenAIResponse (which uses candidate.Index,
			// not the loop position).
			Index: candidate.Index,
			Delta: myopenai.ResponseDelta{
				Role:    role,
				Content: content,
			},
			//FinishReason: candidate.FinishReason,
		})
	}

	return &myopenai.OpenAIStreamResponse{
		// NOTE(review): a second-resolution timestamp is not actually unique;
		// chunks built within the same second share an ID. Consider adding a
		// random suffix if callers rely on uniqueness.
		ID:      "chatcmpl-" + time.Now().Format("20060102150405"),
		Object:  "chat.completion.chunk",
		Created: time.Now().Unix(),
		Choices: choices,
		Usage: &myopenai.Usage{
			PromptTokens:     qfResp.UsageMetadata.PromptTokenCount,
			CompletionTokens: qfResp.UsageMetadata.CandidatesTokenCount,
			TotalTokens:      qfResp.UsageMetadata.TotalTokenCount,
		},
	}
}