
Commit 56eb570

#153: Deduplicated the ErrEmptyResponse and adjusted a few providers to work with the new params arguments
1 parent 122f2e1 commit 56eb570

File tree

11 files changed: +50 -89 lines

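The change converges every affected provider on the same request flow. A condensed sketch of that flow, assembled from the hunks below (not a complete provider file; Config, Client, and doChatRequest are defined elsewhere in each provider package):

func (c *Client) Chat(ctx context.Context, params *schemas.ChatParams) (*schemas.ChatResponse, error) {
	// Copy the config-derived request template, then overlay the per-call params.
	chatReq := *c.chatRequestTemplate
	chatReq.ApplyParams(params) // currently this only swaps in params.Messages

	chatResponse, err := c.doChatRequest(ctx, &chatReq)
	if err != nil {
		return nil, err
	}

	return chatResponse, nil
}

Empty-response checks inside doChatRequest now return the shared clients.ErrEmptyResponse sentinel instead of a per-provider error value.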

pkg/providers/anthropic/chat.go

Lines changed: 17 additions & 25 deletions
@@ -5,6 +5,7 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"github.com/EinStack/glide/pkg/providers/clients"
 	"io"
 	"net/http"
 	"time"
@@ -27,6 +28,10 @@ type ChatRequest struct {
 	StopSequences []string `json:"stop_sequences,omitempty"`
 }
 
+func (r *ChatRequest) ApplyParams(params *schemas.ChatParams) {
+	r.Messages = params.Messages
+}
+
 // NewChatRequestFromConfig fills the struct from the config. Not using reflection because of performance penalty it gives
 func NewChatRequestFromConfig(cfg *Config) *ChatRequest {
 	return &ChatRequest{
@@ -42,42 +47,24 @@ func NewChatRequestFromConfig(cfg *Config) *ChatRequest {
 	}
 }
 
-func NewChatMessagesFromUnifiedRequest(request *schemas.ChatRequest) []ChatMessage {
-	messages := make([]ChatMessage, 0, len(request.MessageHistory)+1)
-
-	// Add items from messageHistory first and the new chat message last
-	for _, message := range request.MessageHistory {
-		messages = append(messages, ChatMessage{Role: message.Role, Content: message.Content})
-	}
-
-	messages = append(messages, ChatMessage{Role: request.Message.Role, Content: request.Message.Content})
-
-	return messages
-}
-
 // Chat sends a chat request to the specified anthropic model.
 //
 // Ref: https://docs.anthropic.com/claude/reference/messages_post
-func (c *Client) Chat(ctx context.Context, request *schemas.ChatRequest) (*schemas.ChatResponse, error) {
+func (c *Client) Chat(ctx context.Context, params *schemas.ChatParams) (*schemas.ChatResponse, error) {
 	// Create a new chat request
-	chatRequest := c.createChatRequestSchema(request)
+	// TODO: consider using objectpool to optimize memory allocation
+	chatReq := *c.chatRequestTemplate
+	chatReq.ApplyParams(params)
+
+	chatResponse, err := c.doChatRequest(ctx, &chatReq)
 
-	chatResponse, err := c.doChatRequest(ctx, chatRequest)
 	if err != nil {
 		return nil, err
 	}
 
 	return chatResponse, nil
 }
 
-func (c *Client) createChatRequestSchema(request *schemas.ChatRequest) *ChatRequest {
-	// TODO: consider using objectpool to optimize memory allocation
-	chatRequest := c.chatRequestTemplate // hoping to get a copy of the template
-	chatRequest.Messages = NewChatMessagesFromUnifiedRequest(request)
-
-	return chatRequest
-}
-
 func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*schemas.ChatResponse, error) {
 	// Build request payload
 	rawPayload, err := json.Marshal(payload)
@@ -129,10 +116,15 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*sche
 	}
 
 	if len(anthropicResponse.Content) == 0 {
-		return nil, ErrEmptyResponse
+		return nil, clients.ErrEmptyResponse
 	}
 
 	completion := anthropicResponse.Content[0]
+
+	if len(completion.Text) == 0 {
+		return nil, clients.ErrEmptyResponse
+	}
+
 	usage := anthropicResponse.Usage
 
 	// Map response to ChatResponse schema
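A note on the chatReq := *c.chatRequestTemplate line above: the removed helper assigned the pointer itself ("hoping to get a copy of the template"), which aliased the shared template, while the new code dereferences it, so per-call fields can be reassigned without touching the template. A minimal standalone illustration of that copy semantics, using simplified stand-in fields rather than the real ChatRequest definition:

package main

import "fmt"

// chatRequest stands in for the provider ChatRequest structs: a few scalar
// config fields plus a Messages slice that ApplyParams overwrites per call.
type chatRequest struct {
	Model    string
	Messages []string
}

func main() {
	template := &chatRequest{Model: "template-model"}

	req := *template                 // same move as `chatReq := *c.chatRequestTemplate`
	req.Messages = []string{"hello"} // reassigning a field never touches the template

	fmt.Println(template.Messages) // [] — the template is unchanged
	fmt.Println(req.Messages)      // [hello]
}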

pkg/providers/anthropic/client.go

Lines changed: 0 additions & 6 deletions
@@ -1,7 +1,6 @@
 package anthropic
 
 import (
-	"errors"
 	"net/http"
 	"net/url"
 	"time"
@@ -15,11 +14,6 @@
 	providerName = "anthropic"
 )
 
-// ErrEmptyResponse is returned when the OpenAI API returns an empty response.
-var (
-	ErrEmptyResponse = errors.New("empty response")
-)
-
 // Client is a client for accessing OpenAI API
 type Client struct {
 	baseURL string

pkg/providers/azureopenai/chat.go

Lines changed: 23 additions & 38 deletions
@@ -5,6 +5,7 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"github.com/EinStack/glide/pkg/providers/clients"
 	"io"
 	"net/http"
 
@@ -36,39 +37,21 @@ func NewChatRequestFromConfig(cfg *Config) *ChatRequest {
 }
 
 // Chat sends a chat request to the specified azure openai model.
-func (c *Client) Chat(ctx context.Context, request *schemas.ChatRequest) (*schemas.ChatResponse, error) {
+func (c *Client) Chat(ctx context.Context, params *schemas.ChatParams) (*schemas.ChatResponse, error) {
 	// Create a new chat request
-	chatRequest := c.createRequestSchema(request)
+	// TODO: consider using objectpool to optimize memory allocation
+	chatReq := *c.chatRequestTemplate // hoping to get a copy of the template
+	chatReq.ApplyParams(params)
+
+	chatResponse, err := c.doChatRequest(ctx, &chatReq)
 
-	chatResponse, err := c.doChatRequest(ctx, chatRequest)
 	if err != nil {
 		return nil, err
 	}
 
-	if len(chatResponse.ModelResponse.Message.Content) == 0 {
-		return nil, ErrEmptyResponse
-	}
-
 	return chatResponse, nil
 }
 
-// createRequestSchema creates a new ChatRequest object based on the given request.
-func (c *Client) createRequestSchema(request *schemas.ChatRequest) *ChatRequest {
-	// TODO: consider using objectpool to optimize memory allocation
-	chatRequest := *c.chatRequestTemplate // hoping to get a copy of the template
-
-	chatRequest.Messages = make([]ChatMessage, 0, len(request.MessageHistory)+1)
-
-	// Add items from messageHistory first and the new chat message last
-	for _, message := range request.MessageHistory {
-		chatRequest.Messages = append(chatRequest.Messages, ChatMessage{Role: message.Role, Content: message.Content})
-	}
-
-	chatRequest.Messages = append(chatRequest.Messages, ChatMessage{Role: request.Message.Role, Content: request.Message.Content})
-
-	return &chatRequest
-}
-
 func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*schemas.ChatResponse, error) {
 	// Build request payload
 	rawPayload, err := json.Marshal(payload)
@@ -110,35 +93,37 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*sche
 	}
 
 	// Parse the response JSON
-	var openAICompletion openai.ChatCompletion
+	var chatCompletion openai.ChatCompletion
 
-	err = json.Unmarshal(bodyBytes, &openAICompletion)
+	err = json.Unmarshal(bodyBytes, &chatCompletion)
 	if err != nil {
 		c.tel.Logger.Error("failed to parse openai chat response", zap.Error(err))
 		return nil, err
 	}
 
-	openAICompletion.SystemFingerprint = "" // Azure OpenAI doesn't return this
+	modelChoice := chatCompletion.Choices[0]
+
+	if len(modelChoice.Message.Content) == 0 {
+		return nil, clients.ErrEmptyResponse
+	}
 
 	// Map response to UnifiedChatResponse schema
 	response := schemas.ChatResponse{
-		ID: openAICompletion.ID,
-		Created: openAICompletion.Created,
+		ID: chatCompletion.ID,
+		Created: chatCompletion.Created,
 		Provider: providerName,
-		ModelName: openAICompletion.ModelName,
+		ModelName: chatCompletion.ModelName,
 		Cached: false,
 		ModelResponse: schemas.ModelResponse{
-			Metadata: map[string]string{
-				"system_fingerprint": openAICompletion.SystemFingerprint,
-			},
+			Metadata: map[string]string{},
 			Message: schemas.ChatMessage{
-				Role: openAICompletion.Choices[0].Message.Role,
-				Content: openAICompletion.Choices[0].Message.Content,
+				Role: modelChoice.Message.Role,
+				Content: modelChoice.Message.Content,
 			},
 			TokenUsage: schemas.TokenUsage{
-				PromptTokens: openAICompletion.Usage.PromptTokens,
-				ResponseTokens: openAICompletion.Usage.CompletionTokens,
-				TotalTokens: openAICompletion.Usage.TotalTokens,
+				PromptTokens: chatCompletion.Usage.PromptTokens,
+				ResponseTokens: chatCompletion.Usage.CompletionTokens,
+				TotalTokens: chatCompletion.Usage.TotalTokens,
 			},
 		},
 	}

pkg/providers/azureopenai/client.go

Lines changed: 0 additions & 6 deletions
@@ -1,7 +1,6 @@
 package azureopenai
 
 import (
-	"errors"
 	"fmt"
 	"net/http"
 	"time"
@@ -17,11 +16,6 @@
 	providerName = "azureopenai"
 )
 
-// ErrEmptyResponse is returned when the OpenAI API returns an empty response.
-var (
-	ErrEmptyResponse = errors.New("empty response")
-)
-
 // Client is a client for accessing Azure OpenAI API
 type Client struct {
 	baseURL string // The name of your Azure OpenAI Resource (e.g https://glide-test.openai.azure.com/)

pkg/providers/azureopenai/schemas.go

Lines changed: 4 additions & 0 deletions
@@ -21,6 +21,10 @@ type ChatRequest struct {
 	ResponseFormat interface{} `json:"response_format,omitempty"`
 }
 
+func (r *ChatRequest) ApplyParams(params *schemas.ChatParams) {
+	r.Messages = params.Messages
+}
+
 // ChatCompletion
 // Ref: https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#chat-completions
 type ChatCompletion struct {

pkg/providers/clients/errors.go

Lines changed: 1 addition & 0 deletions
@@ -7,6 +7,7 @@ import (
 )
 
 var (
+	ErrEmptyResponse = errors.New("empty model response")
 	ErrProviderUnavailable = errors.New("provider is not available")
 	ErrUnauthorized = errors.New("API key is wrong or not set")
 	ErrChatStreamNotImplemented = errors.New("streaming chat API is not implemented for provider")
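With the sentinel defined once in pkg/providers/clients, code above the provider layer can detect an empty model response without knowing which provider produced it. An illustrative caller (the package and helper name here are hypothetical, not part of this commit) using the standard library's errors.Is:

package router // hypothetical package name

import (
	"errors"

	"github.com/EinStack/glide/pkg/providers/clients"
)

// isEmptyResponse reports whether a provider call failed because the model
// returned no content; every provider now returns the same sentinel, so a
// single errors.Is check covers all of them.
func isEmptyResponse(err error) bool {
	return errors.Is(err, clients.ErrEmptyResponse)
}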

pkg/providers/cohere/chat.go

Lines changed: 1 addition & 1 deletion
@@ -40,7 +40,7 @@ func (c *Client) Chat(ctx context.Context, request *schemas.ChatRequest) (*schem
 	}
 
 	if len(chatResponse.ModelResponse.Message.Content) == 0 {
-		return nil, ErrEmptyResponse
+		return nil, clients.ErrEmptyResponse
 	}
 
 	return chatResponse, nil

pkg/providers/cohere/client.go

Lines changed: 0 additions & 6 deletions
@@ -1,7 +1,6 @@
 package cohere
 
 import (
-	"errors"
 	"net/http"
 	"net/url"
 	"time"
@@ -15,11 +14,6 @@
 	providerName = "cohere"
 )
 
-// ErrEmptyResponse is returned when the Cohere API returns an empty response.
-var (
-	ErrEmptyResponse = errors.New("empty response")
-)
-
 // Client is a client for accessing Cohere API
 type Client struct {
 	baseURL string

pkg/providers/openai/chat.go

Lines changed: 2 additions & 1 deletion
@@ -5,6 +5,7 @@ import (
 	"context"
 	"encoding/json"
 	"fmt"
+	"github.com/EinStack/glide/pkg/providers/clients"
 	"io"
 	"net/http"
 
@@ -118,7 +119,7 @@ func (c *Client) doChatRequest(ctx context.Context, payload *ChatRequest) (*sche
 	modelChoice := chatCompletion.Choices[0]
 
 	if len(modelChoice.Message.Content) == 0 {
-		return nil, ErrEmptyResponse
+		return nil, clients.ErrEmptyResponse
 	}
 
 	// Map response to ChatResponse schema

pkg/providers/openai/client.go

Lines changed: 0 additions & 6 deletions
@@ -1,7 +1,6 @@
 package openai
 
 import (
-	"errors"
 	"net/http"
 	"net/url"
 	"time"
@@ -17,11 +16,6 @@
 	providerName = "openai"
 )
 
-// ErrEmptyResponse is returned when the OpenAI API returns an empty response.
-var (
-	ErrEmptyResponse = errors.New("empty response")
-)
-
 // Client is a client for accessing OpenAI API
 type Client struct {
 	baseURL string
