-
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathexample_openai_chat_basic_test.go
More file actions
96 lines (86 loc) · 2.62 KB
/
example_openai_chat_basic_test.go
File metadata and controls
96 lines (86 loc) · 2.62 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
package integration
import (
"context"
"fmt"
"os"
"time"
"github.com/flexigpt/inference-go"
"github.com/flexigpt/inference-go/spec"
)
// Example_openAIChat_basicConversation demonstrates a minimal non-streaming
// call to OpenAI's Chat Completions API.
//
// The example always prints "OK" on success; when OPENAI_API_KEY is absent it
// skips the live call (logging to stderr) so the example still passes offline.
func Example_openAIChat_basicConversation() {
	// Bound the whole example, including the network call, to 30 seconds.
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	providers, err := newProviderSetWithDebug()
	if err != nil {
		fmt.Fprintln(os.Stderr, "error creating ProviderSetAPI:", err)
		return
	}

	// Register a provider wired to OpenAI's Chat Completions endpoint using
	// the library's default origin, path prefix, and headers.
	_, err = providers.AddProvider(ctx, "openai-chat", &inference.AddProviderConfig{
		SDKType:                  spec.ProviderSDKTypeOpenAIChatCompletions,
		Origin:                   spec.DefaultOpenAIOrigin,
		ChatCompletionPathPrefix: spec.DefaultOpenAIChatCompletionsPrefix,
		APIKeyHeaderKey:          spec.DefaultAuthorizationHeaderKey,
		DefaultHeaders:           spec.OpenAIChatCompletionsDefaultHeaders,
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, "error adding OpenAI Chat provider:", err)
		return
	}

	// Without a key there is nothing live to call; emit the expected output
	// and bail so the example does not fail in keyless environments.
	key := os.Getenv("OPENAI_API_KEY")
	if key == "" {
		fmt.Fprintln(os.Stderr, "OPENAI_API_KEY not set; skipping live OpenAI Chat call")
		fmt.Println("OK")
		return
	}
	err = providers.SetProviderAPIKey(ctx, "openai-chat", key)
	if err != nil {
		fmt.Fprintln(os.Stderr, "error setting OpenAI API key:", err)
		return
	}

	// A single-turn request: one user text message under a concise system prompt.
	request := &spec.FetchCompletionRequest{
		ModelParam: spec.ModelParam{
			Name:            "gpt-4.1-mini",
			Stream:          false,
			MaxPromptLength: 4096,
			MaxOutputLength: 256,
			SystemPrompt:    "You are a concise assistant.",
		},
		Inputs: []spec.InputUnion{
			{
				Kind: spec.InputKindInputMessage,
				InputMessage: &spec.InputOutputContent{
					Role: spec.RoleUser,
					Contents: []spec.InputOutputContentItemUnion{
						{
							Kind: spec.ContentItemKindText,
							TextItem: &spec.ContentItemText{
								Text: "Say hello from OpenAI Chat Completions in one short sentence.",
							},
						},
					},
				},
			},
		},
	}

	response, err := providers.FetchCompletion(ctx, "openai-chat", request, &spec.FetchCompletionOptions{CompletionKey: "gpt41"})
	if err != nil {
		fmt.Fprintln(os.Stderr, "FetchCompletion error:", err)
		// Surface any provider-level error detail when the response carries one.
		if response != nil && response.Error != nil {
			fmt.Fprintln(os.Stderr, "Provider error:", response.Error.Message)
		}
		return
	}

	// Log assistant text to stderr so stdout stays exactly "OK" for the
	// Output directive below.
	for _, output := range response.Outputs {
		if output.Kind != spec.OutputKindOutputMessage || output.OutputMessage == nil {
			continue
		}
		for _, item := range output.OutputMessage.Contents {
			if item.Kind != spec.ContentItemKindText || item.TextItem == nil {
				continue
			}
			fmt.Fprintln(os.Stderr, "OpenAI Chat assistant:", item.TextItem.Text)
		}
	}
	fmt.Println("OK")
	// Output: OK
}