@@ -9,6 +9,7 @@
 	"os"
 	"testing"
 
+	"github.com/sashabaranov/go-openai"
 	"github.com/sashabaranov/go-openai/internal/test/checks"
 	"github.com/sashabaranov/go-openai/jsonschema"
 )
@@ -20,7 +21,7 @@ func TestAPI(t *testing.T) {
 	}
 
 	var err error
-	c := NewClient(apiToken)
+	c := openai.NewClient(apiToken)
 	ctx := context.Background()
 	_, err = c.ListEngines(ctx)
 	checks.NoError(t, err, "ListEngines error")
@@ -36,23 +37,23 @@ func TestAPI(t *testing.T) {
 		checks.NoError(t, err, "GetFile error")
 	} // else skip
 
-	embeddingReq := EmbeddingRequest{
+	embeddingReq := openai.EmbeddingRequest{
 		Input: []string{
 			"The food was delicious and the waiter",
 			"Other examples of embedding request",
 		},
-		Model: AdaSearchQuery,
+		Model: openai.AdaSearchQuery,
 	}
 	_, err = c.CreateEmbeddings(ctx, embeddingReq)
 	checks.NoError(t, err, "Embedding error")
 
 	_, err = c.CreateChatCompletion(
 		ctx,
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role:    ChatMessageRoleUser,
+					Role:    openai.ChatMessageRoleUser,
 					Content: "Hello!",
 				},
 			},
@@ -63,11 +64,11 @@ func TestAPI(t *testing.T) {
 
 	_, err = c.CreateChatCompletion(
 		ctx,
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role:    ChatMessageRoleUser,
+					Role:    openai.ChatMessageRoleUser,
 					Name:    "John_Doe",
 					Content: "Hello!",
 				},
@@ -76,9 +77,9 @@ func TestAPI(t *testing.T) {
 	)
 	checks.NoError(t, err, "CreateChatCompletion (with name) returned error")
 
-	stream, err := c.CreateCompletionStream(ctx, CompletionRequest{
+	stream, err := c.CreateCompletionStream(ctx, openai.CompletionRequest{
 		Prompt:    "Ex falso quodlibet",
-		Model:     GPT3Ada,
+		Model:     openai.GPT3Ada,
 		MaxTokens: 5,
 		Stream:    true,
 	})
@@ -103,15 +104,15 @@ func TestAPI(t *testing.T) {
 
 	_, err = c.CreateChatCompletion(
 		context.Background(),
-		ChatCompletionRequest{
-			Model: GPT3Dot5Turbo,
-			Messages: []ChatCompletionMessage{
+		openai.ChatCompletionRequest{
+			Model: openai.GPT3Dot5Turbo,
+			Messages: []openai.ChatCompletionMessage{
 				{
-					Role:    ChatMessageRoleUser,
+					Role:    openai.ChatMessageRoleUser,
 					Content: "What is the weather like in Boston?",
 				},
 			},
-			Functions: []FunctionDefinition{{
+			Functions: []openai.FunctionDefinition{{
 				Name: "get_current_weather",
 				Parameters: jsonschema.Definition{
 					Type: jsonschema.Object,
@@ -140,12 +141,12 @@ func TestAPIError(t *testing.T) {
 	}
 
 	var err error
-	c := NewClient(apiToken + "_invalid")
+	c := openai.NewClient(apiToken + "_invalid")
 	ctx := context.Background()
 	_, err = c.ListEngines(ctx)
 	checks.HasError(t, err, "ListEngines should fail with an invalid key")
 
-	var apiErr *APIError
+	var apiErr *openai.APIError
 	if !errors.As(err, &apiErr) {
 		t.Fatalf("Error is not an APIError: %+v", err)
 	}
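
For reference, a minimal standalone sketch of the package-qualified call pattern this diff converts the test to. It only uses identifiers that appear in the diff (openai.NewClient, openai.CreateChatCompletion, openai.ChatCompletionRequest, openai.GPT3Dot5Turbo, openai.ChatMessageRoleUser); the OPENAI_API_KEY variable name and the response-field access at the end are assumptions for illustration, not part of the change.

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/sashabaranov/go-openai"
)

func main() {
	// Construct the client through the exported constructor, as the updated test does.
	// NOTE: the OPENAI_API_KEY variable name is an assumption for this sketch.
	c := openai.NewClient(os.Getenv("OPENAI_API_KEY"))

	resp, err := c.CreateChatCompletion(
		context.Background(),
		openai.ChatCompletionRequest{
			Model: openai.GPT3Dot5Turbo,
			Messages: []openai.ChatCompletionMessage{
				{
					Role:    openai.ChatMessageRoleUser,
					Content: "Hello!",
				},
			},
		},
	)
	if err != nil {
		fmt.Printf("CreateChatCompletion error: %v\n", err)
		return
	}
	// Assumed response shape: print the first choice's message content.
	fmt.Println(resp.Choices[0].Message.Content)
}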