Skip to content

Commit 08bcf08

Browse files
committed
Replaces param.NewOpt(int64(n)) with openai.Int(n) for MaxTokens, and openai.Bool(true) with param.NewOpt(true) for IncludeUsage, in simulator_test.go to align with updated API usage.
Signed-off-by: Sergey Marunich <[email protected]>
1 parent f835c9e commit 08bcf08

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

pkg/llm-d-inference-sim/simulator_test.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -120,7 +120,7 @@ var _ = Describe("Simulator", func() {
120120
openai.UserMessage(userMessage),
121121
},
122122
Model: model,
123-
StreamOptions: openai.ChatCompletionStreamOptionsParam{IncludeUsage: openai.Bool(true)},
123+
StreamOptions: openai.ChatCompletionStreamOptionsParam{IncludeUsage: param.NewOpt(true)},
124124
}
125125
stream := openaiclient.Chat.Completions.NewStreaming(ctx, params)
126126
defer func() {
@@ -444,7 +444,7 @@ var _ = Describe("Simulator", func() {
444444
openai.UserMessage("This is a test message"),
445445
},
446446
Model: model,
447-
MaxTokens: param.NewOpt(int64(8)),
447+
MaxTokens: openai.Int(8),
448448
})
449449

450450
Expect(err).To(HaveOccurred())
@@ -471,7 +471,7 @@ var _ = Describe("Simulator", func() {
471471
openai.UserMessage("Hello"),
472472
},
473473
Model: model,
474-
MaxTokens: param.NewOpt(int64(5)),
474+
MaxTokens: openai.Int(5),
475475
})
476476

477477
Expect(err).NotTo(HaveOccurred())

0 commit comments

Comments
 (0)