Skip to content

Commit 50c678e

Browse files
committed
temp
1 parent 5641530 commit 50c678e

File tree

7 files changed

+676
-102
lines changed

7 files changed

+676
-102
lines changed

api.ts

Lines changed: 12 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -26,30 +26,30 @@ import { BASE_PATH, COLLECTION_FORMATS, BaseAPI, RequiredError } from './base';
2626
/**
2727
*
2828
* @export
29-
* @interface ChatCompletionRequestMessage
29+
* @interface ChatCompletionMessage
3030
*/
31-
export interface ChatCompletionRequestMessage {
31+
export interface ChatCompletionMessage {
3232
/**
3333
* The entity that sent the message
3434
* @type {string}
35-
* @memberof ChatCompletionRequestMessage
35+
* @memberof ChatCompletionMessage
3636
*/
37-
'role'?: ChatCompletionRequestMessageRoleEnum;
37+
'role'?: ChatCompletionMessageRoleEnum;
3838
/**
3939
* The contents of the message
4040
* @type {string}
41-
* @memberof ChatCompletionRequestMessage
41+
* @memberof ChatCompletionMessage
4242
*/
4343
'content'?: string;
4444
}
4545

46-
export const ChatCompletionRequestMessageRoleEnum = {
46+
export const ChatCompletionMessageRoleEnum = {
4747
System: 'system',
4848
User: 'user',
4949
Assistant: 'assistant'
5050
} as const;
5151

52-
export type ChatCompletionRequestMessageRoleEnum = typeof ChatCompletionRequestMessageRoleEnum[keyof typeof ChatCompletionRequestMessageRoleEnum];
52+
export type ChatCompletionMessageRoleEnum = typeof ChatCompletionMessageRoleEnum[keyof typeof ChatCompletionMessageRoleEnum];
5353

5454
/**
5555
*
@@ -249,10 +249,10 @@ export interface CreateChatCompletionRequest {
249249
'model': string;
250250
/**
251251
* The messages to generate chat completions for, in the [chat format](/docs/guides/chat/introduction).
252-
* @type {Array<ChatCompletionRequestMessage>}
252+
* @type {Array<ChatCompletionMessage>}
253253
* @memberof CreateChatCompletionRequest
254254
*/
255-
'messages': Array<ChatCompletionRequestMessage>;
255+
'messages': Array<ChatCompletionMessage>;
256256
/**
257257
* What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or `top_p` but not both.
258258
* @type {number}
@@ -366,10 +366,10 @@ export interface CreateChatCompletionResponseChoicesInner {
366366
'index'?: number;
367367
/**
368368
*
369-
* @type {Objects}
369+
* @type {ChatCompletionMessage}
370370
* @memberof CreateChatCompletionResponseChoicesInner
371371
*/
372-
'message'?: Objects;
372+
'message'?: ChatCompletionMessage;
373373
/**
374374
*
375375
* @type {string}
@@ -2504,7 +2504,7 @@ export const OpenAIApiAxiosParamCreator = function (configuration?: Configuratio
25042504
}
25052505

25062506
if (temperature !== undefined) {
2507-
localVarFormParams.append('temperature', new Blob([JSON.stringify(temperature)], { type: "application/json", }));
2507+
localVarFormParams.append('temperature', temperature as any);
25082508
}
25092509

25102510

0 commit comments

Comments (0)