
Commit 9eaa723

style: change code style
1 parent 3de240b commit 9eaa723

File tree

5 files changed: +60 -81 lines changed

dist/main_bun.mjs
dist/main_cloudflare-workers.mjs
dist/main_deno.mjs
dist/main_node.mjs
src/utils.ts

dist/main_bun.mjs

Lines changed: 12 additions & 16 deletions
@@ -105,22 +105,18 @@ function genModel(req) {
   const model = GeminiModel.modelMapping(req.model);
   let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
   functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
-  let responseMimeType;
-  let responseSchema;
-  switch (req.response_format?.type) {
-    case "json_object":
-      responseMimeType = "application/json";
-      break;
-    case "json_schema":
-      responseMimeType = "application/json";
-      responseSchema = req.response_format.json_schema.schema;
-      break;
-    case "text":
-      responseMimeType = "text/plain";
-      break;
-    default:
-      break;
-  }
+  const [responseMimeType, responseSchema] = (() => {
+    switch (req.response_format?.type) {
+      case "json_object":
+        return ["application/json", void 0];
+      case "json_schema":
+        return ["application/json", req.response_format.json_schema.schema];
+      case "text":
+        return ["text/plain", void 0];
+      default:
+        return [void 0, void 0];
+    }
+  })();
   const generateContentRequest = {
     contents: openAiMessageToGeminiMessage(req.messages),
     generationConfig: {
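The same hunk appears in all four dist/ builds below. As a reading aid, here is a minimal, self-contained TypeScript sketch of the pattern the commit switches to; the DemoResponseFormat type and demoGenMime helper are illustrative stand-ins, not the project's real definitions. The two mutable let bindings filled in by a switch are replaced by destructuring the return value of an immediately invoked arrow function, and void 0 in the compiled output is simply another spelling of undefined.

// Illustrative stand-in for the request's response_format field; the real
// project uses the OpenAI SDK's discriminated union here.
type DemoResponseFormat =
  | { type: "json_object" }
  | { type: "json_schema"; json_schema: { schema?: unknown } }
  | { type: "text" }

function demoGenMime(response_format?: DemoResponseFormat): [string | undefined, unknown] {
  // One const destructuring instead of two reassigned `let`s: the arrow
  // function is invoked immediately and every branch returns a pair.
  const [responseMimeType, responseSchema] = (() => {
    switch (response_format?.type) {
      case "json_object":
        return ["application/json", void 0] as const
      case "json_schema":
        return ["application/json", response_format.json_schema.schema] as const
      case "text":
        return ["text/plain", void 0] as const
      default:
        return [void 0, void 0] as const
    }
  })()
  return [responseMimeType, responseSchema]
}

console.log(demoGenMime({ type: "json_object" })) // [ "application/json", undefined ]
console.log(demoGenMime())                        // [ undefined, undefined ]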

dist/main_cloudflare-workers.mjs

Lines changed: 12 additions & 16 deletions
@@ -105,22 +105,18 @@ function genModel(req) {
   const model = GeminiModel.modelMapping(req.model);
   let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
   functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
-  let responseMimeType;
-  let responseSchema;
-  switch (req.response_format?.type) {
-    case "json_object":
-      responseMimeType = "application/json";
-      break;
-    case "json_schema":
-      responseMimeType = "application/json";
-      responseSchema = req.response_format.json_schema.schema;
-      break;
-    case "text":
-      responseMimeType = "text/plain";
-      break;
-    default:
-      break;
-  }
+  const [responseMimeType, responseSchema] = (() => {
+    switch (req.response_format?.type) {
+      case "json_object":
+        return ["application/json", void 0];
+      case "json_schema":
+        return ["application/json", req.response_format.json_schema.schema];
+      case "text":
+        return ["text/plain", void 0];
+      default:
+        return [void 0, void 0];
+    }
+  })();
   const generateContentRequest = {
     contents: openAiMessageToGeminiMessage(req.messages),
     generationConfig: {

dist/main_deno.mjs

Lines changed: 12 additions & 16 deletions
@@ -105,22 +105,18 @@ function genModel(req) {
   const model = GeminiModel.modelMapping(req.model);
   let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
   functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
-  let responseMimeType;
-  let responseSchema;
-  switch (req.response_format?.type) {
-    case "json_object":
-      responseMimeType = "application/json";
-      break;
-    case "json_schema":
-      responseMimeType = "application/json";
-      responseSchema = req.response_format.json_schema.schema;
-      break;
-    case "text":
-      responseMimeType = "text/plain";
-      break;
-    default:
-      break;
-  }
+  const [responseMimeType, responseSchema] = (() => {
+    switch (req.response_format?.type) {
+      case "json_object":
+        return ["application/json", void 0];
+      case "json_schema":
+        return ["application/json", req.response_format.json_schema.schema];
+      case "text":
+        return ["text/plain", void 0];
+      default:
+        return [void 0, void 0];
+    }
+  })();
   const generateContentRequest = {
     contents: openAiMessageToGeminiMessage(req.messages),
     generationConfig: {

dist/main_node.mjs

Lines changed: 12 additions & 16 deletions
@@ -563,22 +563,18 @@ function genModel(req) {
   const model = GeminiModel.modelMapping(req.model);
   let functions = req.tools?.filter((it) => it.type === "function")?.map((it) => it.function) ?? [];
   functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })));
-  let responseMimeType;
-  let responseSchema;
-  switch (req.response_format?.type) {
-    case "json_object":
-      responseMimeType = "application/json";
-      break;
-    case "json_schema":
-      responseMimeType = "application/json";
-      responseSchema = req.response_format.json_schema.schema;
-      break;
-    case "text":
-      responseMimeType = "text/plain";
-      break;
-    default:
-      break;
-  }
+  const [responseMimeType, responseSchema] = (() => {
+    switch (req.response_format?.type) {
+      case "json_object":
+        return ["application/json", void 0];
+      case "json_schema":
+        return ["application/json", req.response_format.json_schema.schema];
+      case "text":
+        return ["text/plain", void 0];
+      default:
+        return [void 0, void 0];
+    }
+  })();
   const generateContentRequest = {
     contents: openAiMessageToGeminiMessage(req.messages),
     generationConfig: {

src/utils.ts

Lines changed: 12 additions & 17 deletions
@@ -78,23 +78,18 @@ export function genModel(req: OpenAI.Chat.ChatCompletionCreateParams): [GeminiMo
 
   functions = functions.concat((req.functions ?? []).map((it) => ({ strict: null, ...it })))
 
-  let responseMimeType: string | undefined
-  let responseSchema: JsonSchema | undefined
-
-  switch (req.response_format?.type) {
-    case "json_object":
-      responseMimeType = "application/json"
-      break
-    case "json_schema":
-      responseMimeType = "application/json"
-      responseSchema = req.response_format.json_schema.schema
-      break
-    case "text":
-      responseMimeType = "text/plain"
-      break
-    default:
-      break
-  }
+  const [responseMimeType, responseSchema] = (() => {
+    switch (req.response_format?.type) {
+      case "json_object":
+        return ["application/json", undefined]
+      case "json_schema":
+        return ["application/json", req.response_format.json_schema.schema satisfies JsonSchema | undefined]
+      case "text":
+        return ["text/plain", undefined]
+      default:
+        return [undefined, undefined]
+    }
+  })()
 
   const generateContentRequest: GenerateContentRequest = {
     contents: openAiMessageToGeminiMessage(req.messages),
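In the TypeScript source, the json_schema branch keeps a "satisfies JsonSchema | undefined" clause, which only checks at compile time that the returned schema is assignable to that type; it does not convert the value. If narrower types were wanted for the destructured pair, one alternative (not what this commit does) would be to annotate the arrow function's return type as a tuple. A rough sketch under simplified assumptions: JsonSchema here is just Record<string, unknown>, and DemoRequest stands in for the real OpenAI request type.

// Simplified stand-ins; the project's JsonSchema alias and the OpenAI
// ChatCompletionCreateParams type are richer than this.
type JsonSchema = Record<string, unknown>

interface DemoRequest {
  response_format?:
    | { type: "json_object" }
    | { type: "json_schema"; json_schema: { schema?: JsonSchema } }
    | { type: "text" }
}

function demoResponseConfig(req: DemoRequest) {
  // Annotating the IIFE's return type makes every branch check against the
  // same [mimeType, schema] tuple, so the bindings come out as
  // `string | undefined` and `JsonSchema | undefined`.
  const [responseMimeType, responseSchema] = ((): [string | undefined, JsonSchema | undefined] => {
    switch (req.response_format?.type) {
      case "json_object":
        return ["application/json", undefined]
      case "json_schema":
        return ["application/json", req.response_format.json_schema.schema]
      case "text":
        return ["text/plain", undefined]
      default:
        return [undefined, undefined]
    }
  })()
  return { responseMimeType, responseSchema }
}

console.log(demoResponseConfig({ response_format: { type: "text" } }))
// { responseMimeType: "text/plain", responseSchema: undefined }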

0 commit comments
