Skip to content

Commit dcd1a6a

Browse files
committed
Handle falsy-value checks correctly in Vertex parameter mapping
1 parent da1a1d0 commit dcd1a6a

File tree

2 files changed

+20
-14
lines changed

2 files changed

+20
-14
lines changed

src/providers/google-vertex-ai/transformGenerationConfig.ts

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -10,19 +10,22 @@ import { EmbedInstancesData } from './types';
1010
*/
1111
export function transformGenerationConfig(params: Params) {
1212
const generationConfig: Record<string, any> = {};
13-
if (params['temperature']) {
13+
if (params['temperature'] != null && params['temperature'] != undefined) {
1414
generationConfig['temperature'] = params['temperature'];
1515
}
16-
if (params['top_p']) {
16+
if (params['top_p'] != null && params['top_p'] != undefined) {
1717
generationConfig['topP'] = params['top_p'];
1818
}
19-
if (params['top_k']) {
19+
if (params['top_k'] != null && params['top_k'] != undefined) {
2020
generationConfig['topK'] = params['top_k'];
2121
}
22-
if (params['max_tokens']) {
22+
if (params['max_tokens'] != null && params['max_tokens'] != undefined) {
2323
generationConfig['maxOutputTokens'] = params['max_tokens'];
2424
}
25-
if (params['max_completion_tokens']) {
25+
if (
26+
params['max_completion_tokens'] != null &&
27+
params['max_completion_tokens'] != undefined
28+
) {
2629
generationConfig['maxOutputTokens'] = params['max_completion_tokens'];
2730
}
2831
if (params['stop']) {
@@ -34,10 +37,10 @@ export function transformGenerationConfig(params: Params) {
3437
if (params['logprobs']) {
3538
generationConfig['responseLogprobs'] = params['logprobs'];
3639
}
37-
if (params['top_logprobs']) {
40+
if (params['top_logprobs'] != null && params['top_logprobs'] != undefined) {
3841
generationConfig['logprobs'] = params['top_logprobs']; // range 1-5, openai supports 1-20
3942
}
40-
if (params['seed']) {
43+
if (params['seed'] != null && params['seed'] != undefined) {
4144
generationConfig['seed'] = params['seed'];
4245
}
4346
if (params?.response_format?.type === 'json_schema') {

src/providers/google/chatComplete.ts

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -35,19 +35,22 @@ import { GOOGLE_GENERATE_CONTENT_FINISH_REASON } from './types';
3535

3636
const transformGenerationConfig = (params: Params) => {
3737
const generationConfig: Record<string, any> = {};
38-
if (params['temperature']) {
38+
if (params['temperature'] != null && params['temperature'] != undefined) {
3939
generationConfig['temperature'] = params['temperature'];
4040
}
41-
if (params['top_p']) {
41+
if (params['top_p'] != null && params['top_p'] != undefined) {
4242
generationConfig['topP'] = params['top_p'];
4343
}
44-
if (params['top_k']) {
44+
if (params['top_k'] != null && params['top_k'] != undefined) {
4545
generationConfig['topK'] = params['top_k'];
4646
}
47-
if (params['max_tokens']) {
47+
if (params['max_tokens'] != null && params['max_tokens'] != undefined) {
4848
generationConfig['maxOutputTokens'] = params['max_tokens'];
4949
}
50-
if (params['max_completion_tokens']) {
50+
if (
51+
params['max_completion_tokens'] != null &&
52+
params['max_completion_tokens'] != undefined
53+
) {
5154
generationConfig['maxOutputTokens'] = params['max_completion_tokens'];
5255
}
5356
if (params['stop']) {
@@ -59,10 +62,10 @@ const transformGenerationConfig = (params: Params) => {
5962
if (params['logprobs']) {
6063
generationConfig['responseLogprobs'] = params['logprobs'];
6164
}
62-
if (params['top_logprobs']) {
65+
if (params['top_logprobs'] != null && params['top_logprobs'] != undefined) {
6366
generationConfig['logprobs'] = params['top_logprobs']; // range 1-5, openai supports 1-20
6467
}
65-
if (params['seed']) {
68+
if (params['seed'] != null && params['seed'] != undefined) {
6669
generationConfig['seed'] = params['seed'];
6770
}
6871
if (params?.response_format?.type === 'json_schema') {

0 commit comments

Comments (0)