Skip to content
This repository was archived by the owner on Oct 8, 2024. It is now read-only.

Commit 5c66421

Browse files
Update models (#2)
1 parent e7584f7 commit 5c66421

File tree

6 files changed

+121
-69
lines changed

6 files changed

+121
-69
lines changed

src/index.ts

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -16,7 +16,7 @@ export interface ModelFactory {
1616
export abstract class Model<TInput extends object, TOutput extends object> {
1717
protected constructor(
1818
public readonly info: ModelInfo,
19-
private invoker: ModelInvoker,
19+
protected invoker: ModelInvoker,
2020
) {}
2121

2222
debug: boolean = false;
@@ -42,6 +42,6 @@ export abstract class Model<TInput extends object, TOutput extends object> {
4242
console.debug(`Received output: ${outputJson}`);
4343
}
4444

45-
return JSON.parse<TOutput>(outputJson, true);
45+
return JSON.parse<TOutput>(outputJson);
4646
}
4747
}

src/models/meta/llama.ts

Lines changed: 3 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -30,6 +30,7 @@ class LlamaInput {
3030
*
3131
* Default: 0.6
3232
*/
33+
@omitif("this.temperature == 0.6")
3334
temperature: f64 = 0.6;
3435

3536
/**
@@ -38,6 +39,7 @@ class LlamaInput {
3839
*
3940
* Default: 0.9
4041
*/
42+
@omitif("this.topP == 0.9")
4143
@alias("top_p")
4244
topP: f64 = 0.9;
4345

@@ -46,6 +48,7 @@ class LlamaInput {
4648
*
4749
* Default: 512
4850
*/
51+
@omitif("this.maxGenLen == 512")
4952
@alias("max_gen_len")
5053
maxGenLen: i32 = 512;
5154
}

src/models/openai/chat.ts

Lines changed: 49 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -20,53 +20,80 @@ class ChatCompletionInput {
2020

2121

2222
@alias("frequency_penalty")
23+
@omitif("this.frequencyPenalty == 0.0")
2324
frequencyPenalty: f64 = 0.0;
2425

2526

2627
@alias("logit_bias")
28+
@omitnull()
2729
logitBias: Map<string, f64> | null = null;
2830

31+
32+
@omitif("this.logprobs == false")
2933
logprobs: bool = false;
3034

31-
// @alias("top_logprobs")
32-
// topLogprobs: i32 = 0; // TODO: only send when logprobs is true
35+
36+
@alias("top_logprobs")
37+
@omitif("this.logprobs == false")
38+
topLogprobs: i32 = 0;
39+
3340

3441
@alias("max_tokens")
35-
maxTokens: i32 = 4096;
42+
@omitif("this.maxTokens == 4096")
43+
maxTokens: i32 = 4096; // TODO: make this an `i32 | null` when supported
44+
3645

46+
@omitif("this.n == 1")
3747
n: i32 = 1;
3848

3949

4050
@alias("presence_penalty")
51+
@omitif("this.presencePenalty == 0.0")
4152
presencePenalty: f64 = 0.0;
4253

4354

4455
@alias("response_format")
56+
@omitif("this.responseFormat.type == 'text'")
4557
responseFormat: ResponseFormat = ResponseFormat.Text;
4658

47-
// seed: i32 | null = null; // TODO: we need a true Nullable<i32> type for this to work
59+
60+
@omitif("this.seed == -1")
61+
seed: i32 = -1; // TODO: make this an `i32 | null` when supported
62+
63+
64+
@omitnull()
4865
stop: string[] | null = null;
4966

5067
// stream: bool = false;
68+
69+
// @omitif("this.stream == false")
5170
// @alias("stream_options")
5271
// streamOptions: StreamOptions | null = null;
5372

73+
@omitif("this.temperature == 1.0")
5474
temperature: f64 = 1.0;
5575

5676

5777
@alias("top_p")
78+
@omitif("this.topP == 1.0")
5879
topP: f64 = 1.0;
5980

81+
82+
@omitnull()
6083
tools: Tool[] | null = null;
6184

6285

6386
@alias("tool_choice")
87+
@omitnull()
6488
toolChoice: string | null = null; // TODO: verify this works
6589

66-
// @alias("parallel_tool_calls")
67-
// parallelToolCalls: bool = true; // TODO: omit this when no tools
6890

69-
@alias("user")
91+
@alias("parallel_tool_calls")
92+
@omitif("this.parallelToolCalls == true || !this.tools || this.tools!.length == 0")
93+
parallelToolCalls: bool = true;
94+
95+
96+
@omitnull()
7097
user: string | null = null;
7198
}
7299

@@ -98,6 +125,7 @@ export class ResponseFormat {
98125
// @json
99126
// export class StreamOptions {
100127

128+
// @omitif("this.includeUsage == false")
101129
// @alias("include_usage")
102130
// includeUsage: bool = false;
103131
// }
@@ -111,8 +139,14 @@ export class Tool {
111139

112140
@json
113141
export class FunctionDefinition {
114-
description: string | null = null;
115142
name!: string;
143+
144+
145+
@omitnull()
146+
description: string | null = null;
147+
148+
149+
@omitnull()
116150
parameters: string | null = null; // TODO: verify this works
117151
}
118152

@@ -212,6 +246,8 @@ export class SystemMessage extends Message {
212246
super("system", content);
213247
}
214248

249+
250+
@omitnull()
215251
name: string | null = null;
216252
}
217253

@@ -222,6 +258,8 @@ export class UserMessage extends Message {
222258
super("user", content);
223259
}
224260

261+
262+
@omitnull()
225263
name: string | null = null;
226264
}
227265

@@ -232,10 +270,13 @@ export class AssistantMessage extends Message {
232270
super("assistant", content);
233271
}
234272

273+
274+
@omitnull()
235275
name: string | null = null;
236276

237277

238278
@alias("tool_calls")
279+
@omitif("this.toolCalls.length == 0")
239280
toolCalls: ToolCall[] = [];
240281
}
241282

src/models/openai/embeddings.ts

Lines changed: 11 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -1,4 +1,3 @@
1-
// import { Box } from "as-container/assembly";
21
import { Model } from "../..";
32

43
// Reference: https://platform.openai.com/docs/api-reference/embeddings
@@ -18,9 +17,18 @@ export default class EmbeddingsModel extends Model<
1817
class EmbeddingsInput {
1918
input!: string; // todo: support other types of input (arrays, etc.)
2019
model!: string;
20+
21+
22+
@omitif("this.encodingFormat.type == 'float'")
2123
encodingFormat: EncodingFormat = EncodingFormat.Float;
22-
// dimensions: Box<i32> | null = null;
23-
user: string = "";
24+
25+
26+
@omitif("this.dimensions == -1")
27+
dimensions: i32 = -1; // TODO: make this an `i32 | null` when supported
28+
29+
30+
@omitnull()
31+
user: string | null = null;
2432
}
2533

2634

0 commit comments

Comments (0)