This repository was archived by the owner on Oct 8, 2024. It is now read-only.

Commit 877d822

Update type exports and publish as v0.1.6 (#5)
Parent: fcd4e39

5 files changed: 17 additions & 20 deletions

src/models/meta/llama.ts

Lines changed: 8 additions & 5 deletions
@@ -1,21 +1,24 @@
 import { Model } from "../..";
 
-export default class LlamaModel extends Model<LlamaInput, LlamaOutput> {
+export class TextGenerationModel extends Model<
+  TextGenerationInput,
+  TextGenerationOutput
+> {
   /**
    * Creates a new input object for the model.
    * @param prompt The prompt text to pass to the model.
    * @returns A new input object.
    * @remarks Optional properties may be set on the returned input object to
    * control the behavior of the model.
    */
-  createInput(prompt: string): LlamaInput {
-    return <LlamaInput>{ prompt };
+  createInput(prompt: string): TextGenerationInput {
+    return <TextGenerationInput>{ prompt };
   }
 }
 
 
 @json
-class LlamaInput {
+class TextGenerationInput {
   /**
    * The prompt text to pass to the model.
    * May contain special tokens to control the behavior of the model.
@@ -55,7 +58,7 @@ class LlamaInput {
 
 
 @json
-class LlamaOutput {
+class TextGenerationOutput {
   /**
    * The generated text.
    */

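The visible effect for callers is that the Llama wrapper is now a named export with model-agnostic type names. Below is a minimal sketch (not part of this commit) of how calling code adapts: the import path and the way the model instance is obtained are assumptions about the package layout; only createInput and the prompt field come from this diff.

// Sketch only: caller-side view of the rename
// (LlamaModel -> TextGenerationModel, LlamaInput -> TextGenerationInput).
// The import path below is an assumption; the model instance is assumed to be
// supplied by the host runtime.
import { TextGenerationModel } from "@hypermode/models-as/models/meta/llama";

export function makePrompt(model: TextGenerationModel, prompt: string): string {
  const input = model.createInput(prompt);
  // Optional generation settings may still be set on `input` before invoking
  // the model, per the createInput doc comment.
  return input.prompt;
}
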
src/models/openai/chat.ts

Lines changed: 5 additions & 8 deletions
@@ -2,19 +2,16 @@ import { Model } from "../..";
 
 // Reference: https://platform.openai.com/docs/api-reference/chat
 
-export default class ChatCompletionModel extends Model<
-  ChatCompletionInput,
-  ChatCompletionOutput
-> {
-  createInput(messages: Message[]): ChatCompletionInput {
+export class ChatModel extends Model<ChatInput, ChatOutput> {
+  createInput(messages: Message[]): ChatInput {
     const model = this.info.fullName;
-    return <ChatCompletionInput>{ model, messages };
+    return <ChatInput>{ model, messages };
   }
 }
 
 
 @json
-class ChatCompletionInput {
+class ChatInput {
   model!: string;
   messages!: Message[];
 
@@ -99,7 +96,7 @@ class ChatCompletionInput {
 
 
 @json
-class ChatCompletionOutput {
+class ChatOutput {
   id!: string;
   object!: string;
   choices!: Choice[];

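As with the Llama model, the chat wrapper drops its default export and shortens its type names. A hedged sketch of caller code after the change follows; the import path and the availability of a named Message export are assumptions, and only createInput's signature and the model/messages fields come from this diff.

// Sketch only: caller-side view of the rename
// (ChatCompletionModel -> ChatModel, ChatCompletionInput -> ChatInput).
// The import path and the Message import are assumptions about the package layout.
import { ChatModel, Message } from "@hypermode/models-as/models/openai/chat";

export function makeChatInput(model: ChatModel, messages: Message[]): string {
  // createInput stamps the input with model.info.fullName as its `model` field.
  const input = model.createInput(messages);
  return input.model;
}
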
src/models/openai/embeddings.ts

Lines changed: 1 addition & 4 deletions
@@ -2,10 +2,7 @@ import { Model } from "../..";
 
 // Reference: https://platform.openai.com/docs/api-reference/embeddings
 
-export default class EmbeddingsModel extends Model<
-  EmbeddingsInput,
-  EmbeddingsOutput
-> {
+export class EmbeddingsModel extends Model<EmbeddingsInput, EmbeddingsOutput> {
   createInput(text: string): EmbeddingsInput {
     const model = this.info.fullName;
     return <EmbeddingsInput>{ model, input: text };

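EmbeddingsModel keeps its name; the change here is only from a default export with a multi-line class header to a single-line named export. A small sketch for symmetry with the other files; the import path is an assumption, and only createInput and the model field are taken from this diff.

// Sketch only: EmbeddingsModel is now a named export; its API is unchanged.
// The import path below is an assumption about the package layout.
import { EmbeddingsModel } from "@hypermode/models-as/models/openai/embeddings";

export function makeEmbeddingsInput(model: EmbeddingsModel, text: string): string {
  // createInput fills in model.info.fullName and wraps `text` as the input.
  const input = model.createInput(text);
  return input.model;
}
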
src/package-lock.json

Lines changed: 2 additions & 2 deletions
Generated file; the diff is not rendered by default.

src/package.json

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 {
   "name": "@hypermode/models-as",
-  "version": "0.1.5",
+  "version": "0.1.6",
   "description": "Hypermode Model Interface Library for AssemblyScript",
   "author": "Hypermode, Inc.",
   "license": "MIT",
