Skip to content

Commit e78542a

Browse files
committed
stamp: improved error handling
- Apply the 'pattern matching' and 'Result pattern' approaches to improve error handling. This enforces that users check for errors before consuming the result value
1 parent 98ad9f1 commit e78542a

File tree

4 files changed

+281
-96
lines changed

4 files changed

+281
-96
lines changed

ext/ai/js/ai.ts

Lines changed: 60 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,9 @@ import {
1010
// @ts-ignore deno_core environment
1111
const core = globalThis.Deno.core;
1212

13+
// TODO: extract to utils file
14+
export type Result<T, E> = [T, undefined] | [undefined, E];
15+
1316
// NOTE:(kallebysantos) do we still need gte-small? Or maybe add another type 'embeddings' with custom model opt.
1417
export type SessionType = LLMProviderName | "gte-small";
1518

@@ -47,6 +50,16 @@ export type SessionOutput<T extends SessionType, O> = T extends "gte-small"
4750
: LLMProviderInstance<T>["output"]
4851
: never;
4952

53+
export type SessionError<T = object | string> = {
54+
message: string;
55+
inner: T;
56+
};
57+
58+
export type SessionOutputError<T extends SessionType> = T extends "gte-small"
59+
? SessionError<Error>
60+
: T extends LLMProviderName ? SessionError<LLMProviderInstance<T>["error"]>
61+
: any;
62+
5063
export class Session<T extends SessionType> {
5164
#model?: string;
5265
#init?: Promise<void>;
@@ -87,42 +100,58 @@ export class Session<T extends SessionType> {
87100
async run<O extends SessionInputOptions<T>>(
88101
input: SessionInput<T>,
89102
options: O,
90-
): Promise<SessionOutput<T, O>> {
91-
if (this.isLLMType()) {
92-
const opts = options as LLMInputOptions;
93-
const stream = opts.stream ?? false;
94-
95-
const llmSession = LLMSession.fromProvider(this.type, {
96-
// safety: We did check `options` during construction
97-
baseURL: this.options!.baseURL,
98-
model: this.options!.model,
99-
...this.options, // allows custom provider initialization like 'apiKey'
100-
});
101-
102-
return await llmSession.run(input, {
103-
stream,
104-
signal: opts.signal,
105-
timeout: opts.timeout,
106-
}) as SessionOutput<T, typeof options>;
107-
}
103+
): Promise<
104+
[SessionOutput<T, O>, undefined] | [undefined, SessionOutputError<T>]
105+
> {
106+
try {
107+
if (this.isLLMType()) {
108+
const opts = options as LLMInputOptions;
109+
const stream = opts.stream ?? false;
110+
111+
const llmSession = LLMSession.fromProvider(this.type, {
112+
// safety: We did check `options` during construction
113+
baseURL: this.options!.baseURL,
114+
model: this.options!.model,
115+
...this.options, // allows custom provider initialization like 'apiKey'
116+
});
117+
118+
const [output, error] = await llmSession.run(input, {
119+
stream,
120+
signal: opts.signal,
121+
timeout: opts.timeout,
122+
});
123+
if (error) {
124+
return [undefined, error as SessionOutputError<T>];
125+
}
126+
127+
return [output as SessionOutput<T, typeof options>, undefined];
128+
}
108129

109-
if (this.#init) {
110-
await this.#init;
111-
}
130+
if (this.#init) {
131+
await this.#init;
132+
}
112133

113-
const opts = options as EmbeddingInputOptions;
134+
const opts = options as EmbeddingInputOptions;
114135

115-
const mean_pool = opts.mean_pool ?? true;
116-
const normalize = opts.normalize ?? true;
136+
const mean_pool = opts.mean_pool ?? true;
137+
const normalize = opts.normalize ?? true;
117138

118-
const result = await core.ops.op_ai_run_model(
119-
this.#model,
120-
prompt,
121-
mean_pool,
122-
normalize,
123-
);
139+
const result = await core.ops.op_ai_run_model(
140+
this.#model,
141+
prompt,
142+
mean_pool,
143+
normalize,
144+
) as SessionOutput<T, typeof options>;
124145

125-
return result;
146+
return [result, undefined];
147+
} catch (e: any) {
148+
const error = (e instanceof Error) ? e : new Error(e);
149+
150+
return [
151+
undefined,
152+
{ inner: error, message: error.message } as SessionOutputError<T>,
153+
];
154+
}
126155
}
127156

128157
private isEmbeddingType(

ext/ai/js/llm/llm_session.ts

Lines changed: 27 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import { Result, SessionError } from "../ai.ts";
12
import { OllamaLLMSession } from "./providers/ollama.ts";
23
import { OpenAILLMSession } from "./providers/openai.ts";
34

@@ -20,6 +21,7 @@ export type LLMRunInput = {
2021
export interface ILLMProviderMeta {
2122
input: ILLMProviderInput;
2223
output: unknown;
24+
error: unknown;
2325
options: ILLMProviderOptions;
2426
}
2527

@@ -41,15 +43,23 @@ export interface ILLMProviderOutput<T = object> {
4143
inner: T;
4244
}
4345

46+
export interface ILLMProviderError<T = object> extends SessionError<T> {
47+
}
48+
4449
export interface ILLMProvider {
4550
getStream(
4651
input: ILLMProviderInput,
4752
signal: AbortSignal,
48-
): Promise<AsyncIterable<ILLMProviderOutput>>;
53+
): Promise<
54+
Result<
55+
AsyncIterable<Result<ILLMProviderOutput, ILLMProviderError>>,
56+
ILLMProviderError
57+
>
58+
>;
4959
getText(
5060
input: ILLMProviderInput,
5161
signal: AbortSignal,
52-
): Promise<ILLMProviderOutput>;
62+
): Promise<Result<ILLMProviderOutput, ILLMProviderError>>;
5363
}
5464

5565
export const providers = {
@@ -81,6 +91,10 @@ export type LLMSessionRunInputOptions = {
8191
signal?: AbortSignal;
8292
};
8393

94+
export type LLMSessionOutput =
95+
| AsyncIterable<Result<ILLMProviderOutput, ILLMProviderError>>
96+
| ILLMProviderOutput;
97+
8498
export class LLMSession {
8599
#inner: ILLMProvider;
86100

@@ -97,10 +111,10 @@ export class LLMSession {
97111
return new LLMSession(provider);
98112
}
99113

100-
run(
114+
async run(
101115
input: ILLMProviderInput,
102116
opts: LLMSessionRunInputOptions,
103-
): Promise<AsyncIterable<ILLMProviderOutput>> | Promise<ILLMProviderOutput> {
117+
): Promise<Result<LLMSessionOutput, ILLMProviderError>> {
104118
const isStream = opts.stream ?? false;
105119

106120
const timeoutSeconds = typeof opts.timeout === "number" ? opts.timeout : 60;
@@ -112,7 +126,15 @@ export class LLMSession {
112126
const signal = AbortSignal.any(abortSignals);
113127

114128
if (isStream) {
115-
return this.#inner.getStream(input, signal);
129+
const [stream, getStreamError] = await this.#inner.getStream(
130+
input,
131+
signal,
132+
);
133+
if (getStreamError) {
134+
return [undefined, getStreamError];
135+
}
136+
137+
return [stream, undefined];
116138
}
117139

118140
return this.#inner.getText(input, signal);

0 commit comments

Comments (0)