@@ -10,6 +10,9 @@ import {
10
10
// @ts -ignore deno_core environment
11
11
const core = globalThis . Deno . core ;
12
12
13
+ // TODO: extract to utils file
14
+ export type Result < T , E > = [ T , undefined ] | [ undefined , E ] ;
15
+
// NOTE:(kallebysantos) do we still need gte-small? Or maybe add another type 'embeddings' with custom model opt.
/** Kind of session to open: the built-in embedding model or any LLM provider. */
export type SessionType = "gte-small" | LLMProviderName;
@@ -47,6 +50,16 @@ export type SessionOutput<T extends SessionType, O> = T extends "gte-small"
47
50
: LLMProviderInstance < T > [ "output" ]
48
51
: never ;
49
52
53
+ export type SessionError < T = object | string > = {
54
+ message : string ;
55
+ inner : T ;
56
+ } ;
57
+
58
+ export type SessionOutputError < T extends SessionType > = T extends "gte-small"
59
+ ? SessionError < Error >
60
+ : T extends LLMProviderName ? SessionError < LLMProviderInstance < T > [ "error" ] >
61
+ : any ;
62
+
50
63
export class Session < T extends SessionType > {
51
64
#model?: string ;
52
65
#init?: Promise < void > ;
@@ -87,42 +100,58 @@ export class Session<T extends SessionType> {
87
100
async run < O extends SessionInputOptions < T > > (
88
101
input : SessionInput < T > ,
89
102
options : O ,
90
- ) : Promise < SessionOutput < T , O > > {
91
- if ( this . isLLMType ( ) ) {
92
- const opts = options as LLMInputOptions ;
93
- const stream = opts . stream ?? false ;
94
-
95
- const llmSession = LLMSession . fromProvider ( this . type , {
96
- // safety: We did check `options` during construction
97
- baseURL : this . options ! . baseURL ,
98
- model : this . options ! . model ,
99
- ...this . options , // allows custom provider initialization like 'apiKey'
100
- } ) ;
101
-
102
- return await llmSession . run ( input , {
103
- stream,
104
- signal : opts . signal ,
105
- timeout : opts . timeout ,
106
- } ) as SessionOutput < T , typeof options > ;
107
- }
103
+ ) : Promise <
104
+ [ SessionOutput < T , O > , undefined ] | [ undefined , SessionOutputError < T > ]
105
+ > {
106
+ try {
107
+ if ( this . isLLMType ( ) ) {
108
+ const opts = options as LLMInputOptions ;
109
+ const stream = opts . stream ?? false ;
110
+
111
+ const llmSession = LLMSession . fromProvider ( this . type , {
112
+ // safety: We did check `options` during construction
113
+ baseURL : this . options ! . baseURL ,
114
+ model : this . options ! . model ,
115
+ ...this . options , // allows custom provider initialization like 'apiKey'
116
+ } ) ;
117
+
118
+ const [ output , error ] = await llmSession . run ( input , {
119
+ stream,
120
+ signal : opts . signal ,
121
+ timeout : opts . timeout ,
122
+ } ) ;
123
+ if ( error ) {
124
+ return [ undefined , error as SessionOutputError < T > ] ;
125
+ }
126
+
127
+ return [ output as SessionOutput < T , typeof options > , undefined ] ;
128
+ }
108
129
109
- if ( this . #init) {
110
- await this . #init;
111
- }
130
+ if ( this . #init) {
131
+ await this . #init;
132
+ }
112
133
113
- const opts = options as EmbeddingInputOptions ;
134
+ const opts = options as EmbeddingInputOptions ;
114
135
115
- const mean_pool = opts . mean_pool ?? true ;
116
- const normalize = opts . normalize ?? true ;
136
+ const mean_pool = opts . mean_pool ?? true ;
137
+ const normalize = opts . normalize ?? true ;
117
138
118
- const result = await core . ops . op_ai_run_model (
119
- this . #model,
120
- prompt ,
121
- mean_pool ,
122
- normalize ,
123
- ) ;
139
+ const result = await core . ops . op_ai_run_model (
140
+ this . #model,
141
+ prompt ,
142
+ mean_pool ,
143
+ normalize ,
144
+ ) as SessionOutput < T , typeof options > ;
124
145
125
- return result ;
146
+ return [ result , undefined ] ;
147
+ } catch ( e : any ) {
148
+ const error = ( e instanceof Error ) ? e : new Error ( e ) ;
149
+
150
+ return [
151
+ undefined ,
152
+ { inner : error , message : error . message } as SessionOutputError < T > ,
153
+ ] ;
154
+ }
126
155
}
127
156
128
157
private isEmbeddingType (
0 commit comments