@@ -7,6 +7,7 @@ import type { ApiHandlerOptions } from "../../shared/api"
 import { getOllamaModels } from "./fetchers/ollama"
 import { XmlMatcher } from "../../utils/xml-matcher"
 import type { SingleCompletionHandler, ApiHandlerCreateMessageMetadata } from "../index"
+import { t } from "../../i18n"
 
 const TOKEN_ESTIMATION_FACTOR = 4 // Industry standard technique for estimating token counts without actually implementing a parser/tokenizer
 
@@ -171,7 +172,7 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletionHandler
 
				this.client = new Ollama(clientOptions)
			} catch (error: any) {
-				throw new Error(`Error creating Ollama client: ${error.message}`)
+				throw new Error(t("common:errors.ollama.clientCreationError", { error: error.message }))
			}
		}
		return this.client
@@ -199,7 +200,7 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletionHandler
		const estimatedTokenCount = estimateOllamaTokenCount(ollamaMessages)
		if (modelInfo.maxTokens && estimatedTokenCount > modelInfo.maxTokens) {
			throw new Error(
-				`Input message is too long for the selected model. Estimated tokens: ${estimatedTokenCount}, Max tokens: ${modelInfo.maxTokens}. To increase the context window size, please set the OLLAMA_NUM_CTX environment variable or see Ollama documentation.`,
+				t("common:errors.ollama.inputTooLong", { estimatedTokenCount, maxTokens: modelInfo.maxTokens }),
			)
		}
 
@@ -261,7 +262,11 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletionHandler
				}
			} catch (streamError: any) {
				console.error("Error processing Ollama stream:", streamError)
-				throw new Error(`Ollama stream processing error: ${streamError.message || "Unknown error"}`)
+				throw new Error(
+					t("common:errors.ollama.streamProcessingError", {
+						error: streamError.message || t("common:errors.ollama.unknownError"),
+					}),
+				)
			}
		} catch (error: any) {
			// Enhance error reporting
@@ -270,12 +275,12 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletionHandler
 
			if (error.code === "ECONNREFUSED") {
				throw new Error(
-					`Ollama service is not running at ${this.options.ollamaBaseUrl || "http://localhost:11434"}. Please start Ollama first.`,
+					t("common:errors.ollama.serviceNotRunning", {
+						baseUrl: this.options.ollamaBaseUrl || "http://localhost:11434",
+					}),
				)
			} else if (statusCode === 404) {
-				throw new Error(
-					`Model ${this.getModel().id} not found in Ollama. Please pull the model first with: ollama pull ${this.getModel().id}`,
-				)
+				throw new Error(t("common:errors.ollama.modelNotFound", { modelId: this.getModel().id }))
			}
 
			console.error(`Ollama API error (${statusCode || "unknown"}): ${errorMessage}`)
@@ -296,8 +301,11 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletionHandler
			const availableModels = Object.keys(this.models)
			const errorMessage =
				availableModels.length > 0
-					? `Model ${modelId} not found. Available models: ${availableModels.join(", ")}`
-					: `Model ${modelId} not found. No models available. Please pull the model first with: ollama pull ${modelId}`
+					? t("common:errors.ollama.modelNotFoundWithAvailable", {
+							modelId,
+							availableModels: availableModels.join(", "),
+						})
+					: t("common:errors.ollama.modelNotFoundNoModels", { modelId })
			throw new Error(errorMessage)
		}
 
@@ -329,7 +337,7 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletionHandler
			return response.message?.content || ""
		} catch (error) {
			if (error instanceof Error) {
-				throw new Error(`Ollama completion error: ${error.message}`)
+				throw new Error(t("common:errors.ollama.completionError", { error: error.message }))
			}
			throw error
		}
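
Each new `t()` call above needs a matching entry under `errors.ollama` in the `common` namespace. The sketch below reconstructs those strings from the literals removed in this diff; the object name, the locale-file location, and the `{{placeholder}}` interpolation syntax are assumptions based on typical i18next usage, not something this diff confirms.

```ts
// Hypothetical sketch of the English strings behind the new translation keys.
// Key names are taken from the t() calls in the diff; the file location
// (e.g. an en/common.json locale file) and the {{placeholder}} interpolation
// syntax are assumed i18next conventions, not shown in this PR.
const ollamaErrors = {
	clientCreationError: "Error creating Ollama client: {{error}}",
	inputTooLong:
		"Input message is too long for the selected model. Estimated tokens: {{estimatedTokenCount}}, Max tokens: {{maxTokens}}. To increase the context window size, please set the OLLAMA_NUM_CTX environment variable or see Ollama documentation.",
	streamProcessingError: "Ollama stream processing error: {{error}}",
	unknownError: "Unknown error",
	serviceNotRunning: "Ollama service is not running at {{baseUrl}}. Please start Ollama first.",
	modelNotFound: "Model {{modelId}} not found in Ollama. Please pull the model first with: ollama pull {{modelId}}",
	modelNotFoundWithAvailable: "Model {{modelId}} not found. Available models: {{availableModels}}",
	modelNotFoundNoModels:
		"Model {{modelId}} not found. No models available. Please pull the model first with: ollama pull {{modelId}}",
	completionError: "Ollama completion error: {{error}}",
}
```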