@@ -6,22 +6,39 @@ import { ApiHandlerOptions, ModelInfo, glamaDefaultModelId, glamaDefaultModelInf
 import { parseApiPrice } from "../../utils/cost"
 import { convertToOpenAiMessages } from "../transform/openai-format"
 import { ApiStream } from "../transform/stream"
-import { ApiHandler, SingleCompletionHandler } from "../"
+import { SingleCompletionHandler } from "../"
+import { BaseProvider } from "./base-provider"
 
 const GLAMA_DEFAULT_TEMPERATURE = 0
 
-export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
-	private options: ApiHandlerOptions
+export class GlamaHandler extends BaseProvider implements SingleCompletionHandler {
+	protected options: ApiHandlerOptions
 	private client: OpenAI
 
 	constructor(options: ApiHandlerOptions) {
+		super()
 		this.options = options
 		const baseURL = "https://glama.ai/api/gateway/openai/v1"
 		const apiKey = this.options.glamaApiKey ?? "not-provided"
 		this.client = new OpenAI({ baseURL, apiKey })
 	}
 
-	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
+	private supportsTemperature(): boolean {
+		return !this.getModel().id.startsWith("openai/o3-mini")
+	}
+
+	override getModel(): { id: string; info: ModelInfo } {
+		const modelId = this.options.glamaModelId
+		const modelInfo = this.options.glamaModelInfo
+
+		if (modelId && modelInfo) {
+			return { id: modelId, info: modelInfo }
+		}
+
+		return { id: glamaDefaultModelId, info: glamaDefaultModelInfo }
+	}
+
+	override async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		// Convert Anthropic messages to OpenAI format
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
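Hunk context: this change migrates `GlamaHandler` from implementing `ApiHandler` directly to extending a shared `BaseProvider`, which is why the constructor now calls `super()` and `getModel()`/`createMessage()` are marked `override`. The base class itself isn't shown in this diff; below is a minimal sketch of what `./base-provider` might declare, assuming it satisfies `ApiHandler` and leaves the provider-specific pieces abstract (import paths are guesses):

```ts
import { Anthropic } from "@anthropic-ai/sdk"
import { ApiHandler } from "../" // assumed path, mirroring the diff's imports
import { ModelInfo } from "../../shared/api" // assumed path
import { ApiStream } from "../transform/stream"

// Sketch only: the real BaseProvider may also centralize shared behavior
// across providers; here it just pins down the ApiHandler contract.
export abstract class BaseProvider implements ApiHandler {
	// Each provider resolves its own model id and metadata.
	abstract getModel(): { id: string; info: ModelInfo }

	// Each provider streams chat completions in a common format.
	abstract createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
}
```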
@@ -152,21 +169,6 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 		}
 	}
 
-	private supportsTemperature(): boolean {
-		return !this.getModel().id.startsWith("openai/o3-mini")
-	}
-
-	getModel(): { id: string; info: ModelInfo } {
-		const modelId = this.options.glamaModelId
-		const modelInfo = this.options.glamaModelInfo
-
-		if (modelId && modelInfo) {
-			return { id: modelId, info: modelInfo }
-		}
-
-		return { id: glamaDefaultModelId, info: glamaDefaultModelInfo }
-	}
-
 	async completePrompt(prompt: string): Promise<string> {
 		try {
 			const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
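The second hunk simply relocates `supportsTemperature()` and `getModel()` above `createMessage()` (adding `override` in the new location) with no behavioral change. The diff truncates inside `completePrompt`, but the helper's purpose is presumably to gate the temperature parameter when building `requestOptions`, since `openai/o3-mini` models reject it; a sketch of that pattern (the `modelTemperature` option name is hypothetical):

```ts
// Hypothetical continuation of completePrompt, not shown in the diff:
// only attach temperature when the selected model accepts it.
if (this.supportsTemperature()) {
	requestOptions.temperature = this.options.modelTemperature ?? GLAMA_DEFAULT_TEMPERATURE
}
```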