import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"

-import { AuthState, rooDefaultModelId, type ModelInfo } from "@roo-code/types"
+import { rooDefaultModelId } from "@roo-code/types"
import { CloudService } from "@roo-code/cloud"

import type { ApiHandlerOptions, ModelRecord } from "../../shared/api"
@@ -12,9 +12,8 @@ import type { RooReasoningParams } from "../transform/reasoning"
import { getRooReasoning } from "../transform/reasoning"

import type { ApiHandlerCreateMessageMetadata } from "../index"
-import { DEFAULT_HEADERS } from "./constants"
import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"
-import { getModels, flushModels, getModelsFromCache } from "../providers/fetchers/modelCache"
+import { getModels, getModelsFromCache } from "../providers/fetchers/modelCache"
import { handleOpenAIError } from "./utils/openai-error-handler"

// Extend OpenAI's CompletionUsage to include Roo specific fields
@@ -28,16 +27,16 @@ type RooChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParamsStreaming &
	reasoning?: RooReasoningParams
}

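+// Returns the current Roo Code Cloud session token, or the "unauthenticated" placeholder when no user is signed in.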
+function getSessionToken(): string {
+	const token = CloudService.hasInstance() ? CloudService.instance.authService?.getSessionToken() : undefined
+	return token ?? "unauthenticated"
+}
+
export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
-	private authStateListener?: (state: { state: AuthState }) => void
	private fetcherBaseURL: string

	constructor(options: ApiHandlerOptions) {
-		let sessionToken: string | undefined = undefined
-
-		if (CloudService.hasInstance()) {
-			sessionToken = CloudService.instance.authService?.getSessionToken()
-		}
+		const sessionToken = getSessionToken()

		let baseURL = process.env.ROO_CODE_PROVIDER_URL ?? "https://api.roocode.com/proxy"

@@ -52,7 +51,7 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
			...options,
			providerName: "Roo Code Cloud",
			baseURL, // Already has /v1 suffix
-			apiKey: sessionToken || "unauthenticated", // Use a placeholder if no token.
+			apiKey: sessionToken,
			defaultProviderModelId: rooDefaultModelId,
			providerModels: {},
			defaultTemperature: 0.7,
@@ -63,29 +62,6 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
		this.loadDynamicModels(this.fetcherBaseURL, sessionToken).catch((error) => {
			console.error("[RooHandler] Failed to load dynamic models:", error)
		})
-
-		if (CloudService.hasInstance()) {
-			const cloudService = CloudService.instance
-
-			this.authStateListener = (state: { state: AuthState }) => {
-				// Update OpenAI client with current auth token
-				// Note: Model cache flush/reload is handled by extension.ts authStateChangedHandler
-				const newToken = cloudService.authService?.getSessionToken()
-				this.client = new OpenAI({
-					baseURL: this.baseURL,
-					apiKey: newToken ?? "unauthenticated",
-					defaultHeaders: DEFAULT_HEADERS,
-				})
-			}
-
-			cloudService.on("auth-state-changed", this.authStateListener)
-		}
-	}
-
-	dispose() {
-		if (this.authStateListener && CloudService.hasInstance()) {
-			CloudService.instance.off("auth-state-changed", this.authStateListener)
-		}
	}

	protected override createStream(
@@ -127,6 +103,7 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
		}

		try {
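+			// Refresh the API key so this request goes out with the latest session token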
+			this.client.apiKey = getSessionToken()
			return this.client.chat.completions.create(rooParams, requestOptions)
		} catch (error) {
			throw handleOpenAIError(error, this.providerName)
@@ -195,6 +172,11 @@ export class RooHandler extends BaseOpenAiCompatibleProvider<string> {
			}
		}
	}
+	override async completePrompt(prompt: string): Promise<string> {
+		// Update API key before making request to ensure we use the latest session token
+		this.client.apiKey = getSessionToken()
+		return super.completePrompt(prompt)
+	}

	private async loadDynamicModels(baseURL: string, apiKey?: string): Promise<void> {
		try {