@@ -88,7 +88,7 @@ class DevChat {
         const secretStorage: vscode.SecretStorage = ExtensionContextHolder.context!.secrets;
         let openaiApiKey = await secretStorage.get("devchat_OPENAI_API_KEY");
         if (!openaiApiKey) {
-            openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('OpenAI.apiKey');
+            openaiApiKey = vscode.workspace.getConfiguration('DevChat').get('API_KEY');
         }
         if (!openaiApiKey) {
             openaiApiKey = process.env.OPENAI_API_KEY;
@@ -146,6 +146,22 @@ class DevChat {
             isError: false,
         };
     }
+
+    apiEndpoint(apiKey: string | undefined): any {
+        let openAiApiBase: string | undefined = undefined;
+        if (apiKey?.startsWith("DC.")) {
+            // TODO add devchat proxy
+            openAiApiBase = "https://xw4ymuy6qj.ap-southeast-1.awsapprunner.com/api/v1";
+        }
+
+        if (vscode.workspace.getConfiguration('DevChat').get('API_ENDPOINT')) {
+            openAiApiBase = vscode.workspace.getConfiguration('DevChat').get('API_ENDPOINT');
+        }
+
+        const openAiApiBaseObject = openAiApiBase ? { OPENAI_API_BASE: openAiApiBase } : {};
+        return openAiApiBaseObject;
+    }
+
     async chat(content: string, options: ChatOptions = {}, onData: (data: ChatResponse) => void): Promise<ChatResponse> {
         const args = await this.buildArgs(options);
         args.push(content);
@@ -158,8 +174,8 @@ class DevChat {
         }
 
 
-        const openaiApiBase = vscode.workspace.getConfiguration('DevChat').get('OpenAI.EndPoint');
-        const openaiApiBaseObject = openaiApiBase ? { OPENAI_API_BASE: openaiApiBase } : {};
+        // If a devchat TOKEN is configured, the default proxy needs to be used
+        let openAiApiBaseObject = this.apiEndpoint(openaiApiKey);
 
         const openaiModel = vscode.workspace.getConfiguration('DevChat').get('OpenAI.model');
         const openaiTemperature = vscode.workspace.getConfiguration('DevChat').get('OpenAI.temperature');
@@ -196,16 +212,19 @@ class DevChat {
             onData(data);
         };
 
-        logger.channel()?.info(`Running devchat with args: ${args.join(" ")}`);
-        const { exitCode: code, stdout, stderr } = await this.commandRun.spawnAsync(devChat, args, {
+        const spawnAsyncOptions = {
             maxBuffer: 10 * 1024 * 1024, // Set maxBuffer to 10 MB
             cwd: workspaceDir,
             env: {
                 ...process.env,
                 OPENAI_API_KEY: openaiApiKey,
-                ...openaiApiBaseObject
+                ...openAiApiBaseObject
             },
-        }, onStdoutPartial, undefined, undefined, undefined);
+        };
+
+        logger.channel()?.info(`Running devchat with args: ${args.join(" ")}`);
+        logger.channel()?.info(`Running devchat with env: ${JSON.stringify(openAiApiBaseObject)}`);
+        const { exitCode: code, stdout, stderr } = await this.commandRun.spawnAsync(devChat, args, spawnAsyncOptions, onStdoutPartial, undefined, undefined, undefined);
 
         if (stderr) {
            const errorMessage = stderr.trim().match(/Error: (.+)/)?.[1];
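
For reference, a minimal standalone sketch (not part of the commit) of the endpoint-resolution rule that the new apiEndpoint method introduces: a key prefixed with "DC." selects the DevChat proxy base, and an explicitly configured DevChat.API_ENDPOINT setting overrides it. The function name resolveApiBase and the plain configuredEndpoint parameter are illustrative stand-ins for the vscode configuration lookup, so the sketch can run outside the extension host.

// Illustrative sketch only -- mirrors the apiEndpoint logic above without the vscode dependency.
// "resolveApiBase" and "configuredEndpoint" are hypothetical names standing in for
// vscode.workspace.getConfiguration('DevChat').get('API_ENDPOINT').
function resolveApiBase(apiKey: string | undefined, configuredEndpoint?: string): { OPENAI_API_BASE?: string } {
    let base: string | undefined = undefined;
    if (apiKey?.startsWith("DC.")) {
        // DevChat-issued keys route through the default proxy
        base = "https://xw4ymuy6qj.ap-southeast-1.awsapprunner.com/api/v1";
    }
    if (configuredEndpoint) {
        // An explicit API_ENDPOINT setting always wins
        base = configuredEndpoint;
    }
    return base ? { OPENAI_API_BASE: base } : {};
}

// Example results, matching how openAiApiBaseObject is spread into the spawn env:
console.log(resolveApiBase("DC.example"));                             // proxy base is set
console.log(resolveApiBase("sk-example"));                             // {} -- env untouched
console.log(resolveApiBase("sk-example", "https://example.test/v1"));  // explicit endpoint wins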