@@ -138,13 +138,14 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 		}
 
 		const isGrokXAI = this._isGrokXAI(this.options.openAiBaseUrl)
+		const isDatabricksAI = this._isDatabricksAI(this.options.openAiBaseUrl)
 
 		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
 			model: modelId,
 			temperature: this.options.modelTemperature ?? (deepseekReasoner ? DEEP_SEEK_DEFAULT_TEMPERATURE : 0),
 			messages: convertedMessages,
 			stream: true as const,
-			...(isGrokXAI ? {} : { stream_options: { include_usage: true } }),
+			...(isGrokXAI || isDatabricksAI ? {} : { stream_options: { include_usage: true } }),
 		}
 		if (this.options.includeMaxTokens) {
 			requestOptions.max_tokens = modelInfo.maxTokens
@@ -268,6 +269,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 		const methodIsAzureAiInference = this._isAzureAiInference(this.options.openAiBaseUrl)
 
 		const isGrokXAI = this._isGrokXAI(this.options.openAiBaseUrl)
+		const isDatabricksAI = this._isDatabricksAI(this.options.openAiBaseUrl)
 
 		const stream = await this.client.chat.completions.create(
 			{
@@ -280,7 +282,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 					...convertToOpenAiMessages(messages),
 				],
 				stream: true,
-				...(isGrokXAI ? {} : { stream_options: { include_usage: true } }),
+				...(isGrokXAI || isDatabricksAI ? {} : { stream_options: { include_usage: true } }),
 				reasoning_effort: this.getModel().info.reasoningEffort,
 			},
 			methodIsAzureAiInference ? { path: AZURE_AI_INFERENCE_PATH } : {},
@@ -346,6 +348,11 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 		return urlHost.includes("x.ai")
 	}
 
+	private _isDatabricksAI(baseUrl?: string): boolean {
+		const urlHost = this._getUrlHost(baseUrl)
+		return urlHost.includes(".azuredatabricks.net")
+	}
+
 	private _isAzureAiInference(baseUrl?: string): boolean {
 		const urlHost = this._getUrlHost(baseUrl)
 		return urlHost.endsWith(".services.ai.azure.com")
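For context outside the diff, here is a minimal standalone sketch of how the new host-based Databricks detection is expected to behave. `getUrlHost` stands in for the handler's private `_getUrlHost` helper (assumed to return the parsed hostname, or an empty string when the base URL cannot be parsed), and the sample workspace URL is illustrative only, not taken from the change.

```typescript
// Sketch only: mirrors the detection logic added in this diff.
function getUrlHost(baseUrl?: string): string {
	try {
		// Assumption: the real _getUrlHost parses the URL and returns its host,
		// falling back to "" when parsing fails.
		return new URL(baseUrl ?? "").host.toLowerCase()
	} catch {
		return ""
	}
}

function isDatabricksAI(baseUrl?: string): boolean {
	// Databricks model-serving endpoints live under *.azuredatabricks.net.
	return getUrlHost(baseUrl).includes(".azuredatabricks.net")
}

// A Databricks workspace URL matches, so stream_options is omitted for it;
// a standard OpenAI base URL does not, so include_usage is still requested.
console.log(isDatabricksAI("https://adb-1111111111111111.1.azuredatabricks.net/serving-endpoints")) // true
console.log(isDatabricksAI("https://api.openai.com/v1")) // false
```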