@@ -1,6 +1,8 @@
 import { HttpsProxyAgent } from 'https-proxy-agent';
-import { KnownEndpoints } from 'librechat-data-provider';
-import type { AzureOpenAIInput } from '@langchain/azure-openai';
+import { KnownEndpoints, ReasoningEffort, ReasoningSummary, Verbosity } from 'librechat-data-provider';
+import type { AzureOpenAIInput } from '@langchain/openai';
+import type { BindToolsInput } from '@langchain/core/language_models/chat_models';
+import type { OpenAI } from 'openai';
 import type * as t from '~/types';
 import { sanitizeModelName, constructAzureURL } from '~/utils/azure';
 import { isEnabled } from '~/utils/common';
@@ -82,7 +84,7 @@ function hasReasoningParams({
  */
 export function getOpenAIConfig(
   apiKey: string,
-  options: t.LLMConfigOptions = {},
+  options: t.OpenAIConfigOptions = {},
   endpoint?: string | null,
 ): t.LLMConfigResult {
   const {
@@ -174,18 +176,18 @@ export function getOpenAIConfig(

   // Handle reasoning parameters for Responses API
   if (hasReasoningParams({ reasoning_effort, reasoning_summary })) {
-    if (reasoning_effort && reasoning_effort !== '') {
+    if (reasoning_effort != null && reasoning_effort !== ReasoningEffort.none) {
       modelKwargs.reasoning_effort = reasoning_effort;
       hasModelKwargs = true;
     }
-    if (reasoning_summary && reasoning_summary !== '') {
+    if (reasoning_summary != null && reasoning_summary !== ReasoningSummary.none) {
       modelKwargs.reasoning_summary = reasoning_summary;
       hasModelKwargs = true;
     }
   }

   // Add verbosity parameter
-  if (verbosity && verbosity !== '') {
+  if (verbosity != null && verbosity !== Verbosity.none) {
     modelKwargs.verbosity = verbosity;
     hasModelKwargs = true;
   }
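The guards above swap the old empty-string sentinels for enum comparisons. A minimal sketch of the resulting behavior, using a hypothetical local enum since only the 'none' member of ReasoningEffort is visible in this diff:

// Hypothetical stand-in for librechat-data-provider's ReasoningEffort enum;
// the real member values may differ.
enum ExampleReasoningEffort {
  none = 'none',
  low = 'low',
  medium = 'medium',
  high = 'high',
}

// null/undefined and 'none' are dropped so they never reach the provider as
// model kwargs; any other value is forwarded unchanged.
function buildReasoningKwargs(reasoning_effort?: ExampleReasoningEffort | null) {
  const modelKwargs: Record<string, unknown> = {};
  if (reasoning_effort != null && reasoning_effort !== ExampleReasoningEffort.none) {
    modelKwargs.reasoning_effort = reasoning_effort;
  }
  return modelKwargs;
}

// buildReasoningKwargs(ExampleReasoningEffort.none) -> {}
// buildReasoningKwargs(ExampleReasoningEffort.high) -> { reasoning_effort: 'high' }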
@@ -256,7 +258,7 @@ export function getOpenAIConfig(

   if (useOpenRouter && llmConfig.reasoning_effort != null) {
     llmConfig.reasoning = {
-      effort: llmConfig.reasoning_effort,
+      effort: llmConfig.reasoning_effort as any,
     };
     delete llmConfig.reasoning_effort;
   }
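The hunk above only adds a type cast; the surrounding logic moves the flat reasoning_effort field into the nested reasoning object that OpenRouter reads. A small sketch of that remap under an assumed, much-reduced config shape:

// Assumed minimal shape; the real llmConfig carries many more fields.
type ReasoningConfig = {
  reasoning_effort?: string;
  reasoning?: { effort?: string };
};

function remapReasoningForOpenRouter(llmConfig: ReasoningConfig): ReasoningConfig {
  if (llmConfig.reasoning_effort != null) {
    // Move the flat field into the nested object and drop the original key.
    llmConfig.reasoning = { effort: llmConfig.reasoning_effort };
    delete llmConfig.reasoning_effort;
  }
  return llmConfig;
}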
@@ -270,10 +272,22 @@ export function getOpenAIConfig(
     llmConfig.modelKwargs = modelKwargs;
   }

+  const tools: BindToolsInput[] = [];
+
+  if (modelOptions.web_search) {
+    llmConfig.useResponsesApi = true;
+    tools.push({ type: 'web_search_preview' });
+  }
+
   const result: t.LLMConfigResult = {
     llmConfig,
     configOptions,
+    tools,
   };

+  if (useOpenRouter) {
+    result.provider = 'openrouter';
+  }
+
   return result;
 }
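A hedged usage sketch of the extended return value (tools and the optional provider); the import path, option shape, and ChatOpenAI wiring below are assumptions about a caller, not part of this change:

import { ChatOpenAI } from '@langchain/openai';
// Hypothetical import path for the function modified above.
import { getOpenAIConfig } from '~/utils';

const { llmConfig, configOptions, tools, provider } = getOpenAIConfig(
  process.env.OPENAI_API_KEY ?? '',
  // Assumed option shape: web_search rides along with the model options.
  { modelOptions: { model: 'gpt-4o-mini', web_search: true } },
);

// web_search turns on useResponsesApi and contributes a web_search_preview
// tool, so the bound model can issue searches through the Responses API.
const client = new ChatOpenAI({ ...llmConfig, configuration: configOptions });
const model = tools && tools.length > 0 ? client.bindTools(tools) : client;

// provider is set to 'openrouter' only when the OpenRouter path was taken.
console.log('provider:', provider ?? 'openai');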