diff --git a/libs/providers/langchain-openai/src/azure/chat_models/common.ts b/libs/providers/langchain-openai/src/azure/chat_models/common.ts index 4cf133c343dd..ee6dcde7502d 100644 --- a/libs/providers/langchain-openai/src/azure/chat_models/common.ts +++ b/libs/providers/langchain-openai/src/azure/chat_models/common.ts @@ -1,4 +1,4 @@ -import { AzureOpenAI as AzureOpenAIClient } from "openai"; +import { AzureOpenAI as AzureOpenAIClient, type ClientOptions } from "openai"; import { getEnv, getEnvironmentVariable } from "@langchain/core/utils/env"; import type { Serialized } from "@langchain/core/load/serializable"; import { ChatOpenAICallOptions } from "../../chat_models/index.js"; @@ -56,8 +56,10 @@ export function _constructAzureFields( ) { this.azureOpenAIApiKey = fields?.azureOpenAIApiKey ?? - fields?.openAIApiKey ?? - fields?.apiKey ?? + (typeof fields?.openAIApiKey === "string" + ? fields?.openAIApiKey + : undefined) ?? + (typeof fields?.apiKey === "string" ? fields?.apiKey : undefined) ?? getEnvironmentVariable("AZURE_OPENAI_API_KEY"); this.azureOpenAIApiInstanceName = @@ -106,8 +108,9 @@ export function _getAzureClientOptions( const endpoint = getEndpoint(openAIEndpointConfig); - const params = { - ...this.clientConfig, + const { apiKey: existingApiKey, ...clientConfigRest } = this.clientConfig; + const params: Omit<ClientOptions, "apiKey"> & { apiKey?: string } = { + ...clientConfigRest, baseURL: endpoint, timeout: this.timeout, maxRetries: 0, diff --git a/libs/providers/langchain-openai/src/azure/embeddings.ts b/libs/providers/langchain-openai/src/azure/embeddings.ts index 70a6ffbc104c..1b807f01883e 100644 --- a/libs/providers/langchain-openai/src/azure/embeddings.ts +++ b/libs/providers/langchain-openai/src/azure/embeddings.ts @@ -41,7 +41,7 @@ export class AzureOpenAIEmbeddings extends OpenAIEmbeddings { this.batchSize = fields?.batchSize ?? 1; this.azureOpenAIApiKey = fields?.azureOpenAIApiKey ?? - fields?.apiKey ?? + (typeof fields?.apiKey === "string" ? 
fields?.apiKey : undefined) ?? getEnvironmentVariable("AZURE_OPENAI_API_KEY"); this.azureOpenAIApiVersion = @@ -81,8 +81,9 @@ export class AzureOpenAIEmbeddings extends OpenAIEmbeddings { const endpoint = getEndpoint(openAIEndpointConfig); - const params = { - ...this.clientConfig, + const { apiKey: existingApiKey, ...clientConfigRest } = this.clientConfig; + const params: Omit<ClientOptions, "apiKey"> & { apiKey?: string } = { + ...clientConfigRest, baseURL: endpoint, timeout: this.timeout, maxRetries: 0, diff --git a/libs/providers/langchain-openai/src/azure/llms.ts b/libs/providers/langchain-openai/src/azure/llms.ts index c22f1fbc9ce2..a7167d23569e 100644 --- a/libs/providers/langchain-openai/src/azure/llms.ts +++ b/libs/providers/langchain-openai/src/azure/llms.ts @@ -70,8 +70,10 @@ export class AzureOpenAI extends OpenAI { this.azureOpenAIApiKey = fields?.azureOpenAIApiKey ?? - fields?.openAIApiKey ?? - fields?.apiKey ?? + (typeof fields?.openAIApiKey === "string" + ? fields?.openAIApiKey + : undefined) ?? + (typeof fields?.apiKey === "string" ? fields?.apiKey : undefined) ?? 
getEnvironmentVariable("AZURE_OPENAI_API_KEY"); this.azureOpenAIApiInstanceName = @@ -113,8 +115,9 @@ export class AzureOpenAI extends OpenAI { const endpoint = getEndpoint(openAIEndpointConfig); - const params = { - ...this.clientConfig, + const { apiKey: existingApiKey, ...clientConfigRest } = this.clientConfig; + const params: Omit<ClientOptions, "apiKey"> & { apiKey?: string } = { + ...clientConfigRest, baseURL: endpoint, timeout: this.timeout, maxRetries: 0, diff --git a/libs/providers/langchain-openai/src/chat_models/base.ts b/libs/providers/langchain-openai/src/chat_models/base.ts index 594a0b53f735..c2a1e5ef455b 100644 --- a/libs/providers/langchain-openai/src/chat_models/base.ts +++ b/libs/providers/langchain-openai/src/chat_models/base.ts @@ -39,6 +39,7 @@ import { type ChatOpenAIResponseFormat, ResponseFormatConfiguration, OpenAIVerbosityParam, + type OpenAIApiKey, } from "../types.js"; import { type OpenAIEndpointConfig, getEndpoint } from "../utils/azure.js"; import { @@ -245,7 +246,7 @@ export abstract class BaseChatOpenAI< topLogprobs?: number; - apiKey?: string; + apiKey?: OpenAIApiKey; organization?: string; @@ -420,7 +421,8 @@ export abstract class BaseChatOpenAI< super(fields ?? {}); const configApiKey = - typeof fields?.configuration?.apiKey === "string" + typeof fields?.configuration?.apiKey === "string" || + typeof fields?.configuration?.apiKey === "function" ? fields?.configuration?.apiKey : undefined; this.apiKey = @@ -615,7 +617,7 @@ export abstract class BaseChatOpenAI< : this._convertChatOpenAIToolToCompletionsTool(tool, { strict }) ), ...kwargs, - }); + } as Partial<CallOptions>); } override async stream(input: BaseLanguageModelInput, options?: CallOptions) { @@ -894,7 +896,7 @@ export abstract class BaseChatOpenAI< kwargs: { method: "json_mode" }, schema: { title: name ?? "extract", ...asJsonSchema }, }, - }); + } as Partial<CallOptions>); } else if (method === "jsonSchema") { const openaiJsonSchemaParams = { name: name ?? 
"extract", @@ -917,7 +919,7 @@ export abstract class BaseChatOpenAI< ...asJsonSchema, }, }, - }); + } as Partial); if (isInteropZodSchema(schema)) { const altParser = StructuredOutputParser.fromZodSchema(schema); outputParser = RunnableLambda.from( @@ -960,7 +962,7 @@ export abstract class BaseChatOpenAI< }, // Do not pass `strict` argument to OpenAI if `config.strict` is undefined ...(config?.strict !== undefined ? { strict: config.strict } : {}), - }); + } as Partial); outputParser = new JsonOutputKeyToolsParser({ returnSingle: true, keyName: functionName, @@ -1004,7 +1006,7 @@ export abstract class BaseChatOpenAI< }, // Do not pass `strict` argument to OpenAI if `config.strict` is undefined ...(config?.strict !== undefined ? { strict: config.strict } : {}), - }); + } as Partial); outputParser = new JsonOutputKeyToolsParser({ returnSingle: true, keyName: functionName, diff --git a/libs/providers/langchain-openai/src/embeddings.ts b/libs/providers/langchain-openai/src/embeddings.ts index 6394f4076aaf..550b8eef8f37 100644 --- a/libs/providers/langchain-openai/src/embeddings.ts +++ b/libs/providers/langchain-openai/src/embeddings.ts @@ -2,6 +2,7 @@ import { type ClientOptions, OpenAI as OpenAIClient } from "openai"; import { getEnvironmentVariable } from "@langchain/core/utils/env"; import { Embeddings, type EmbeddingsParams } from "@langchain/core/embeddings"; import { chunkArray } from "@langchain/core/utils/chunk_array"; +import type { OpenAIApiKey } from "./types.js"; import { getEndpoint, OpenAIEndpointConfig } from "./utils/azure.js"; import { wrapOpenAIClientError } from "./utils/client.js"; @@ -102,6 +103,8 @@ export class OpenAIEmbeddings protected clientConfig: ClientOptions; + protected apiKey?: OpenAIApiKey; + constructor( fields?: Partial & { verbose?: boolean; @@ -109,9 +112,9 @@ export class OpenAIEmbeddings * The OpenAI API key to use. * Alias for `apiKey`. */ - openAIApiKey?: string; + openAIApiKey?: OpenAIApiKey; /** The OpenAI API key to use. 
*/ - apiKey?: string; + apiKey?: OpenAIApiKey; configuration?: ClientOptions; } ) { diff --git a/libs/providers/langchain-openai/src/llms.ts b/libs/providers/langchain-openai/src/llms.ts index b918a2908b38..34f31bd34c48 100644 --- a/libs/providers/langchain-openai/src/llms.ts +++ b/libs/providers/langchain-openai/src/llms.ts @@ -10,6 +10,7 @@ import { } from "@langchain/core/language_models/llms"; import { chunkArray } from "@langchain/core/utils/chunk_array"; import type { + OpenAIApiKey, OpenAICallOptions, OpenAICoreRequestOptions, OpenAIInput, @@ -121,9 +122,9 @@ export class OpenAI streaming = false; - openAIApiKey?: string; + openAIApiKey?: OpenAIApiKey; - apiKey?: string; + apiKey?: OpenAIApiKey; organization?: string; diff --git a/libs/providers/langchain-openai/src/types.ts b/libs/providers/langchain-openai/src/types.ts index 47b9ad576ce6..22e041cb30a4 100644 --- a/libs/providers/langchain-openai/src/types.ts +++ b/libs/providers/langchain-openai/src/types.ts @@ -1,4 +1,4 @@ -import type { OpenAI as OpenAIClient } from "openai"; +import type { OpenAI as OpenAIClient, ClientOptions } from "openai"; import type { ResponseFormatText, ResponseFormatJSONObject, @@ -24,6 +24,8 @@ export type OpenAIChatModelId = export type OpenAIVerbosityParam = "low" | "medium" | "high" | null; +export type OpenAIApiKey = ClientOptions["apiKey"]; + export declare interface OpenAIBaseInput { /** Sampling temperature to use */ temperature: number; @@ -103,12 +105,12 @@ export declare interface OpenAIBaseInput { * `OPENAI_API_KEY` environment variable. * Alias for `apiKey` */ - openAIApiKey?: string; + openAIApiKey?: OpenAIApiKey; /** * API key to use when making requests to OpenAI. Defaults to the value of * `OPENAI_API_KEY` environment variable. */ - apiKey?: string; + apiKey?: OpenAIApiKey; /** * The verbosity of the model's response. 
@@ -285,7 +287,6 @@ export interface AzureOpenAIInput { export interface AzureOpenAIChatInput extends OpenAIChatInput, AzureOpenAIInput { - openAIApiKey?: string; openAIApiVersion?: string; openAIBasePath?: string; deploymentName?: string;