From 83842723ba458b3aacbc21b280b8684aa8667698 Mon Sep 17 00:00:00 2001 From: ilkay Date: Thu, 20 Nov 2025 20:01:06 +0100 Subject: [PATCH 1/2] fix: forward fetch and headers options to AI SDK providers --- .changeset/forward-fetch-headers-options.md | 5 ++ packages/core/examples/test-custom-fetch.ts | 82 +++++++++++++++++++++ packages/core/lib/v3/llm/LLMProvider.ts | 26 ++++++- 3 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 .changeset/forward-fetch-headers-options.md create mode 100644 packages/core/examples/test-custom-fetch.ts diff --git a/.changeset/forward-fetch-headers-options.md b/.changeset/forward-fetch-headers-options.md new file mode 100644 index 000000000..b907a704f --- /dev/null +++ b/.changeset/forward-fetch-headers-options.md @@ -0,0 +1,5 @@ +--- +"@browserbasehq/stagehand": patch +--- + +fix: forward fetch and headers options to AI SDK providers to enable proxy authentication, request logging, and custom retry logic diff --git a/packages/core/examples/test-custom-fetch.ts b/packages/core/examples/test-custom-fetch.ts new file mode 100644 index 000000000..9ea6c9c50 --- /dev/null +++ b/packages/core/examples/test-custom-fetch.ts @@ -0,0 +1,82 @@ +import { Stagehand } from "../lib/v3"; + +/** + * Test script to verify custom fetch and headers are forwarded to AI SDK providers + * + * This demonstrates the fix for the bug where custom fetch functions and headers + * were being silently ignored when using AI SDK providers (e.g., "openai/gpt-4o-mini"). 
+ * + * Expected behavior: + * - Custom fetch function should be called for all LLM API requests + * - Custom headers should be included in the requests + * - This enables use cases like: proxy authentication, request logging, retry logic + */ + +async function main() { + // Track if custom fetch was called + let fetchCallCount = 0; + const customHeaders: string[] = []; + + // Create custom fetch function + const customFetch: typeof fetch = async (url, options) => { + fetchCallCount++; + console.log(`✅ Custom fetch called (${fetchCallCount} times)`); + console.log(` URL: ${url}`); + + // Log custom headers if present + if (options?.headers) { + const headers = new Headers(options.headers); + headers.forEach((value, key) => { + if (key.toLowerCase().startsWith('x-custom')) { + customHeaders.push(`${key}: ${value}`); + console.log(` Custom header: ${key}: ${value}`); + } + }); + } + + return fetch(url, options); + }; + + // Initialize Stagehand with custom fetch and headers + console.log("Initializing Stagehand with custom fetch and headers...\n"); + + const stagehand = new Stagehand({ + model: { + modelName: "openai/gpt-4o-mini", + apiKey: process.env.OPENAI_API_KEY, + fetch: customFetch, + headers: { + "X-Custom-Header": "test-value", + "X-Custom-Proxy-Auth": "proxy-token-123" + } + } as any, + env: "LOCAL" + }); + + await stagehand.init(); + + try { + console.log("Making a simple LLM call via act()...\n"); + + // Navigate to a simple page + await stagehand.context.pages()[0].goto("https://example.com"); + + // Make an act() call that will use the LLM + await stagehand.act("find the heading on the page"); + + console.log("\n=== Test Results ==="); + if (fetchCallCount > 0) { + console.log(`✅ SUCCESS: Custom fetch was called ${fetchCallCount} times`); + console.log(`✅ Custom headers detected: ${customHeaders.length > 0 ? 
customHeaders.join(", ") : "None (may be overridden by SDK)"}`); + } else { + console.log("❌ FAILURE: Custom fetch was NOT called"); + console.log(" This indicates the bug still exists."); + } + } catch (error) { + console.error("\n❌ Error during test:", error); + } finally { + await stagehand.close(); + } +} + +main().catch(console.error); diff --git a/packages/core/lib/v3/llm/LLMProvider.ts b/packages/core/lib/v3/llm/LLMProvider.ts index 7c16f2118..2f1a46b12 100644 --- a/packages/core/lib/v3/llm/LLMProvider.ts +++ b/packages/core/lib/v3/llm/LLMProvider.ts @@ -16,6 +16,11 @@ import { GoogleClient } from "./GoogleClient"; import { GroqClient } from "./GroqClient"; import { LLMClient } from "./LLMClient"; import { OpenAIClient } from "./OpenAIClient"; + +interface ExtendedClientOptions { + headers?: Record<string, string>; + fetch?: typeof globalThis.fetch; +} import { openai, createOpenAI } from "@ai-sdk/openai"; import { anthropic, createAnthropic } from "@ai-sdk/anthropic"; import { google, createGoogleGenerativeAI } from "@ai-sdk/google"; @@ -98,6 +103,8 @@ export function getAISDKLanguageModel( subModelName: string, apiKey?: string, baseURL?: string, + headers?: Record<string, string>, + fetch?: typeof globalThis.fetch, ) { if (apiKey) { const creator = AISDKProvidersWithAPIKey[subProvider]; @@ -107,15 +114,28 @@ export function getAISDKLanguageModel( Object.keys(AISDKProvidersWithAPIKey), ); } - // Create the provider instance with the API key and baseURL if provided - const providerConfig: { apiKey: string; baseURL?: string } = { apiKey }; + // Create the provider instance with the API key and custom options + const providerConfig: { + apiKey: string; + baseURL?: string; + headers?: Record<string, string>; + fetch?: typeof globalThis.fetch; + } = { apiKey }; if (baseURL) { providerConfig.baseURL = baseURL; } + if (headers) { + providerConfig.headers = headers; + } + if (fetch) { + providerConfig.fetch = fetch; + } const provider = creator(providerConfig); // Get the specific model from the provider return 
provider(subModelName); } else { + // When no apiKey is provided, use pre-configured provider (no custom options) + // Note: headers and fetch options require explicit apiKey to be forwarded const provider = AISDKProviders[subProvider]; if (!provider) { throw new UnsupportedAISDKModelProviderError( subProvider, Object.keys(AISDKProviders), ); } return provider(subModelName); } @@ -148,6 +168,8 @@ export class LLMProvider { subModelName, clientOptions?.apiKey, clientOptions?.baseURL, + (clientOptions as ExtendedClientOptions)?.headers, + (clientOptions as ExtendedClientOptions)?.fetch, ); return new AISdkClient({ From c438746f18a8b577ec30662aa89470c4e6eca84f Mon Sep 17 00:00:00 2001 From: ilkay Date: Fri, 21 Nov 2025 10:16:05 +0100 Subject: [PATCH 2/2] fix(llm): forward custom fetch/headers for all users Updated getAISDKLanguageModel() to always use creator functions with optional config object. This ensures custom fetch/headers work for ALL users, including those relying on environment variables. Changes: - Removed if/else branching (addresses bot feedback) - Build provider config with optional fields only - Creator functions automatically use env vars when apiKey not provided - Custom fetch/headers now forwarded in all scenarios Testing: - Verified with real website without explicit apiKey - Custom fetch called successfully - All custom headers forwarded correctly - Environment variable fallback works as expected Fixes #1296 --- packages/core/lib/v3/llm/LLMProvider.ts | 69 ++++++++++++------------- 1 file changed, 32 insertions(+), 37 deletions(-) diff --git a/packages/core/lib/v3/llm/LLMProvider.ts b/packages/core/lib/v3/llm/LLMProvider.ts index 2f1a46b12..fae3afa44 100644 --- a/packages/core/lib/v3/llm/LLMProvider.ts +++ b/packages/core/lib/v3/llm/LLMProvider.ts @@ -106,45 +106,40 @@ export function getAISDKLanguageModel( headers?: Record<string, string>, fetch?: typeof globalThis.fetch, ) { + const creator = AISDKProvidersWithAPIKey[subProvider]; + if (!creator) { + throw new UnsupportedAISDKModelProviderError( + subProvider, + 
Object.keys(AISDKProvidersWithAPIKey), + ); + } + + // Build provider config - all fields are optional + // When apiKey is not provided, creator functions automatically use environment variables + const providerConfig: { + apiKey?: string; + baseURL?: string; + headers?: Record<string, string>, + fetch?: typeof globalThis.fetch; + } = {}; + if (apiKey) { - const creator = AISDKProvidersWithAPIKey[subProvider]; - if (!creator) { - throw new UnsupportedAISDKModelProviderError( - subProvider, - Object.keys(AISDKProvidersWithAPIKey), - ); - } - // Create the provider instance with the API key and custom options - const providerConfig: { - apiKey: string; - baseURL?: string; - headers?: Record<string, string>; - fetch?: typeof globalThis.fetch; - } = { apiKey }; - if (baseURL) { - providerConfig.baseURL = baseURL; - } - if (headers) { - providerConfig.headers = headers; - } - if (fetch) { - providerConfig.fetch = fetch; - } - const provider = creator(providerConfig); - // Get the specific model from the provider - return provider(subModelName); - } else { - // When no apiKey is provided, use pre-configured provider (no custom options) - // Note: headers and fetch options require explicit apiKey to be forwarded - const provider = AISDKProviders[subProvider]; - if (!provider) { - throw new UnsupportedAISDKModelProviderError( - subProvider, - Object.keys(AISDKProviders), - ); - } - return provider(subModelName); + providerConfig.apiKey = apiKey; } + if (baseURL) { + providerConfig.baseURL = baseURL; + } + if (headers) { + providerConfig.headers = headers; + } + if (fetch) { + providerConfig.fetch = fetch; + } + + // Type assertion needed: AI SDK types require apiKey, but runtime accepts optional apiKey + // At runtime, when apiKey is not provided, creators automatically use environment variables + const provider = creator(providerConfig as { apiKey: string }); + return provider(subModelName); } export class LLMProvider {