Skip to content

Commit 76cc39b

Browse files
committed
allow free modelClientOptions
1 parent 455b61f commit 76cc39b

File tree

8 files changed

+35
-37
lines changed

8 files changed

+35
-37
lines changed

lib/index.ts

Lines changed: 3 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -595,14 +595,11 @@ export class Stagehand {
595595
// Temporary add for legacy providers
596596
modelApiKey =
597597
LLMProvider.getModelProvider(this.modelName) === "openai"
598-
? process.env.OPENAI_API_KEY ||
599-
this.llmClient?.clientOptions?.apiKey
598+
? process.env.OPENAI_API_KEY
600599
: LLMProvider.getModelProvider(this.modelName) === "anthropic"
601-
? process.env.ANTHROPIC_API_KEY ||
602-
this.llmClient?.clientOptions?.apiKey
600+
? process.env.ANTHROPIC_API_KEY
603601
: LLMProvider.getModelProvider(this.modelName) === "google"
604-
? process.env.GOOGLE_API_KEY ||
605-
this.llmClient?.clientOptions?.apiKey
602+
? process.env.GOOGLE_API_KEY
606603
: undefined;
607604
}
608605
this.modelClientOptions = {

lib/llm/AnthropicClient.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import { CreateChatCompletionResponseError } from "@/types/stagehandErrors";
12
import Anthropic, { ClientOptions } from "@anthropic-ai/sdk";
23
import {
34
ImageBlockParam,
@@ -14,14 +15,13 @@ import {
1415
LLMClient,
1516
LLMResponse,
1617
} from "./LLMClient";
17-
import { CreateChatCompletionResponseError } from "@/types/stagehandErrors";
1818

1919
export class AnthropicClient extends LLMClient {
2020
public type = "anthropic" as const;
2121
private client: Anthropic;
2222
private cache: LLMCache | undefined;
2323
private enableCaching: boolean;
24-
public clientOptions: ClientOptions;
24+
public clientOptions?: ClientOptions;
2525

2626
constructor({
2727
enableCaching = false,

lib/llm/CerebrasClient.ts

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
import OpenAI from "openai";
1+
import { CreateChatCompletionResponseError } from "@/types/stagehandErrors";
22
import type { ClientOptions } from "openai";
3+
import OpenAI from "openai";
34
import { zodToJsonSchema } from "zod-to-json-schema";
45
import { LogLine } from "../../types/log";
56
import { AvailableModel } from "../../types/model";
@@ -10,14 +11,13 @@ import {
1011
LLMClient,
1112
LLMResponse,
1213
} from "./LLMClient";
13-
import { CreateChatCompletionResponseError } from "@/types/stagehandErrors";
1414

1515
export class CerebrasClient extends LLMClient {
1616
public type = "cerebras" as const;
1717
private client: OpenAI;
1818
private cache: LLMCache | undefined;
1919
private enableCaching: boolean;
20-
public clientOptions: ClientOptions;
20+
public clientOptions?: ClientOptions;
2121
public hasVision = false;
2222

2323
constructor({
@@ -31,7 +31,7 @@ export class CerebrasClient extends LLMClient {
3131
enableCaching?: boolean;
3232
cache?: LLMCache;
3333
modelName: AvailableModel;
34-
clientOptions?: ClientOptions;
34+
clientOptions?: OpenAI.ClientOptions;
3535
userProvidedInstructions?: string;
3636
}) {
3737
super(modelName, userProvidedInstructions);

lib/llm/LLMClient.ts

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,18 @@
1-
import { ZodType } from "zod/v3";
2-
import { LLMTool } from "../../types/llm";
3-
import { LogLine } from "../../types/log";
4-
import { AvailableModel, ClientOptions } from "../../types/model";
51
import {
6-
generateObject,
7-
generateText,
8-
streamText,
9-
streamObject,
10-
experimental_generateImage,
112
embed,
123
embedMany,
13-
experimental_transcribe,
4+
experimental_generateImage,
145
experimental_generateSpeech,
6+
experimental_transcribe,
7+
generateObject,
8+
generateText,
9+
streamObject,
10+
streamText,
1511
} from "ai";
12+
import { ZodType } from "zod/v3";
13+
import { LLMTool } from "../../types/llm";
14+
import { LogLine } from "../../types/log";
15+
import { AvailableModel } from "../../types/model";
1616

1717
export interface ChatMessage {
1818
role: "system" | "user" | "assistant";
@@ -100,7 +100,6 @@ export abstract class LLMClient {
100100
public type: "openai" | "anthropic" | "cerebras" | "groq" | (string & {});
101101
public modelName: AvailableModel | (string & {});
102102
public hasVision: boolean;
103-
public clientOptions: ClientOptions;
104103
public userProvidedInstructions?: string;
105104

106105
constructor(modelName: AvailableModel, userProvidedInstructions?: string) {

lib/llm/LLMProvider.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -97,18 +97,18 @@ const modelToProviderMap: { [key in AvailableModel]: ModelProvider } = {
9797
export function getAISDKLanguageModel(
9898
subProvider: string,
9999
subModelName: string,
100-
apiKey?: string,
100+
modelClientOptions?: ClientOptions,
101101
) {
102-
if (apiKey) {
102+
if (modelClientOptions && Object.keys(modelClientOptions).length > 0) {
103103
const creator = AISDKProvidersWithAPIKey[subProvider];
104104
if (!creator) {
105105
throw new UnsupportedAISDKModelProviderError(
106106
subProvider,
107107
Object.keys(AISDKProvidersWithAPIKey),
108108
);
109109
}
110-
// Create the provider instance with the API key
111-
const provider = creator({ apiKey });
110+
// Create the provider instance with the custom configuration options
111+
const provider = creator(modelClientOptions);
112112
// Get the specific model from the provider
113113
return provider(subModelName);
114114
} else {
@@ -165,7 +165,7 @@ export class LLMProvider {
165165
const languageModel = getAISDKLanguageModel(
166166
subProvider,
167167
subModelName,
168-
clientOptions?.apiKey,
168+
clientOptions,
169169
);
170170

171171
return new AISdkClient({

lib/llm/OpenAIClient.ts

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,8 @@
1+
import {
2+
CreateChatCompletionResponseError,
3+
StagehandError,
4+
ZodSchemaValidationError,
5+
} from "@/types/stagehandErrors";
16
import OpenAI, { ClientOptions } from "openai";
27
import { zodResponseFormat } from "openai/helpers/zod";
38
import {
@@ -21,11 +26,6 @@ import {
2126
LLMClient,
2227
LLMResponse,
2328
} from "./LLMClient";
24-
import {
25-
CreateChatCompletionResponseError,
26-
StagehandError,
27-
ZodSchemaValidationError,
28-
} from "@/types/stagehandErrors";
2929

3030
export class OpenAIClient extends LLMClient {
3131
public type = "openai" as const;

types/llm.ts

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { LanguageModel } from "ai";
2+
import { ClientOptions } from "./model";
23

34
export interface LLMTool {
45
type: "function";
@@ -9,6 +10,4 @@ export interface LLMTool {
910

1011
export type AISDKProvider = (modelName: string) => LanguageModel;
1112
// Represents a function that takes options (like apiKey) and returns an AISDKProvider
12-
export type AISDKCustomProvider = (options: {
13-
apiKey: string;
14-
}) => AISDKProvider;
13+
export type AISDKCustomProvider = (options: ClientOptions) => AISDKProvider;

types/model.ts

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,10 @@ export type ModelProvider =
4444
| "google"
4545
| "aisdk";
4646

47-
export type ClientOptions = OpenAIClientOptions | AnthropicClientOptions;
47+
export type ClientOptions =
48+
| Record<string, string>
49+
| OpenAIClientOptions
50+
| AnthropicClientOptions;
4851

4952
export interface AnthropicJsonSchemaObject {
5053
definitions?: {

0 commit comments

Comments (0)