Skip to content
This repository was archived by the owner on Feb 23, 2026. It is now read-only.

Commit 3976701

Browse files
authored
Merge pull request #59 from runbasehq/dev
feat: add provider name prefix to model identifiers
2 parents e6967bf + 512cc57 commit 3976701

File tree

9 files changed

+159
-66
lines changed

9 files changed

+159
-66
lines changed

packages/mcp-check/CHANGELOG.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,11 @@
11
# mcp-testing-library
22

3+
## 0.4.3
4+
5+
### Patch Changes
6+
7+
- feat: add provider name prefix to model identifiers
8+
39
## 0.4.2
410

511
### Patch Changes

packages/mcp-check/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
{
22
"name": "mcp-check",
33
"module": "dist/src/index.js",
4-
"version": "0.4.2",
4+
"version": "0.4.3",
55
"type": "module",
66
"main": "dist/src/index.js",
77
"types": "dist/src/index.d.ts",

packages/mcp-check/src/chunks/types.ts

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,9 @@
1-
import type { BetaRawContentBlockDeltaEvent, BetaRawContentBlockStartEvent, BetaRawContentBlockStopEvent, BetaRawMessageStreamEvent } from "@anthropic-ai/sdk/resources/beta.js";
1+
import type {
2+
BetaRawContentBlockDeltaEvent,
3+
BetaRawContentBlockStartEvent,
4+
BetaRawContentBlockStopEvent,
5+
BetaRawMessageStreamEvent,
6+
} from "@anthropic-ai/sdk/resources/beta.js";
27
import type OpenAI from "openai";
38

49
export interface ToolCall {
@@ -273,7 +278,7 @@ export interface NormalizedChunkAnthropic extends BaseNormalizedChunk {
273278
}
274279

275280
export interface NormalizedChunkOpenAI extends BaseNormalizedChunk {
276-
provider: "openai";
281+
provider: "openai" | "openrouter";
277282
originalChunk: OpenAI.Responses.ResponseStreamEvent;
278283
}
279284

@@ -363,17 +368,27 @@ export interface ChunkHandlerConfig {
363368
/** Provider-specific handlers for Anthropic */
364369
anthropic?: {
365370
/** Handler for Anthropic content block delta chunks */
366-
onContentBlockDelta?: (chunk: BetaRawContentBlockDeltaEvent) => void | Promise<void>;
371+
onContentBlockDelta?: (
372+
chunk: BetaRawContentBlockDeltaEvent,
373+
) => void | Promise<void>;
367374
/** Handler for Anthropic content block start chunks */
368-
onContentBlockStart?: (chunk: BetaRawContentBlockStartEvent) => void | Promise<void>;
375+
onContentBlockStart?: (
376+
chunk: BetaRawContentBlockStartEvent,
377+
) => void | Promise<void>;
369378
/** Handler for Anthropic content block stop chunks */
370-
onContentBlockStop?: (chunk: BetaRawContentBlockStopEvent) => void | Promise<void>;
379+
onContentBlockStop?: (
380+
chunk: BetaRawContentBlockStopEvent,
381+
) => void | Promise<void>;
371382
};
372383
/** Provider-specific handlers for OpenAI */
373384
openai?: {
374385
/** Handler for OpenAI response output item added chunks */
375-
onResponseOutputItemAdded?: (chunk: OpenAI.Responses.ResponseOutputItemAddedEvent) => void | Promise<void>;
386+
onResponseOutputItemAdded?: (
387+
chunk: OpenAI.Responses.ResponseOutputItemAddedEvent,
388+
) => void | Promise<void>;
376389
/** Handler for OpenAI response output item done chunks */
377-
onResponseOutputItemDone?: (chunk: OpenAI.Responses.ResponseOutputItemDoneEvent) => void | Promise<void>;
390+
onResponseOutputItemDone?: (
391+
chunk: OpenAI.Responses.ResponseOutputItemDoneEvent,
392+
) => void | Promise<void>;
378393
};
379394
}

packages/mcp-check/src/providers/anthropic.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -112,7 +112,7 @@ export class AnthropicProvider extends Provider {
112112
this.currentModel = model;
113113

114114
const stream = this.client.beta.messages.stream({
115-
model: model as Anthropic.Model,
115+
model: model.replace("anthropic/", "") as Anthropic.Model,
116116
max_tokens: 1000,
117117
messages: [
118118
{

packages/mcp-check/src/providers/index.ts

Lines changed: 37 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -5,17 +5,18 @@ import { AnthropicProvider } from "./anthropic.js";
55
import { OpenAIProvider } from "./openai.js";
66
import type { ProviderConfig } from "./types.js";
77
import { Provider } from "./provider.js";
8+
import { type OpenRouterModel } from "./openrouter.js";
89

910
/**
1011
* @fileoverview Provider factory and type exports for MCP AI providers.
11-
*
12+
*
1213
* This module provides a factory function to create appropriate provider instances
1314
* based on model names, and exports type definitions for model names and providers.
1415
*/
1516

1617
/**
1718
* Type alias for Anthropic Claude model names.
18-
*
19+
*
1920
* @example
2021
* ```typescript
2122
* const model: AnthropicModel = "claude-3-haiku-20240307";
@@ -25,72 +26,85 @@ export type { AnthropicModel } from "./anthropic.js";
2526

2627
/**
2728
* Type alias for OpenAI model names.
28-
*
29+
*
2930
* @example
3031
* ```typescript
3132
* const model: OpenAIModel = "gpt-4";
3233
* ```
3334
*/
3435
export type { OpenAIModel } from "./openai.js";
3536

37+
/**
38+
* Type alias for OpenRouter model names.
39+
*
40+
* @example
41+
* ```typescript
42+
* const model: OpenRouterModel = "meta-llama/llama-3.3-70b-instruct:free";
43+
* ```
44+
*/
45+
export type { OpenRouterModel } from "./openrouter.js";
46+
3647
/**
3748
* Union type of all supported model names.
38-
*
49+
*
3950
* This type represents all available model identifiers that can be used
4051
* with the provider system.
41-
*
52+
*
4253
* @example
4354
* ```typescript
4455
* const models: ModelName[] = ["claude-3-haiku-20240307", "gpt-4"];
4556
* ```
4657
*/
47-
export type ModelName = AnthropicModel | OpenAIModel;
58+
export type ModelName =
59+
| `anthropic/${AnthropicModel}`
60+
| `openai/${OpenAIModel}`
61+
| `openrouter/${OpenRouterModel}`;
4862

4963
/**
5064
* Array type for model name collections.
51-
*
65+
*
5266
* @example
5367
* ```typescript
54-
* const models: Models = ["claude-3-sonnet-20240229", "gpt-3.5-turbo"];
68+
* const models: Models = ["anthropic/claude-3-sonnet-20240229", "openai/gpt-3.5-turbo"];
5569
* ```
5670
*/
5771
export type Models = ModelName[];
5872

5973
/**
6074
* Factory function to create appropriate provider instances based on model names.
61-
*
75+
*
6276
* This function automatically determines the correct provider (Anthropic or OpenAI)
6377
* based on the model name prefix and creates a configured provider instance.
64-
*
78+
*
6579
* @param model - The model name to create a provider for
6680
* @param mcpServer - The MCP server configuration
6781
* @param promptText - The prompt text to send to the model
6882
* @param config - Optional provider configuration (API keys, silent mode, etc.)
6983
* @returns A configured provider instance for the specified model
70-
*
84+
*
7185
* @throws {Error} When the model name is not recognized or supported
72-
*
86+
*
7387
* @example
7488
* ```typescript
7589
* // Create provider for Anthropic model
7690
* const anthropicProvider = createProvider(
77-
* "claude-3-haiku-20240307",
91+
* "anthropic/claude-3-haiku-20240307",
7892
* mcpServer,
7993
* "What tools are available?",
8094
* { anthropicApiKey: process.env.ANTHROPIC_API_KEY }
8195
* );
82-
*
96+
*
8397
* // Create provider for OpenAI model
8498
* const openaiProvider = createProvider(
85-
* "gpt-4",
99+
* "openai/gpt-4",
86100
* mcpServer,
87101
* "What tools are available?",
88102
* { openaiApiKey: process.env.OPENAI_API_KEY }
89103
* );
90-
*
104+
*
91105
* // Use the providers
92-
* const anthropicResult = await anthropicProvider.stream("claude-3-haiku-20240307");
93-
* const openaiResult = await openaiProvider.stream("gpt-4");
106+
* const anthropicResult = await anthropicProvider.stream("anthropic/claude-3-haiku-20240307");
107+
* const openaiResult = await openaiProvider.stream("openai/gpt-4");
94108
* ```
95109
*/
96110
export function createProvider(
@@ -99,15 +113,16 @@ export function createProvider(
99113
promptText: string,
100114
config: ProviderConfig = {},
101115
): Provider {
102-
// Anthropic models
103-
if (model.startsWith("claude-")) {
116+
if (model.startsWith("anthropic/")) {
104117
return new AnthropicProvider(mcpServer, promptText, config);
105118
}
106119

107-
// OpenAI models
108-
if (model.startsWith("gpt-") || model.startsWith("o") || model.startsWith("chatgpt-") || model.startsWith("codex-")) {
120+
if (model.startsWith("openai/")) {
109121
return new OpenAIProvider(mcpServer, promptText, config);
110122
}
111123

124+
if (model.startsWith("openrouter/")) {
125+
}
126+
112127
throw new Error(`Error: unknown provider for model: ${model}.`);
113128
}

packages/mcp-check/src/providers/openai.ts

Lines changed: 34 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -7,10 +7,10 @@ import type { McpServer } from "../index.js";
77

88
/**
99
* Type alias for OpenAI model names.
10-
*
10+
*
1111
* This type represents all available OpenAI model identifiers
1212
* that can be used with the OpenAIProvider.
13-
*
13+
*
1414
* @example
1515
* ```typescript
1616
* const model: OpenAIModel = "gpt-4";
@@ -20,18 +20,18 @@ export type OpenAIModel = ChatModel;
2020

2121
/**
2222
* Provider for OpenAI models.
23-
*
23+
*
2424
* This class handles interactions with OpenAI's models through their
2525
* official SDK. It supports streaming responses, MCP tool calls,
2626
* and chunk normalization for the OpenAI API format.
27-
*
27+
*
2828
* @example
2929
* ```typescript
3030
* const provider = new OpenAIProvider(mcpServer, "What tools are available?", {
3131
* openaiApiKey: process.env.OPENAI_API_KEY,
3232
* silent: true
3333
* });
34-
*
34+
*
3535
* const result = await provider.stream("gpt-4");
3636
* console.log("Content:", result.content);
3737
* console.log("Used tools:", result.usedTools);
@@ -51,11 +51,11 @@ export class OpenAIProvider extends Provider {
5151

5252
/**
5353
* Creates a new OpenAIProvider instance.
54-
*
54+
*
5555
* @param mcpServer - The MCP server configuration
5656
* @param promptText - The prompt text to send to the OpenAI model
5757
* @param config - Optional provider configuration including API key
58-
*
58+
*
5959
* @example
6060
* ```typescript
6161
* const provider = new OpenAIProvider(mcpServer, "Hello GPT!", {
@@ -67,24 +67,28 @@ export class OpenAIProvider extends Provider {
6767
* });
6868
* ```
6969
*/
70-
constructor(mcpServer: McpServer, promptText: string, config: ProviderConfig = {}) {
70+
constructor(
71+
mcpServer: McpServer,
72+
promptText: string,
73+
config: ProviderConfig = {},
74+
) {
7175
super(mcpServer, promptText, config);
7276
const apiKey = config.openaiApiKey || process.env.OPENAI_API_KEY;
7377
this.client = apiKey ? new OpenAI({ apiKey }) : null;
7478
}
7579

7680
/**
7781
* Streams a response from the specified OpenAI model.
78-
*
82+
*
7983
* This method establishes a streaming connection to the OpenAI API,
8084
* processes the response chunks, tracks tool usage, and returns the
8185
* final result with content and tool call information.
82-
*
86+
*
8387
* @param model - The OpenAI model name to use (e.g., "gpt-4", "gpt-3.5-turbo")
8488
* @returns Promise that resolves to a StreamResult containing the response
85-
*
89+
*
8690
* @throws {Error} When the OpenAI client is not initialized (missing API key)
87-
*
91+
*
8892
* @example
8993
* ```typescript
9094
* const result = await provider.stream("gpt-4");
@@ -96,7 +100,7 @@ export class OpenAIProvider extends Provider {
96100
async stream(model: string): Promise<StreamResult> {
97101
if (!this.client) {
98102
throw new Error(
99-
"OpenAI client not initialized. Please set OPENAI_API_KEY environment variable or pass openaiApiKey in config."
103+
"OpenAI client not initialized. Please set OPENAI_API_KEY environment variable or pass openaiApiKey in config.",
100104
);
101105
}
102106

@@ -106,7 +110,7 @@ export class OpenAIProvider extends Provider {
106110
this.currentModel = model;
107111

108112
const response = await this.client.responses.create({
109-
model: model as ChatModel,
113+
model: model.replace("openai/", "") as ChatModel,
110114
tools: [
111115
{
112116
type: "mcp",
@@ -155,9 +159,13 @@ export class OpenAIProvider extends Provider {
155159
if (item.type === "mcp_call") {
156160
const toolName = item.name;
157161
if (this.toolCalls[toolName] && this.toolCalls[toolName].length > 0) {
158-
const lastCall = this.toolCalls[toolName][this.toolCalls[toolName].length - 1];
162+
const lastCall =
163+
this.toolCalls[toolName][this.toolCalls[toolName].length - 1];
159164
if (lastCall) {
160-
lastCall.result = (item as any).result || { id: `result_${Date.now()}`, status: "completed" };
165+
lastCall.result = (item as any).result || {
166+
id: `result_${Date.now()}`,
167+
status: "completed",
168+
};
161169
}
162170
}
163171
}
@@ -166,18 +174,22 @@ export class OpenAIProvider extends Provider {
166174
await this.processChunk(chunk);
167175
}
168176

169-
return { usedTools: this.usedTools, content: this.content, toolCalls: this.toolCalls };
177+
return {
178+
usedTools: this.usedTools,
179+
content: this.content,
180+
toolCalls: this.toolCalls,
181+
};
170182
}
171183

172184
/**
173185
* Normalizes OpenAI-specific chunks into the unified NormalizedChunk format.
174-
*
186+
*
175187
* This method converts OpenAI's streaming response chunks into a standardized
176188
* format that can be processed by the chunk handling system.
177-
*
189+
*
178190
* @param chunk - The raw chunk from OpenAI's streaming API
179191
* @returns NormalizedChunk if the chunk can be normalized, null otherwise
180-
*
192+
*
181193
* @example
182194
* ```typescript
183195
* const normalized = this.normalizeChunk(openaiChunk);
@@ -287,7 +299,7 @@ export class OpenAIProvider extends Provider {
287299
type: "model_stream",
288300
model: this.currentModel,
289301
text: `Calling tool: ${toolName}\n`,
290-
}) + "\n"
302+
}) + "\n",
291303
);
292304
}
293305
}
@@ -299,7 +311,7 @@ export class OpenAIProvider extends Provider {
299311
type: "model_stream",
300312
model: this.currentModel,
301313
text: `Tool ${toolName} completed\n`,
302-
}) + "\n"
314+
}) + "\n",
303315
);
304316
}
305317
}

0 commit comments

Comments (0)