Skip to content

Commit d647735

Browse files
committed
feat: add ModelScope as a new LLM provider
Add ModelScope (modelscope.cn) as a provider for accessing open-source models including Qwen/Qwen3-14B, Qwen/Qwen3-32B, and Qwen/Qwen3-235B-A22B via an OpenAI-compatible API at api-inference.modelscope.cn/v1. Supports both a static model list and dynamic model discovery via the /v1/models endpoint.
1 parent 2e254ac commit d647735

File tree

4 files changed

+128
-0
lines changed

4 files changed

+128
-0
lines changed

.env.example

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,6 +83,10 @@ HYPERBOLIC_API_KEY=your_hyperbolic_api_key_here
8383
# Get your API key from: https://openrouter.ai/keys
8484
OPEN_ROUTER_API_KEY=your_openrouter_api_key_here
8585

86+
# ModelScope (Open-source model platform by Alibaba)
87+
# Get your API key from: https://modelscope.cn/my/myaccesstoken
88+
MODELSCOPE_API_KEY=your_modelscope_api_key_here
89+
8690
# ======================================
8791
# CUSTOM PROVIDER BASE URLS (Optional)
8892
# ======================================
Lines changed: 121 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,121 @@
1+
import { BaseProvider } from '~/lib/modules/llm/base-provider';
2+
import type { ModelInfo } from '~/lib/modules/llm/types';
3+
import type { IProviderSetting } from '~/types/model';
4+
import type { LanguageModelV1 } from 'ai';
5+
import { createOpenAI } from '@ai-sdk/openai';
6+
7+
export default class ModelScopeProvider extends BaseProvider {
8+
name = 'ModelScope';
9+
getApiKeyLink = 'https://modelscope.cn/my/myaccesstoken';
10+
11+
config = {
12+
apiTokenKey: 'MODELSCOPE_API_KEY',
13+
};
14+
15+
staticModels: ModelInfo[] = [
16+
// Qwen3-14B via ModelScope: 128k context
17+
{
18+
name: 'Qwen/Qwen3-14B',
19+
label: 'Qwen/Qwen3-14B',
20+
provider: 'ModelScope',
21+
maxTokenAllowed: 128000,
22+
},
23+
24+
// Qwen3-32B via ModelScope: 128k context
25+
{
26+
name: 'Qwen/Qwen3-32B',
27+
label: 'Qwen/Qwen3-32B',
28+
provider: 'ModelScope',
29+
maxTokenAllowed: 128000,
30+
},
31+
32+
// Qwen/Qwen3-235B-A22B via ModelScope: 128k context
33+
{
34+
name: 'Qwen/Qwen3-235B-A22B',
35+
label: 'Qwen/Qwen3-235B-A22B',
36+
provider: 'ModelScope',
37+
maxTokenAllowed: 128000,
38+
},
39+
];
40+
41+
async getDynamicModels(
42+
apiKeys?: Record<string, string>,
43+
settings?: IProviderSetting,
44+
serverEnv?: Record<string, string>,
45+
): Promise<ModelInfo[]> {
46+
const { apiKey } = this.getProviderBaseUrlAndKey({
47+
apiKeys,
48+
providerSettings: settings,
49+
serverEnv: serverEnv as any,
50+
defaultBaseUrlKey: '',
51+
defaultApiTokenKey: 'MODELSCOPE_API_KEY',
52+
});
53+
console.error(`apiKey: ${apiKey}`);
54+
55+
if (!apiKey) {
56+
return [];
57+
}
58+
59+
try {
60+
const response = await fetch('https://api-inference.modelscope.cn/v1/models', {
61+
headers: {
62+
Authorization: `Bearer ${apiKey}`,
63+
},
64+
signal: this.createTimeoutSignal(5000),
65+
});
66+
67+
if (!response.ok) {
68+
console.error(`ModelScope API error: ${response.statusText}`);
69+
return [];
70+
}
71+
72+
const data = (await response.json()) as any;
73+
const staticModelIds = this.staticModels.map((m) => m.name);
74+
75+
// Filter out models we already have in staticModels
76+
const dynamicModels =
77+
data.data
78+
?.filter((model: any) => !staticModelIds.includes(model.id))
79+
.map((m: any) => ({
80+
name: m.id,
81+
label: `${m.id} (Dynamic)`,
82+
provider: this.name,
83+
maxTokenAllowed: 64000, // Default, adjust per model if available
84+
maxCompletionTokens: 8192,
85+
})) || [];
86+
87+
return dynamicModels;
88+
} catch (error) {
89+
console.error(`Failed to fetch ModelScope models:`, error);
90+
return [];
91+
}
92+
}
93+
94+
getModelInstance(options: {
95+
model: string;
96+
serverEnv: Env;
97+
apiKeys?: Record<string, string>;
98+
providerSettings?: Record<string, IProviderSetting>;
99+
}): LanguageModelV1 {
100+
const { model, serverEnv, apiKeys, providerSettings } = options;
101+
102+
const { apiKey } = this.getProviderBaseUrlAndKey({
103+
apiKeys,
104+
providerSettings: providerSettings?.[this.name],
105+
serverEnv: serverEnv as any,
106+
defaultBaseUrlKey: '',
107+
defaultApiTokenKey: 'MODELSCOPE_API_KEY',
108+
});
109+
110+
if (!apiKey) {
111+
throw new Error(`Missing API key for ${this.name} provider`);
112+
}
113+
114+
const openai = createOpenAI({
115+
apiKey,
116+
baseURL: 'https://api-inference.modelscope.cn/v1/',
117+
});
118+
119+
return openai(model);
120+
}
121+
}

app/lib/modules/llm/registry.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ import AmazonBedrockProvider from './providers/amazon-bedrock';
2020
import GithubProvider from './providers/github';
2121
import MoonshotProvider from './providers/moonshot';
2222
import ZaiProvider from './providers/z-ai';
23+
import ModelScope from './providers/modelscope';
2324

2425
export {
2526
AnthropicProvider,
@@ -44,4 +45,5 @@ export {
4445
AmazonBedrockProvider,
4546
GithubProvider,
4647
ZaiProvider,
48+
ModelScope,
4749
};

worker-configuration.d.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,4 +19,5 @@ interface Env {
1919
XAI_API_KEY: string;
2020
PERPLEXITY_API_KEY: string;
2121
AWS_BEDROCK_CONFIG: string;
22+
MODELSCOPE_API_KEY: string;
2223
}

0 commit comments

Comments
 (0)