Skip to content

Commit ff870c0

Browse files
authored
Add new models: sonnet-4.5 and haiku-4.5 (#492)
* feat: Update chat models and implement migration from v11 to v12 - Updated default chat model ID to 'claude-sonnet-4.5' and added new recommended models. - Introduced migration logic to transition settings from version 11 to 12, merging existing chat models with new defaults and adding 'claude-sonnet-4.5' and 'claude-haiku-4.5'. - Removed outdated models: 'claude-sonnet-4.0', 'claude-3.7-sonnet', 'claude-3.5-sonnet', and 'claude-3.5-haiku'. - Added tests to verify migration functionality. * docs: Update README.md with maintenance notice and contribution invitation - Added a maintenance notice indicating the plugin is not under active development and may have delayed responses to issues and feature requests. - Included an invitation for community contributions and collaboration.
1 parent a364873 commit ff870c0

File tree

5 files changed

+335
-20
lines changed

5 files changed

+335
-20
lines changed

README.md

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,18 @@
88
<a href="https://github.com/glowingjade/obsidian-smart-composer/discussions">Discussions</a>
99
</p>
1010

11+
> [!WARNING]
12+
> **⚠️ Maintenance Notice**
13+
>
14+
> This plugin is currently maintained by a single developer and is **not under active development**.
15+
> Occasional updates or bug fixes may still be released, but **issues and feature requests may not be reviewed promptly**.
16+
>
17+
> **Interested in helping?**
18+
> If you'd like to contribute or are maintaining an updated fork, please get in touch.
19+
> I’d be happy to add collaborators or highlight community-maintained versions.
20+
1121
> [!NOTE]
12-
> **🚀 New Feature: Model Context Protocol (MCP) is now available!**
22+
> **🚀 New Feature: Model Context Protocol (MCP) is now available!**
1323
> You can now connect Smart Composer to external AI tools using the open MCP standard.
1424
1525
![SC1_Title.gif](https://github.com/user-attachments/assets/a50a1f80-39ff-4eba-8090-e3d75e7be98c)

src/constants.ts

Lines changed: 8 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -8,11 +8,11 @@ export const APPLY_VIEW_TYPE = 'smtcmp-apply-view'
88
export const PGLITE_DB_PATH = '.smtcmp_vector_db.tar.gz'
99

1010
// Default model ids
11-
export const DEFAULT_CHAT_MODEL_ID = 'claude-sonnet-4.0'
11+
export const DEFAULT_CHAT_MODEL_ID = 'claude-sonnet-4.5'
1212
export const DEFAULT_APPLY_MODEL_ID = 'gpt-4.1-mini'
1313

1414
// Recommended model ids
15-
export const RECOMMENDED_MODELS_FOR_CHAT = ['claude-sonnet-4.0', 'gpt-4.1']
15+
export const RECOMMENDED_MODELS_FOR_CHAT = ['claude-sonnet-4.5', 'gpt-4.1']
1616
export const RECOMMENDED_MODELS_FOR_APPLY = ['gpt-4.1-mini']
1717
export const RECOMMENDED_MODELS_FOR_EMBEDDING = [
1818
'openai/text-embedding-3-small',
@@ -224,12 +224,6 @@ export const DEFAULT_PROVIDERS: readonly LLMProvider[] = [
224224
* 2. If there's a model with the same id in the user's settings, its data should be overwritten by the default model
225225
*/
226226
export const DEFAULT_CHAT_MODELS: readonly ChatModel[] = [
227-
{
228-
providerType: 'anthropic',
229-
providerId: PROVIDER_TYPES_INFO.anthropic.defaultProviderId,
230-
id: 'claude-sonnet-4.0',
231-
model: 'claude-sonnet-4-0',
232-
},
233227
{
234228
providerType: 'anthropic',
235229
providerId: PROVIDER_TYPES_INFO.anthropic.defaultProviderId,
@@ -239,20 +233,14 @@ export const DEFAULT_CHAT_MODELS: readonly ChatModel[] = [
239233
{
240234
providerType: 'anthropic',
241235
providerId: PROVIDER_TYPES_INFO.anthropic.defaultProviderId,
242-
id: 'claude-3.7-sonnet',
243-
model: 'claude-3-7-sonnet-latest',
244-
},
245-
{
246-
providerType: 'anthropic',
247-
providerId: PROVIDER_TYPES_INFO.anthropic.defaultProviderId,
248-
id: 'claude-3.5-sonnet',
249-
model: 'claude-3-5-sonnet-latest',
236+
id: 'claude-sonnet-4.5',
237+
model: 'claude-sonnet-4-5',
250238
},
251239
{
252240
providerType: 'anthropic',
253241
providerId: PROVIDER_TYPES_INFO.anthropic.defaultProviderId,
254-
id: 'claude-3.5-haiku',
255-
model: 'claude-3-5-haiku-latest',
242+
id: 'claude-haiku-4.5',
243+
model: 'claude-haiku-4-5',
256244
},
257245
{
258246
providerType: 'openai',
@@ -492,9 +480,11 @@ export const OPENAI_PRICES: Record<string, ModelPricing> = {
492480
export const ANTHROPIC_PRICES: Record<string, ModelPricing> = {
493481
'claude-opus-4-1': { input: 15, output: 75 },
494482
'claude-opus-4-0': { input: 15, output: 75 },
483+
'claude-sonnet-4-5': { input: 3, output: 15 },
495484
'claude-sonnet-4-0': { input: 3, output: 15 },
496485
'claude-3-5-sonnet-latest': { input: 3, output: 15 },
497486
'claude-3-7-sonnet-latest': { input: 3, output: 15 },
487+
'claude-haiku-4-5': { input: 1, output: 5 },
498488
'claude-3-5-haiku-latest': { input: 1, output: 5 },
499489
}
500490

Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
import { DEFAULT_CHAT_MODELS_V12, migrateFrom11To12 } from './11_to_12'
2+
3+
describe('Migration from v11 to v12', () => {
4+
it('should increment version to 12', () => {
5+
const oldSettings = {
6+
version: 11,
7+
}
8+
const result = migrateFrom11To12(oldSettings)
9+
expect(result.version).toBe(12)
10+
})
11+
12+
it('should merge existing chat models with new default models', () => {
13+
const oldSettings = {
14+
version: 11,
15+
chatModels: [
16+
{
17+
id: 'gpt-4o',
18+
providerType: 'openai',
19+
providerId: 'openai',
20+
model: 'gpt-4o',
21+
enable: false,
22+
},
23+
{
24+
id: 'custom-model',
25+
providerType: 'custom',
26+
providerId: 'custom',
27+
model: 'custom-model',
28+
},
29+
],
30+
}
31+
const result = migrateFrom11To12(oldSettings)
32+
33+
expect(result.chatModels).toEqual([
34+
...DEFAULT_CHAT_MODELS_V12.map((model) =>
35+
model.id === 'gpt-4o'
36+
? {
37+
...model,
38+
enable: false,
39+
}
40+
: model,
41+
),
42+
{
43+
id: 'custom-model',
44+
providerType: 'custom',
45+
providerId: 'custom',
46+
model: 'custom-model',
47+
},
48+
])
49+
})
50+
51+
it('should add new Claude Sonnet 4.5 and Haiku 4.5 models', () => {
52+
const oldSettings = {
53+
version: 11,
54+
chatModels: [
55+
{
56+
id: 'gpt-4o',
57+
providerType: 'openai',
58+
providerId: 'openai',
59+
model: 'gpt-4o',
60+
},
61+
],
62+
}
63+
const result = migrateFrom11To12(oldSettings)
64+
65+
const chatModels = result.chatModels as { id: string }[]
66+
const sonnet45 = chatModels.find((m) => m.id === 'claude-sonnet-4.5')
67+
const haiku45 = chatModels.find((m) => m.id === 'claude-haiku-4.5')
68+
69+
expect(sonnet45).toBeDefined()
70+
expect(sonnet45).toEqual({
71+
id: 'claude-sonnet-4.5',
72+
providerType: 'anthropic',
73+
providerId: 'anthropic',
74+
model: 'claude-sonnet-4-5',
75+
})
76+
77+
expect(haiku45).toBeDefined()
78+
expect(haiku45).toEqual({
79+
id: 'claude-haiku-4.5',
80+
providerType: 'anthropic',
81+
providerId: 'anthropic',
82+
model: 'claude-haiku-4-5',
83+
})
84+
})
85+
})
Lines changed: 224 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,224 @@
1+
import { SettingMigration } from '../setting.types'
2+
3+
import { getMigratedChatModels } from './migrationUtils'
4+
5+
/**
6+
* Migration from version 11 to version 12
7+
* - Add following models:
8+
* - claude-sonnet-4.5
9+
* - claude-haiku-4.5
10+
* - Remove following models:
11+
* - claude-sonnet-4.0
12+
* - claude-3.7-sonnet
13+
* - claude-3.5-sonnet
14+
* - claude-3.5-haiku
15+
*/
16+
export const migrateFrom11To12: SettingMigration['migrate'] = (data) => {
17+
const newData = { ...data }
18+
newData.version = 12
19+
20+
newData.chatModels = getMigratedChatModels(newData, DEFAULT_CHAT_MODELS_V12)
21+
22+
return newData
23+
}
24+
25+
type DefaultChatModelsV12 = {
26+
id: string
27+
providerType: string
28+
providerId: string
29+
model: string
30+
reasoning?: {
31+
enabled: boolean
32+
reasoning_effort?: string
33+
}
34+
thinking?: {
35+
enabled: boolean
36+
budget_tokens: number
37+
}
38+
web_search_options?: {
39+
search_context_size?: string
40+
}
41+
enable?: boolean
42+
}[]
43+
44+
export const DEFAULT_CHAT_MODELS_V12: DefaultChatModelsV12 = [
45+
{
46+
providerType: 'anthropic',
47+
providerId: 'anthropic',
48+
id: 'claude-opus-4.1',
49+
model: 'claude-opus-4-1',
50+
},
51+
{
52+
providerType: 'anthropic',
53+
providerId: 'anthropic',
54+
id: 'claude-sonnet-4.5',
55+
model: 'claude-sonnet-4-5',
56+
},
57+
{
58+
providerType: 'anthropic',
59+
providerId: 'anthropic',
60+
id: 'claude-haiku-4.5',
61+
model: 'claude-haiku-4-5',
62+
},
63+
{
64+
providerType: 'openai',
65+
providerId: 'openai',
66+
id: 'gpt-5',
67+
model: 'gpt-5',
68+
},
69+
{
70+
providerType: 'openai',
71+
providerId: 'openai',
72+
id: 'gpt-5-mini',
73+
model: 'gpt-5-mini',
74+
},
75+
{
76+
providerType: 'openai',
77+
providerId: 'openai',
78+
id: 'gpt-5-nano',
79+
model: 'gpt-5-nano',
80+
},
81+
{
82+
providerType: 'openai',
83+
providerId: 'openai',
84+
id: 'gpt-4.1',
85+
model: 'gpt-4.1',
86+
},
87+
{
88+
providerType: 'openai',
89+
providerId: 'openai',
90+
id: 'gpt-4.1-mini',
91+
model: 'gpt-4.1-mini',
92+
},
93+
{
94+
providerType: 'openai',
95+
providerId: 'openai',
96+
id: 'gpt-4.1-nano',
97+
model: 'gpt-4.1-nano',
98+
},
99+
{
100+
providerType: 'openai',
101+
providerId: 'openai',
102+
id: 'gpt-4o',
103+
model: 'gpt-4o',
104+
},
105+
{
106+
providerType: 'openai',
107+
providerId: 'openai',
108+
id: 'gpt-4o-mini',
109+
model: 'gpt-4o-mini',
110+
},
111+
{
112+
providerType: 'openai',
113+
providerId: 'openai',
114+
id: 'o4-mini',
115+
model: 'o4-mini',
116+
reasoning: {
117+
enabled: true,
118+
reasoning_effort: 'medium',
119+
},
120+
},
121+
{
122+
providerType: 'openai',
123+
providerId: 'openai',
124+
id: 'o3',
125+
model: 'o3',
126+
reasoning: {
127+
enabled: true,
128+
reasoning_effort: 'medium',
129+
},
130+
},
131+
{
132+
providerType: 'gemini',
133+
providerId: 'gemini',
134+
id: 'gemini-2.5-pro',
135+
model: 'gemini-2.5-pro',
136+
},
137+
{
138+
providerType: 'gemini',
139+
providerId: 'gemini',
140+
id: 'gemini-2.5-flash',
141+
model: 'gemini-2.5-flash',
142+
},
143+
{
144+
providerType: 'gemini',
145+
providerId: 'gemini',
146+
id: 'gemini-2.5-flash-lite',
147+
model: 'gemini-2.5-flash-lite',
148+
},
149+
{
150+
providerType: 'gemini',
151+
providerId: 'gemini',
152+
id: 'gemini-2.0-flash',
153+
model: 'gemini-2.0-flash',
154+
},
155+
{
156+
providerType: 'gemini',
157+
providerId: 'gemini',
158+
id: 'gemini-2.0-flash-lite',
159+
model: 'gemini-2.0-flash-lite',
160+
},
161+
{
162+
providerType: 'deepseek',
163+
providerId: 'deepseek',
164+
id: 'deepseek-chat',
165+
model: 'deepseek-chat',
166+
},
167+
{
168+
providerType: 'deepseek',
169+
providerId: 'deepseek',
170+
id: 'deepseek-reasoner',
171+
model: 'deepseek-reasoner',
172+
},
173+
{
174+
providerType: 'perplexity',
175+
providerId: 'perplexity',
176+
id: 'sonar',
177+
model: 'sonar',
178+
web_search_options: {
179+
search_context_size: 'low',
180+
},
181+
},
182+
{
183+
providerType: 'perplexity',
184+
providerId: 'perplexity',
185+
id: 'sonar-pro',
186+
model: 'sonar',
187+
web_search_options: {
188+
search_context_size: 'low',
189+
},
190+
},
191+
{
192+
providerType: 'perplexity',
193+
providerId: 'perplexity',
194+
id: 'sonar-deep-research',
195+
model: 'sonar-deep-research',
196+
web_search_options: {
197+
search_context_size: 'low',
198+
},
199+
},
200+
{
201+
providerType: 'perplexity',
202+
providerId: 'perplexity',
203+
id: 'sonar-reasoning',
204+
model: 'sonar',
205+
web_search_options: {
206+
search_context_size: 'low',
207+
},
208+
},
209+
{
210+
providerType: 'perplexity',
211+
providerId: 'perplexity',
212+
id: 'sonar-reasoning-pro',
213+
model: 'sonar',
214+
web_search_options: {
215+
search_context_size: 'low',
216+
},
217+
},
218+
{
219+
providerType: 'morph',
220+
providerId: 'morph',
221+
id: 'morph-v0',
222+
model: 'morph-v0',
223+
},
224+
]

0 commit comments

Comments
 (0)