
Commit 7d62c20

feat(openrouter): add open router to model block (#1172)
* feat(openrouter): add open router to model block
* improvement(openrouter): streaming fix, temperature fix
* pr comments

Co-authored-by: waleedlatif1 <[email protected]>
1 parent df64625 commit 7d62c20

15 files changed, +812 -123 lines changed

Lines changed: 46 additions & 0 deletions
@@ -0,0 +1,46 @@
+import { type NextRequest, NextResponse } from 'next/server'
+import { createLogger } from '@/lib/logs/console/logger'
+
+const logger = createLogger('OpenRouterModelsAPI')
+
+export const dynamic = 'force-dynamic'
+
+export async function GET(_request: NextRequest) {
+  try {
+    const response = await fetch('https://openrouter.ai/api/v1/models', {
+      headers: { 'Content-Type': 'application/json' },
+      cache: 'no-store',
+    })
+
+    if (!response.ok) {
+      logger.warn('Failed to fetch OpenRouter models', {
+        status: response.status,
+        statusText: response.statusText,
+      })
+      return NextResponse.json({ models: [] })
+    }
+
+    const data = await response.json()
+    const models = Array.isArray(data?.data)
+      ? Array.from(
+          new Set(
+            data.data
+              .map((m: any) => m?.id)
+              .filter((id: unknown): id is string => typeof id === 'string' && id.length > 0)
+              .map((id: string) => `openrouter/${id}`)
+          )
+        )
+      : []
+
+    logger.info('Successfully fetched OpenRouter models', {
+      count: models.length,
+    })
+
+    return NextResponse.json({ models })
+  } catch (error) {
+    logger.error('Error fetching OpenRouter models', {
+      error: error instanceof Error ? error.message : 'Unknown error',
+    })
+    return NextResponse.json({ models: [] })
+  }
+}
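agent.ts later in this commit imports useOpenRouterStore from '@/stores/openrouter/store', one of the 15 changed files that is not shown in this excerpt. As a rough sketch only (the store shape and the client-side URL of the route above are assumptions, not part of the visible diff), a consumer of this endpoint might look like:

// Hypothetical sketch of a Zustand store fed by the route above.
// The endpoint path and state shape are assumptions; only the response
// shape ({ models: string[] }) comes from the route in this diff.
import { create } from 'zustand'

interface OpenRouterState {
  models: string[]
  setModels: (models: string[]) => void
}

export const useOpenRouterStore = create<OpenRouterState>((set) => ({
  models: [],
  setModels: (models) => set({ models }),
}))

// Example: populate the store from the API route (mount point is hypothetical).
export async function loadOpenRouterModels(): Promise<void> {
  const res = await fetch('/api/providers/openrouter/models')
  if (!res.ok) return
  const { models } = (await res.json()) as { models: string[] }
  useOpenRouterStore.getState().setModels(models)
}

The agent block then merges useOpenRouterStore.getState().models into its model options, as shown in the agent.ts hunk below.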

apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/combobox.tsx

Lines changed: 8 additions & 1 deletion
@@ -28,6 +28,7 @@ interface ComboBoxProps {
   placeholder?: string
   isConnecting: boolean
   config: SubBlockConfig
+  isWide?: boolean
 }

 export function ComboBox({
@@ -42,6 +43,7 @@ export function ComboBox({
   placeholder = 'Type or select an option...',
   isConnecting,
   config,
+  isWide = false,
 }: ComboBoxProps) {
   const [storeValue, setStoreValue] = useSubBlockValue<string>(blockId, subBlockId)
   const [storeInitialized, setStoreInitialized] = useState(false)
@@ -446,7 +448,12 @@ export function ComboBox({

       {/* Dropdown */}
       {open && (
-        <div className='absolute top-full left-0 z-[100] mt-1 w-full min-w-[286px]'>
+        <div
+          className={cn(
+            'absolute top-full left-0 z-[100] mt-1 w-full overflow-visible',
+            isWide ? 'min-w-[350px]' : 'min-w-[286px]'
+          )}
+        >
           <div className='allow-scroll fade-in-0 zoom-in-95 animate-in rounded-md border bg-popover text-popover-foreground shadow-lg'>
             <div
               ref={dropdownRef}
apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/components/tool-input/tool-input.tsx

Lines changed: 3 additions & 3 deletions
@@ -1457,19 +1457,19 @@ export function ToolInput({
           {tool.usageControl === 'auto' && (
             <span>
               {' '}
-              <span className='font-medium'>Auto:</span> Let the model decide
+              <span className='font-medium'> Auto:</span> Let the model decide
               when to use the tool
             </span>
           )}
           {tool.usageControl === 'force' && (
             <span>
-              <span className='font-medium'>Force:</span> Always use this tool
+              <span className='font-medium'> Force:</span> Always use this tool
              in the response
             </span>
           )}
           {tool.usageControl === 'none' && (
             <span>
-              <span className='font-medium'>Deny:</span> Never use this tool
+              <span className='font-medium'> Deny:</span> Never use this tool
             </span>
           )}
         </p>
apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/components/sub-block/sub-block.tsx

Lines changed: 3 additions & 0 deletions
@@ -45,6 +45,7 @@ interface SubBlockProps {
   disabled?: boolean
   fieldDiffStatus?: FieldDiffStatus
   allowExpandInPreview?: boolean
+  isWide?: boolean
 }

 export function SubBlock({
@@ -56,6 +57,7 @@ export function SubBlock({
   disabled = false,
   fieldDiffStatus,
   allowExpandInPreview,
+  isWide = false,
 }: SubBlockProps) {
   const [isValidJson, setIsValidJson] = useState(true)

@@ -148,6 +150,7 @@
         disabled={isDisabled}
         isConnecting={isConnecting}
         config={config}
+        isWide={isWide}
       />
     </div>
   )
apps/sim/app/workspace/[workspaceId]/w/[workflowId]/components/workflow-block/workflow-block.tsx

Lines changed: 1 addition & 0 deletions
@@ -995,6 +995,7 @@ export function WorkflowBlock({ id, data }: NodeProps<WorkflowBlockProps>) {
               : undefined
           }
           allowExpandInPreview={currentWorkflow.isDiffMode}
+          isWide={displayIsWide}
         />
       </div>
     ))}
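Taken with the combobox.tsx and sub-block.tsx hunks above, this completes the chain: WorkflowBlock passes displayIsWide down as isWide, SubBlock forwards it to ComboBox, and ComboBox switches the dropdown minimum width from 286px to 350px. The diff does not state the motivation, but the wider dropdown plausibly accommodates the longer openrouter/<vendor>/<model> identifiers returned by the new models route.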

apps/sim/blocks/blocks/agent.ts

Lines changed: 21 additions & 30 deletions
@@ -6,13 +6,12 @@ import {
   getAllModelProviders,
   getBaseModelProviders,
   getHostedModels,
+  getMaxTemperature,
   getProviderIcon,
-  MODELS_TEMP_RANGE_0_1,
-  MODELS_TEMP_RANGE_0_2,
   MODELS_WITH_REASONING_EFFORT,
-  MODELS_WITH_TEMPERATURE_SUPPORT,
   MODELS_WITH_VERBOSITY,
   providers,
+  supportsTemperature,
 } from '@/providers/utils'

 // Get current Ollama models dynamically
@@ -21,6 +20,7 @@ const getCurrentOllamaModels = () => {
 }

 import { useOllamaStore } from '@/stores/ollama/store'
+import { useOpenRouterStore } from '@/stores/openrouter/store'
 import type { ToolResponse } from '@/tools/types'

 const logger = createLogger('AgentBlock')
@@ -159,8 +159,9 @@ Create a system prompt appropriately detailed for the request, using clear langu
       required: true,
       options: () => {
         const ollamaModels = useOllamaStore.getState().models
+        const openrouterModels = useOpenRouterStore.getState().models
         const baseModels = Object.keys(getBaseModelProviders())
-        const allModels = [...baseModels, ...ollamaModels]
+        const allModels = Array.from(new Set([...baseModels, ...ollamaModels, ...openrouterModels]))

         return allModels.map((model) => {
           const icon = getProviderIcon(model)
@@ -175,10 +176,15 @@ Create a system prompt appropriately detailed for the request, using clear langu
       layout: 'half',
       min: 0,
       max: 1,
-      condition: {
+      condition: () => ({
         field: 'model',
-        value: MODELS_TEMP_RANGE_0_1,
-      },
+        value: (() => {
+          const allModels = Object.keys(getAllModelProviders())
+          return allModels.filter(
+            (model) => supportsTemperature(model) && getMaxTemperature(model) === 1
+          )
+        })(),
+      }),
     },
     {
       id: 'temperature',
@@ -187,30 +193,15 @@ Create a system prompt appropriately detailed for the request, using clear langu
       layout: 'half',
       min: 0,
       max: 2,
-      condition: {
-        field: 'model',
-        value: MODELS_TEMP_RANGE_0_2,
-      },
-    },
-    {
-      id: 'temperature',
-      title: 'Temperature',
-      type: 'slider',
-      layout: 'full',
-      min: 0,
-      max: 2,
-      condition: {
+      condition: () => ({
         field: 'model',
-        value: [...MODELS_TEMP_RANGE_0_1, ...MODELS_TEMP_RANGE_0_2],
-        not: true,
-        and: {
-          field: 'model',
-          value: Object.keys(getBaseModelProviders()).filter(
-            (model) => !MODELS_WITH_TEMPERATURE_SUPPORT.includes(model)
-          ),
-          not: true,
-        },
-      },
+        value: (() => {
+          const allModels = Object.keys(getAllModelProviders())
+          return allModels.filter(
+            (model) => supportsTemperature(model) && getMaxTemperature(model) === 2
+          )
+        })(),
+      }),
     },
     {
       id: 'reasoningEffort',
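The temperature sub-blocks no longer key off the static MODELS_TEMP_RANGE_0_1 / MODELS_TEMP_RANGE_0_2 lists; each condition is now a callback that filters getAllModelProviders() through supportsTemperature and getMaxTemperature, so models registered at runtime (such as the openrouter/* ids) land in the correct slider range. The implementations of those helpers live in '@/providers/utils' and are not shown here; a minimal sketch, assuming a lookup from model id to its maximum temperature, might be:

// Sketch only: the real helpers in '@/providers/utils' are not shown in this diff.
// MODEL_MAX_TEMPERATURE is a hypothetical lookup from model id to its maximum
// temperature; the actual data source and shape may differ.
const MODEL_MAX_TEMPERATURE: Record<string, number> = {
  // e.g. models with a 0-2 range map to 2, models with a 0-1 range map to 1
}

export function getMaxTemperature(model: string): number | undefined {
  return MODEL_MAX_TEMPERATURE[model]
}

export function supportsTemperature(model: string): boolean {
  return getMaxTemperature(model) !== undefined
}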

apps/sim/components/icons.tsx

Lines changed: 16 additions & 0 deletions
@@ -3436,3 +3436,19 @@ export function MySQLIcon(props: SVGProps<SVGSVGElement>) {
     </svg>
   )
 }
+
+export function OpenRouterIcon(props: SVGProps<SVGSVGElement>) {
+  return (
+    <svg
+      {...props}
+      fill='currentColor'
+      fillRule='evenodd'
+      height='1em'
+      viewBox='0 0 24 24'
+      width='1em'
+      xmlns='http://www.w3.org/2000/svg'
+    >
+      <path d='M16.804 1.957l7.22 4.105v.087L16.73 10.21l.017-2.117-.821-.03c-1.059-.028-1.611.002-2.268.11-1.064.175-2.038.577-3.147 1.352L8.345 11.03c-.284.195-.495.336-.68.455l-.515.322-.397.234.385.23.53.338c.476.314 1.17.796 2.701 1.866 1.11.775 2.083 1.177 3.147 1.352l.3.045c.694.091 1.375.094 2.825.033l.022-2.159 7.22 4.105v.087L16.589 22l.014-1.862-.635.022c-1.386.042-2.137.002-3.138-.162-1.694-.28-3.26-.926-4.881-2.059l-2.158-1.5a21.997 21.997 0 00-.755-.498l-.467-.28a55.927 55.927 0 00-.76-.43C2.908 14.73.563 14.116 0 14.116V9.888l.14.004c.564-.007 2.91-.622 3.809-1.124l1.016-.58.438-.274c.428-.28 1.072-.726 2.686-1.853 1.621-1.133 3.186-1.78 4.881-2.059 1.152-.19 1.974-.213 3.814-.138l.02-1.907z' />
+    </svg>
+  )
+}

apps/sim/providers/index.ts

Lines changed: 18 additions & 1 deletion
@@ -5,6 +5,7 @@ import {
   calculateCost,
   generateStructuredOutputInstructions,
   getProvider,
+  shouldBillModelUsage,
   supportsTemperature,
 } from '@/providers/utils'

@@ -85,7 +86,23 @@ export async function executeProviderRequest(
     const { prompt: promptTokens = 0, completion: completionTokens = 0 } = response.tokens
     const useCachedInput = !!request.context && request.context.length > 0

-    response.cost = calculateCost(response.model, promptTokens, completionTokens, useCachedInput)
+    if (shouldBillModelUsage(response.model, request.apiKey)) {
+      response.cost = calculateCost(response.model, promptTokens, completionTokens, useCachedInput)
+    } else {
+      response.cost = {
+        input: 0,
+        output: 0,
+        total: 0,
+        pricing: {
+          input: 0,
+          output: 0,
+          updatedAt: new Date().toISOString(),
+        },
+      }
+      logger.debug(
+        `Not billing model usage for ${response.model} - user provided API key or not hosted model`
+      )
+    }
   }

   return response
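shouldBillModelUsage is imported from '@/providers/utils' but its implementation is not part of this excerpt. Judging from the debug message ('user provided API key or not hosted model'), a minimal sketch of the check could look like the following; the exact logic is an assumption:

// Sketch only: inferred from the log message above, not the actual implementation.
// getHostedModels is imported from '@/providers/utils' in agent.ts earlier in this
// diff; its return type (an array of hosted model ids) is assumed here.
export function shouldBillModelUsage(model: string, apiKey?: string): boolean {
  const isHosted = getHostedModels().includes(model)
  const userProvidedKey = typeof apiKey === 'string' && apiKey.length > 0
  return isHosted && !userProvidedKey
}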
