Skip to content

Commit 10db46b

Browse files
committed
[Components] openrouter #15025
Actions - Send Completion Request - Send Chat Completion Request - Retrieve Available Models
1 parent 943fec5 commit 10db46b

File tree

7 files changed

+592
-5
lines changed

7 files changed

+592
-5
lines changed
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
import openrouter from "../../openrouter.app.mjs";

export default {
  key: "openrouter-retrieve-available-models",
  name: "Retrieve Available Models",
  version: "0.0.1",
  description: "Returns a list of models available through the API. [See the documentation](https://openrouter.ai/docs/api-reference/list-available-models)",
  type: "action",
  props: {
    openrouter,
  },
  /**
   * Fetch the catalog of models exposed by the OpenRouter API and
   * surface a human-readable summary with the number of models found.
   */
  async run({ $ }) {
    const response = await this.openrouter.listModels({ $ });
    const modelCount = response.data.length;
    $.export("$summary", `Successfully retrieved ${modelCount} available model(s)!`);
    return response;
  },
};
Lines changed: 190 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,190 @@
import { ConfigurationError } from "@pipedream/platform";
import { parseObject } from "../../common/utils.mjs";
import openrouter from "../../openrouter.app.mjs";

export default {
  key: "openrouter-send-chat-completion-request",
  name: "Send Chat Completion Request",
  version: "0.0.1",
  description: "Send a chat completion request to a selected model. [See the documentation](https://openrouter.ai/docs/api-reference/chat-completion)",
  type: "action",
  props: {
    openrouter,
    model: {
      propDefinition: [
        openrouter,
        "model",
      ],
    },
    messages: {
      type: "string[]",
      label: "Messages",
      description: "A list of objects containing role and content. E.g. **{\"role\":\"user\", \"content\":\"text\"}**. [See the documentation](https://openrouter.ai/docs/api-reference/chat-completion#request.body.messages) for further details.",
    },
    maxTokens: {
      propDefinition: [
        openrouter,
        "maxTokens",
      ],
    },
    temperature: {
      propDefinition: [
        openrouter,
        "temperature",
      ],
    },
    seed: {
      propDefinition: [
        openrouter,
        "seed",
      ],
    },
    topP: {
      propDefinition: [
        openrouter,
        "topP",
      ],
    },
    topK: {
      propDefinition: [
        openrouter,
        "topK",
      ],
    },
    frequencyPenalty: {
      propDefinition: [
        openrouter,
        "frequencyPenalty",
      ],
    },
    presencePenalty: {
      propDefinition: [
        openrouter,
        "presencePenalty",
      ],
    },
    repetitionPenalty: {
      propDefinition: [
        openrouter,
        "repetitionPenalty",
      ],
    },
    logitBias: {
      propDefinition: [
        openrouter,
        "logitBias",
      ],
    },
    // NOTE(review): "togLogprobs" looks like a typo for "topLogprobs", but the
    // key must match the propDefinition declared in openrouter.app.mjs (not
    // visible here) — confirm before renaming.
    togLogprobs: {
      propDefinition: [
        openrouter,
        "togLogprobs",
      ],
    },
    minP: {
      propDefinition: [
        openrouter,
        "minP",
      ],
    },
    topA: {
      propDefinition: [
        openrouter,
        "topA",
      ],
    },
    transforms: {
      propDefinition: [
        openrouter,
        "transforms",
      ],
    },
    models: {
      propDefinition: [
        openrouter,
        "model",
      ],
      type: "string[]",
      label: "Models",
      description: "Alternate list of models for routing overrides.",
    },
    sort: {
      propDefinition: [
        openrouter,
        "sort",
      ],
    },
    effort: {
      propDefinition: [
        openrouter,
        "effort",
      ],
    },
    reasoningMaxTokens: {
      propDefinition: [
        openrouter,
        "reasoningMaxTokens",
      ],
    },
    exclude: {
      propDefinition: [
        openrouter,
        "exclude",
      ],
    },
  },
  /**
   * Build a non-streaming chat-completion request body from the configured
   * props and send it via the OpenRouter app client.
   *
   * @throws {ConfigurationError} when both reasoning controls are set, or
   *   when the API response carries an `error` payload.
   * @returns the raw API response.
   */
  async run({ $ }) {
    // The API accepts either an effort level or an explicit reasoning token
    // budget, not both.
    if (this.effort && this.reasoningMaxTokens) {
      throw new ConfigurationError("**Reasoning Effort** and **Reasoning Max Tokens** cannot be used simultaneously.");
    }
    // NOTE(review): these keys are camelCase while the OpenRouter REST API
    // documents snake_case (e.g. max_tokens, top_p); presumably the app
    // client or platform serializes them — verify against openrouter.app.mjs.
    const data = {
      model: this.model,
      // Fixed: previously read `this.prompt`, which is not a prop of this
      // action, so the request body never contained the chat messages.
      messages: parseObject(this.messages),
      stream: false,
      maxTokens: this.maxTokens,
      // Numeric tuning props may arrive as strings; coerce when present.
      temperature: this.temperature && parseFloat(this.temperature),
      seed: this.seed,
      topP: this.topP && parseFloat(this.topP),
      topK: this.topK,
      frequencyPenalty: this.frequencyPenalty && parseFloat(this.frequencyPenalty),
      presencePenalty: this.presencePenalty && parseFloat(this.presencePenalty),
      repetitionPenalty: this.repetitionPenalty && parseFloat(this.repetitionPenalty),
      logitBias: this.logitBias,
      togLogprobs: this.togLogprobs,
      minP: this.minP && parseFloat(this.minP),
      topA: this.topA && parseFloat(this.topA),
      transforms: this.transforms,
      models: this.models,
      // Removed dead `provider: this.provider` / `reasoning: this.reasoning`
      // assignments — no such props exist, so both were always undefined and
      // are built conditionally below instead.
    };
    // Provider routing override: only sent when a sort preference is chosen.
    if (this.sort) {
      data.provider = {
        sort: this.sort,
      };
    }
    // Assemble the optional reasoning configuration from its three controls.
    const reasoning = {};
    if (this.effort) {
      reasoning.effort = this.effort;
    }
    if (this.reasoningMaxTokens) {
      reasoning.max_tokens = parseFloat(this.reasoningMaxTokens);
    }
    if (this.exclude) {
      reasoning.exclude = this.exclude;
    }
    if (Object.keys(reasoning).length) {
      data.reasoning = reasoning;
    }
    // NOTE(review): "sendChatCompetionRequest" is misspelled but must match
    // the method name in openrouter.app.mjs — rename both together if fixed.
    const response = await this.openrouter.sendChatCompetionRequest({
      $,
      data,
      // Long-running completions: allow up to 5 minutes.
      timeout: 1000 * 60 * 5,
    });
    // The API can return HTTP 200 with an error payload; surface it clearly.
    if (response.error) {
      throw new ConfigurationError(response.error.message);
    }
    $.export("$summary", `A new chat completion request with Id: ${response.id} was successfully created!`);
    return response;
  },
};

0 commit comments

Comments (0)