Commit df26e8a

feature/add-perplexity-node (#4376)

* added perplexity node
* last stable release
* Update ChatPerplexity.ts
* update
* Update ChatPerplexity.ts
* Update ChatPerplexity.ts
* pnpm lint fix
* feat: update @langchain/community from 0.3.24 to 0.3.29

Co-authored-by: Marvin <[email protected]>

1 parent b55fe07 commit df26e8a
File tree

7 files changed: +36342 -36492 lines changed

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
import { INodeParams, INodeCredential } from '../src/Interface'

class PerplexityApi implements INodeCredential {
    label: string
    name: string
    version: number
    description: string
    inputs: INodeParams[]

    constructor() {
        this.label = 'Perplexity API'
        this.name = 'perplexityApi'
        this.version = 1.0
        this.description =
            'Refer to <a target="_blank" href="https://docs.perplexity.ai/docs/getting-started">official guide</a> on how to get API key'
        this.inputs = [
            {
                label: 'Perplexity API Key',
                name: 'perplexityApiKey',
                type: 'password',
                placeholder: '<PERPLEXITY_API_KEY>'
            }
        ]
    }
}

module.exports = { credClass: PerplexityApi }
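
The credential class above only declares a schema (a single password field) and exposes it through credClass. A minimal sketch of loading and inspecting that schema, roughly the way a component loader might; the require path is an assumption, since the file's path is not shown in this diff:

// Hypothetical sketch (not part of the commit): load the exported credential
// class and inspect the schema defined above. The path is illustrative only.
const { credClass } = require('./PerplexityApi.credential')

const cred = new credClass()
console.log(cred.name)                                   // 'perplexityApi' – referenced by the chat node's credentialNames
console.log(cred.inputs.map((input: any) => input.name)) // ['perplexityApiKey']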

packages/components/models.json

Lines changed: 42 additions & 1 deletion
@@ -26,7 +26,7 @@
     },
     {
         "label": "anthropic.claude-3.5-sonnet-20240620-v1:0",
-        "name": "anthropic.claude-3-5-sonnet-20240620-v1:0",
+        "name": "anthropic.claude-3.5-sonnet-20240620-v1:0",
         "description": "(20240620-v1:0) specific version of Claude Sonnet 3.5 - most intelligent model",
         "input_cost": 3e-6,
         "output_cost": 0.000015
@@ -1074,6 +1074,47 @@
         }
     ]
 },
+{
+    "name": "chatPerplexity",
+    "models": [
+        {
+            "label": "sonar",
+            "name": "sonar",
+            "input_cost": 1e-6,
+            "output_cost": 1e-6
+        },
+        {
+            "label": "sonar-pro",
+            "name": "sonar-pro",
+            "input_cost": 3e-6,
+            "output_cost": 1.5e-5
+        },
+        {
+            "label": "sonar-reasoning",
+            "name": "sonar-reasoning",
+            "input_cost": 1e-6,
+            "output_cost": 5e-6
+        },
+        {
+            "label": "sonar-reasoning-pro",
+            "name": "sonar-reasoning-pro",
+            "input_cost": 2e-6,
+            "output_cost": 8e-6
+        },
+        {
+            "label": "sonar-deep-research",
+            "name": "sonar",
+            "input_cost": 2e-6,
+            "output_cost": 8e-6
+        },
+        {
+            "label": "r1-1776",
+            "name": "r1-1776",
+            "input_cost": 2e-6,
+            "output_cost": 8e-6
+        }
+    ]
+},
 {
     "name": "chatMistralAI",
     "models": [
Lines changed: 237 additions & 0 deletions
@@ -0,0 +1,237 @@
import { ChatPerplexity as LangchainChatPerplexity, PerplexityChatInput } from '@langchain/community/chat_models/perplexity'
import { BaseCache } from '@langchain/core/caches'
import { ICommonObject, INode, INodeData, INodeOptionsValue, INodeParams } from '../../../src/Interface'
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
import { ChatPerplexity } from './FlowiseChatPerplexity'
import { getModels, MODEL_TYPE } from '../../../src/modelLoader'

class ChatPerplexity_ChatModels implements INode {
    label: string
    name: string
    version: number
    type: string
    icon: string
    category: string
    description: string
    baseClasses: string[]
    credential: INodeParams
    inputs: INodeParams[]

    constructor() {
        this.label = 'ChatPerplexity'
        this.name = 'chatPerplexity'
        this.version = 0.1
        this.type = 'ChatPerplexity'
        this.icon = 'perplexity.svg'
        this.category = 'Chat Models'
        this.description = 'Wrapper around Perplexity large language models that use the Chat endpoint'
        this.baseClasses = [this.type, ...getBaseClasses(LangchainChatPerplexity)]
        this.credential = {
            label: 'Connect Credential',
            name: 'credential',
            type: 'credential',
            credentialNames: ['perplexityApi']
        }
        this.inputs = [
            {
                label: 'Cache',
                name: 'cache',
                type: 'BaseCache',
                optional: true
            },
            {
                label: 'Model Name',
                name: 'model',
                type: 'asyncOptions',
                loadMethod: 'listModels',
                default: 'sonar'
            },
            {
                label: 'Temperature',
                name: 'temperature',
                type: 'number',
                step: 0.1,
                default: 1,
                optional: true
            },
            {
                label: 'Max Tokens',
                name: 'maxTokens',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top P',
                name: 'topP',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Top K',
                name: 'topK',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Presence Penalty',
                name: 'presencePenalty',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Frequency Penalty',
                name: 'frequencyPenalty',
                type: 'number',
                step: 0.1,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Streaming',
                name: 'streaming',
                type: 'boolean',
                default: true,
                optional: true,
                additionalParams: true
            },
            {
                label: 'Timeout',
                name: 'timeout',
                type: 'number',
                step: 1,
                optional: true,
                additionalParams: true
            },
            // {
            //     label: 'Search Domain Filter',
            //     name: 'searchDomainFilter',
            //     type: 'json',
            //     optional: true,
            //     additionalParams: true,
            //     description: 'Limit citations to URLs from specified domains (e.g., ["example.com", "anotherexample.org"])'
            // },
            // Currently disabled as output is stored as additional_kwargs
            // {
            //     label: 'Return Images',
            //     name: 'returnImages',
            //     type: 'boolean',
            //     optional: true,
            //     additionalParams: true,
            //     description: 'Whether the model should return images (if supported by the model)'
            // },
            // Currently disabled as output is stored as additional_kwargs
            // {
            //     label: 'Return Related Questions',
            //     name: 'returnRelatedQuestions',
            //     type: 'boolean',
            //     optional: true,
            //     additionalParams: true,
            //     description: 'Whether the online model should return related questions'
            // },
            // {
            //     label: 'Search Recency Filter',
            //     name: 'searchRecencyFilter',
            //     type: 'options',
            //     options: [
            //         { label: 'Not Set', name: '' },
            //         { label: 'Month', name: 'month' },
            //         { label: 'Week', name: 'week' },
            //         { label: 'Day', name: 'day' },
            //         { label: 'Hour', name: 'hour' }
            //     ],
            //     default: '',
            //     optional: true,
            //     additionalParams: true,
            //     description: 'Filter search results by time interval (does not apply to images)'
            // },
            {
                label: 'Proxy Url',
                name: 'proxyUrl',
                type: 'string',
                optional: true,
                additionalParams: true
            }
            // LangchainJS currently does not has a web_search_options, search_after_date_filter or search_before_date_filter parameter.
            // To add web_search_options (user_location, search_context_size) and search_after_date_filter, search_before_date_filter as a modelKwargs parameter.
        ]
    }

    //@ts-ignore
    loadMethods = {
        async listModels(): Promise<INodeOptionsValue[]> {
            return await getModels(MODEL_TYPE.CHAT, 'chatPerplexity')
        }
    }

    async init(nodeData: INodeData, _: string, options: ICommonObject): Promise<any> {
        const model = nodeData.inputs?.model as string
        const temperature = nodeData.inputs?.temperature as string
        const maxTokens = nodeData.inputs?.maxTokens as string
        const topP = nodeData.inputs?.topP as string
        const topK = nodeData.inputs?.topK as string
        const presencePenalty = nodeData.inputs?.presencePenalty as string
        const frequencyPenalty = nodeData.inputs?.frequencyPenalty as string
        const streaming = nodeData.inputs?.streaming as boolean
        const timeout = nodeData.inputs?.timeout as string
        const searchDomainFilterRaw = nodeData.inputs?.searchDomainFilter
        const returnImages = nodeData.inputs?.returnImages as boolean
        const returnRelatedQuestions = nodeData.inputs?.returnRelatedQuestions as boolean
        const searchRecencyFilter = nodeData.inputs?.searchRecencyFilter as string
        const proxyUrl = nodeData.inputs?.proxyUrl as string
        const cache = nodeData.inputs?.cache as BaseCache

        if (nodeData.inputs?.credentialId) {
            nodeData.credential = nodeData.inputs?.credentialId
        }
        const credentialData = await getCredentialData(nodeData.credential ?? '', options)
        const apiKey = getCredentialParam('perplexityApiKey', credentialData, nodeData)

        if (!apiKey) {
            throw new Error('Perplexity API Key missing from credential')
        }

        const obj: PerplexityChatInput = {
            model,
            apiKey,
            streaming: streaming ?? true
        }

        if (temperature) obj.temperature = parseFloat(temperature)
        if (maxTokens) obj.maxTokens = parseInt(maxTokens, 10)
        if (topP) obj.topP = parseFloat(topP)
        if (topK) obj.topK = parseInt(topK, 10)
        if (presencePenalty) obj.presencePenalty = parseFloat(presencePenalty)
        if (frequencyPenalty) obj.frequencyPenalty = parseFloat(frequencyPenalty)
        if (timeout) obj.timeout = parseInt(timeout, 10)
        if (returnImages) obj.returnImages = returnImages
        if (returnRelatedQuestions) obj.returnRelatedQuestions = returnRelatedQuestions
        if (searchRecencyFilter && searchRecencyFilter !== '') obj.searchRecencyFilter = searchRecencyFilter
        if (cache) obj.cache = cache

        if (searchDomainFilterRaw) {
            try {
                obj.searchDomainFilter =
                    typeof searchDomainFilterRaw === 'object' ? searchDomainFilterRaw : JSON.parse(searchDomainFilterRaw)
            } catch (exception) {
                throw new Error('Invalid JSON in Search Domain Filter: ' + exception)
            }
        }

        if (proxyUrl) {
            console.warn('Proxy configuration for ChatPerplexity might require adjustments to FlowiseChatPerplexity wrapper.')
        }

        const perplexityModel = new ChatPerplexity(nodeData.id, obj)
        return perplexityModel
    }
}

module.exports = { nodeClass: ChatPerplexity_ChatModels }
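
The node's init() ultimately just assembles a PerplexityChatInput object and hands it to the LangChain model via the Flowise wrapper. A hedged sketch of exercising the underlying @langchain/community model directly with the same fields the node sets; the prompt and environment variable are illustrative:

// Sketch only: call the LangChain ChatPerplexity model by hand with the same
// options the node builds in init() (model, apiKey, temperature, streaming).
import { ChatPerplexity } from '@langchain/community/chat_models/perplexity'

const model = new ChatPerplexity({
    model: 'sonar', // the node's default model
    apiKey: process.env.PERPLEXITY_API_KEY ?? '', // in Flowise this comes from the perplexityApi credential
    temperature: 1,
    streaming: false
})

const response = await model.invoke('Summarize what this commit adds to Flowise.')
console.log(response.content)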
Lines changed: 32 additions & 0 deletions
@@ -0,0 +1,32 @@
import { ChatPerplexity as LangchainChatPerplexity, type PerplexityChatInput } from '@langchain/community/chat_models/perplexity'
import { IMultiModalOption, IVisionChatModal } from '../../../src'

// Extend the Langchain ChatPerplexity class to include Flowise-specific properties and methods
export class ChatPerplexity extends LangchainChatPerplexity implements IVisionChatModal {
    configuredModel: string
    configuredMaxToken?: number
    multiModalOption: IMultiModalOption
    id: string

    constructor(id: string, fields: PerplexityChatInput) {
        super(fields)
        this.id = id
        this.configuredModel = fields?.model ?? '' // Use model from fields
        this.configuredMaxToken = fields?.maxTokens
    }

    // Method to revert to the original model configuration
    revertToOriginalModel(): void {
        this.model = this.configuredModel
        this.maxTokens = this.configuredMaxToken
    }

    // Method to set multimodal options
    setMultiModalOption(multiModalOption: IMultiModalOption): void {
        this.multiModalOption = multiModalOption
    }

    setVisionModel(): void {
        // pass
    }
}
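
The wrapper exists so Flowise can swap the model temporarily (it implements IVisionChatModal) and then restore the user's original configuration. A small illustrative usage; the id, key, and token limit below are placeholders:

// Illustrative sketch: the Flowise wrapper remembers the configured model so a
// temporary override can be undone with revertToOriginalModel().
import { ChatPerplexity } from './FlowiseChatPerplexity'

const chat = new ChatPerplexity('node_1', { model: 'sonar', apiKey: 'pplx-xxx', maxTokens: 512 })

chat.model = 'sonar-pro' // temporary override
chat.revertToOriginalModel() // model back to 'sonar', maxTokens back to 512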
Lines changed: 8 additions & 0 deletions

packages/components/package.json

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@
         "@langchain/aws": "0.1.4",
         "@langchain/baidu-qianfan": "^0.1.0",
         "@langchain/cohere": "^0.0.7",
-        "@langchain/community": "^0.3.24",
+        "@langchain/community": "^0.3.29",
         "@langchain/core": "0.3.37",
         "@langchain/exa": "^0.0.5",
         "@langchain/google-genai": "0.2.3",
