Skip to content

Commit cc87d85

Browse files
Chore: Update @langchain/community (#3787)
* Update package versions
* Fix updated field definitions required by the library update
* Merge branch 'main' into chore/Upgrade-LC-version (conflicts resolved in packages/components/package.json and pnpm-lock.yaml)
* Lint fixes
* Fix follow-up prompt dialog
---------
Co-authored-by: Henry <[email protected]>
1 parent 16aa3a0 commit cc87d85

File tree

6 files changed

+513
-204
lines changed

6 files changed

+513
-204
lines changed

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@
6868
},
6969
"resolutions": {
7070
"@google/generative-ai": "^0.15.0",
71-
"@langchain/core": "0.3.18",
71+
"@langchain/core": "0.3.29",
7272
"@qdrant/openapi-typescript-fetch": "1.2.6",
7373
"openai": "4.57.3",
7474
"protobufjs": "7.4.0"

packages/components/nodes/embeddings/JinaAIEmbedding/JinaAIEmbedding.ts

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { ICommonObject, INode, INodeData, INodeParams } from '../../../src/Interface'
22
import { getBaseClasses, getCredentialData, getCredentialParam } from '../../../src/utils'
3-
import { JinaEmbeddings, JinaEmbeddingsParams } from '@langchain/community/embeddings/jina'
3+
import { JinaEmbeddings } from '@langchain/community/embeddings/jina'
44

55
class JinaAIEmbedding_Embeddings implements INode {
66
label: string
@@ -45,12 +45,11 @@ class JinaAIEmbedding_Embeddings implements INode {
4545
const credentialData = await getCredentialData(nodeData.credential ?? '', options)
4646
const apiKey = getCredentialParam('jinaAIAPIKey', credentialData, nodeData)
4747

48-
const obj: JinaEmbeddingsParams = {
48+
const model = new JinaEmbeddings({
4949
apiKey: apiKey,
5050
model: modelName
51-
}
51+
})
5252

53-
const model = new JinaEmbeddings(obj)
5453
return model
5554
}
5655
}

packages/components/package.json

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -41,14 +41,14 @@
4141
"@langchain/aws": "0.1.2",
4242
"@langchain/baidu-qianfan": "^0.1.0",
4343
"@langchain/cohere": "^0.0.7",
44-
"@langchain/community": "^0.3.11",
45-
"@langchain/core": "0.3.18",
44+
"@langchain/community": "^0.3.24",
45+
"@langchain/core": "0.3.29",
4646
"@langchain/exa": "^0.0.5",
4747
"@langchain/google-genai": "0.1.3",
4848
"@langchain/google-vertexai": "^0.1.2",
4949
"@langchain/groq": "0.1.2",
5050
"@langchain/langgraph": "^0.0.22",
51-
"@langchain/mistralai": "^0.0.26",
51+
"@langchain/mistralai": "^0.2.0",
5252
"@langchain/mongodb": "^0.0.1",
5353
"@langchain/ollama": "0.1.2",
5454
"@langchain/openai": "0.3.13",

packages/components/src/followUpPrompts.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,7 @@ export const generateFollowUpPrompts = async (
9696
model: providerConfig.modelName,
9797
temperature: parseFloat(`${providerConfig.temperature}`)
9898
})
99+
// @ts-ignore
99100
const structuredLLM = model.withStructuredOutput(FollowUpPromptType)
100101
const structuredResponse = await structuredLLM.invoke(followUpPromptsPrompt)
101102
return structuredResponse

packages/ui/src/ui-component/extended/FollowUpPrompts.jsx

Lines changed: 13 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -354,16 +354,20 @@ const FollowUpPrompts = ({ dialogProps }) => {
354354
chatbotConfig.followUpPrompts = value.followUpPrompts
355355

356356
// if the prompt is not set, save the default prompt
357-
if (!followUpPromptsConfig[followUpPromptsConfig.selectedProvider].prompt) {
358-
followUpPromptsConfig[followUpPromptsConfig.selectedProvider].prompt = followUpPromptsOptions[
359-
followUpPromptsConfig.selectedProvider
360-
].inputs.find((input) => input.name === 'prompt').default
361-
}
357+
const selectedProvider = followUpPromptsConfig.selectedProvider
358+
359+
if (selectedProvider && followUpPromptsConfig[selectedProvider] && followUpPromptsOptions[selectedProvider]) {
360+
if (!followUpPromptsConfig[selectedProvider].prompt) {
361+
followUpPromptsConfig[selectedProvider].prompt = followUpPromptsOptions[selectedProvider].inputs.find(
362+
(input) => input.name === 'prompt'
363+
)?.default
364+
}
362365

363-
if (!followUpPromptsConfig[followUpPromptsConfig.selectedProvider].temperature) {
364-
followUpPromptsConfig[followUpPromptsConfig.selectedProvider].temperature = followUpPromptsOptions[
365-
followUpPromptsConfig.selectedProvider
366-
].inputs.find((input) => input.name === 'temperature').default
366+
if (!followUpPromptsConfig[selectedProvider].temperature) {
367+
followUpPromptsConfig[selectedProvider].temperature = followUpPromptsOptions[selectedProvider].inputs.find(
368+
(input) => input.name === 'temperature'
369+
)?.default
370+
}
367371
}
368372

369373
const saveResp = await chatflowsApi.updateChatflow(dialogProps.chatflow.id, {
@@ -462,7 +466,6 @@ const FollowUpPrompts = ({ dialogProps }) => {
462466
<Typography variant='h5'>Providers</Typography>
463467
<FormControl fullWidth>
464468
<Select size='small' value={selectedProvider} onChange={handleSelectedProviderChange}>
465-
<MenuItem value='none'>None</MenuItem>
466469
{Object.values(followUpPromptsOptions).map((provider) => (
467470
<MenuItem key={provider.name} value={provider.name}>
468471
{provider.label}

0 commit comments

Comments (0)