
Commit 5ca0e95

Merge pull request #1033 from majiayu000/fix/langchain-v1-compatibility

fix: update langchain imports for v1.0+ compatibility

2 parents: f3dbbca + ebd909a

4 files changed: +12 −9 lines

pyproject.toml
Lines changed: 2 additions & 1 deletion

@@ -11,6 +11,7 @@ authors = [
 
 dependencies = [
     "langchain>=1.2.0",
+    "langchain-classic>=1.0.0",
     "langchain-openai>=1.1.6",
     "langchain-mistralai>=1.1.1",
     "langchain_community>=0.3.31",
@@ -26,7 +27,7 @@ dependencies = [
     "playwright>=1.57.0",
     "undetected-playwright>=0.3.0",
     "semchunk>=3.2.5",
-    "async-timeout>=5.0.1",
+    "async-timeout>=4.0.0",
     "simpleeval>=1.0.3",
     "jsonschema>=4.25.1",
     "duckduckgo-search>=8.1.1",

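The new langchain-classic pin is there because langchain 1.0 split the legacy chain helpers (such as create_extraction_chain, imported below in graph_builder.py) out of the core langchain package. A minimal sketch, not part of this commit, of how a caller could tolerate both layouts; it assumes both packages export the same symbol at the paths this diff touches:

# Sketch only: guard the import so the module works before and after the
# langchain 1.0 split; both paths below are the ones this diff touches.
try:
    # langchain >= 1.0: legacy chains live in the separate langchain-classic package
    from langchain_classic.chains import create_extraction_chain
except ImportError:
    # older releases still exported the chain from langchain.chains
    from langchain.chains import create_extraction_chain
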
scrapegraphai/builders/graph_builder.py
Lines changed: 4 additions & 2 deletions

@@ -2,7 +2,7 @@
 GraphBuilder Module
 """
 
-from langchain.chains import create_extraction_chain
+from langchain_classic.chains import create_extraction_chain
 from langchain_community.chat_models import ErnieBotChat
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_openai import ChatOpenAI
@@ -113,7 +113,9 @@ def _create_extraction_chain(self):
         {nodes_description}
 
         Based on the user's input: "{input}", identify the essential nodes required for the task and suggest a graph configuration that outlines the flow between the chosen nodes.
-        """.format(nodes_description=self.nodes_description, input="{input}")
+        """.format(
+            nodes_description=self.nodes_description, input="{input}"
+        )
         extraction_prompt = ChatPromptTemplate.from_template(
             create_graph_prompt_template
         )
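
Besides the import swap, the reformatted .format() call above leans on a small trick worth noting: passing input="{input}" writes the placeholder back into the string literally, so ChatPromptTemplate still treats it as a template variable. A short sketch of that behaviour, using a made-up nodes_description value:

from langchain_core.prompts import ChatPromptTemplate

# nodes_description here is a hypothetical value; in the real class it comes
# from self.nodes_description.
create_graph_prompt_template = """
Available nodes:
{nodes_description}

Based on the user's input: "{input}", identify the essential nodes for the task.
""".format(
    nodes_description="FetchNode, ParseNode, GenerateAnswerNode",
    input="{input}",  # substitutes the literal text "{input}" back into the string
)

extraction_prompt = ChatPromptTemplate.from_template(create_graph_prompt_template)
print(extraction_prompt.input_variables)  # ['input'] -- only the preserved placeholder survives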

scrapegraphai/nodes/generate_answer_node_k_level.py
Lines changed: 4 additions & 4 deletions

@@ -4,10 +4,10 @@
 
 from typing import List, Optional
 
-from langchain_core.prompts import PromptTemplate
 from langchain_aws import ChatBedrock
 from langchain_community.chat_models import ChatOllama
 from langchain_core.output_parsers import JsonOutputParser
+from langchain_core.prompts import PromptTemplate
 from langchain_core.runnables import RunnableParallel
 from langchain_mistralai import ChatMistralAI
 from langchain_openai import ChatOpenAI
@@ -151,7 +151,7 @@ def execute(self, state: dict) -> dict:
                 template=template_chunks_prompt,
                 input_variables=["format_instructions"],
                 partial_variables={
-                    "context": chunk.get("document"),
+                    "content": chunk.get("document"),
                     "chunk_id": i + 1,
                 },
             )
@@ -163,14 +163,14 @@ def execute(self, state: dict) -> dict:
 
         merge_prompt = PromptTemplate(
             template=template_merge_prompt,
-            input_variables=["context", "question"],
+            input_variables=["content", "question"],
             partial_variables={"format_instructions": format_instructions},
         )
 
         merge_chain = merge_prompt | self.llm_model
         if output_parser:
             merge_chain = merge_chain | output_parser
-        answer = merge_chain.invoke({"context": batch_results, "question": user_prompt})
+        answer = merge_chain.invoke({"content": batch_results, "question": user_prompt})
 
         state["answer"] = answer
 
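The context-to-content rename matters because the keys given to partial_variables and invoke() must match the placeholder names used in the prompt templates, which in this node are named content. A standalone sketch, with a hypothetical template string standing in for template_merge_prompt:

from langchain_core.prompts import PromptTemplate

# Hypothetical stand-in for the repo's merge prompt; the point is that its
# placeholders are named "content", "question" and "format_instructions",
# so the keys passed below must match them exactly.
template_merge_prompt = (
    "Partial answers:\n{content}\n\n"
    "Question: {question}\n"
    "{format_instructions}"
)

merge_prompt = PromptTemplate(
    template=template_merge_prompt,
    input_variables=["content", "question"],
    partial_variables={"format_instructions": "Answer in plain text."},
)

# format() (and invoke() on a chain built from this prompt) would fail with a
# missing-variable error if the old "context" key were passed instead of "content".
print(merge_prompt.format(content="chunk summaries...", question="What does the page say?"))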

scrapegraphai/nodes/generate_code_node.py
Lines changed: 2 additions & 2 deletions

@@ -12,10 +12,10 @@
 from bs4 import BeautifulSoup
 from jsonschema import ValidationError as JSONSchemaValidationError
 from jsonschema import validate
-from langchain_core.output_parsers import ResponseSchema, StructuredOutputParser
-from langchain_core.prompts import PromptTemplate
+from langchain_classic.output_parsers import ResponseSchema, StructuredOutputParser
 from langchain_community.chat_models import ChatOllama
 from langchain_core.output_parsers import StrOutputParser
+from langchain_core.prompts import PromptTemplate
 
 from ..prompts import TEMPLATE_INIT_CODE_GENERATION, TEMPLATE_SEMANTIC_COMPARISON
 from ..utils import (