@@ -1,13 +1,13 @@
[build-system]
-requires = ["pdm-backend"]
-build-backend = "pdm.backend"
+requires = ["hatchling"]
+build-backend = "hatchling.build"

[project]
authors = []
license = { text = "MIT" }
requires-python = ">=3.10.0,<4.0.0"
dependencies = [
-    "langchain-core>=0.3.72,<1.0.0",
+    "langchain-core>=0.3.72,<2.0.0",
    "langchain-text-splitters>=0.3.9,<1.0.0",
    "langsmith>=0.1.17,<1.0.0",
    "pydantic>=2.7.4,<3.0.0",
@@ -16,31 +16,29 @@ dependencies = [
    "PyYAML>=5.3.0,<7.0.0",
    "async-timeout>=4.0.0,<5.0.0; python_version < \"3.11\"",
]
-name = "langchain"
+name = "langchain-classic"
version = "0.3.27"
description = "Building applications with LLMs through composability"
readme = "README.md"

[project.optional-dependencies]
-community = ["langchain-community"]
+# community = ["langchain-community"]
anthropic = ["langchain-anthropic"]
openai = ["langchain-openai"]
-# azure-ai = ["langchain-azure-ai"]
-# cohere = ["langchain-cohere"]
+# azure-ai = ["langchain-azure-ai"]
+# cohere = ["langchain-cohere"]
google-vertexai = ["langchain-google-vertexai"]
google-genai = ["langchain-google-genai"]
-# fireworks = ["langchain-fireworks"]
-# ollama = ["langchain-ollama"]
+fireworks = ["langchain-fireworks"]
+ollama = ["langchain-ollama"]
together = ["langchain-together"]
-# mistralai = ["langchain-mistralai"]
-# huggingface = ["langchain-huggingface"]
-# groq = ["langchain-groq"]
-# aws = ["langchain-aws"]
-# deepseek = ["langchain-deepseek"]
-# xai = ["langchain-xai"]
-# perplexity = ["langchain-perplexity"]
-# hallucination = ["transformers", "torch"]
-
+mistralai = ["langchain-mistralai"]
+# huggingface = ["langchain-huggingface"]
+groq = ["langchain-groq"]
+aws = ["langchain-aws"]
+deepseek = ["langchain-deepseek"]
+xai = ["langchain-xai"]
+perplexity = ["langchain-perplexity"]

[project.urls]
"Source Code" = "https://github.com/langchain-ai/langchain/tree/master/libs/langchain"
@@ -52,31 +50,29 @@ test = [
    "pytest>=8.0.0,<9.0.0",
    "pytest-cov>=4.0.0,<5.0.0",
    "pytest-dotenv>=0.5.2,<1.0.0",
-    "duckdb-engine>=0.9.2,<1.0.0",
    "pytest-watcher>=0.2.6,<1.0.0",
+    "pytest-asyncio>=0.23.2,<1.0.0",
+    "pytest-mock>=3.10.0,<4.0.0",
+    "pytest-socket>=0.6.0,<1.0.0",
+    "pytest-xdist<4.0.0,>=3.6.1",
+    "numpy>=1.26.4; python_version<'3.13'",
+    "numpy>=2.1.0; python_version>='3.13'",
+    "cffi<1.17.1; python_version < \"3.10\"",
+    "cffi; python_version >= \"3.10\"",
+    "duckdb-engine>=0.9.2,<1.0.0",
    "freezegun>=1.2.2,<2.0.0",
    "responses>=0.22.0,<1.0.0",
-    "pytest-asyncio>=0.23.2,<1.0.0",
    "lark>=1.1.5,<2.0.0",
    "pandas>=2.0.0,<3.0.0",
-    "pytest-mock>=3.10.0,<4.0.0",
-    "pytest-socket>=0.6.0,<1.0.0",
    "syrupy>=4.0.2,<5.0.0",
    "requests-mock>=1.11.0,<2.0.0",
-    "pytest-xdist<4.0.0,>=3.6.1",
    "blockbuster>=1.5.18,<1.6.0",
-    "cffi<1.17.1; python_version < \"3.10\"",
-    "cffi; python_version >= \"3.10\"",
+    "toml>=0.10.2,<1.0.0",
+    "packaging>=24.2.0,<26.0.0",
    "langchain-tests",
    "langchain-core",
    "langchain-text-splitters",
    "langchain-openai",
-    "toml>=0.10.2,<1.0.0",
-    "packaging>=24.2.0,<26.0.0",
-    "numpy>=1.26.4; python_version<'3.13'",
-    "numpy>=2.1.0; python_version>='3.13'",
-    "transformers>=4.35.0,<5.0.0",
-    "torch>=2.1.0,<3.0.0",
]
test_integration = [
    "vcrpy>=7.0.0,<8.0.0",
@@ -87,8 +83,6 @@ test_integration = [
    "langchainhub>=0.1.16,<1.0.0",
    "langchain-core",
    "langchain-text-splitters",
-    "transformers>=4.35.0,<5.0.0",
-    "torch>=2.1.0,<3.0.0",
]
lint = [
    "ruff>=0.13.1,<0.14.0",
@@ -97,17 +91,17 @@ lint = [
]
typing = [
    "mypy>=1.15.0,<1.16.0",
+    "mypy-protobuf>=3.0.0,<4.0.0",
    "types-pyyaml>=6.0.12.2,<7.0.0.0",
    "types-requests>=2.28.11.5,<3.0.0.0",
    "types-toml>=0.10.8.1,<1.0.0.0",
    "types-redis>=4.3.21.6,<5.0.0.0",
    "types-pytz>=2023.3.0.0,<2024.0.0.0",
    "types-chardet>=5.0.4.6,<6.0.0.0",
-    "mypy-protobuf>=3.0.0,<4.0.0",
-    "langchain-core",
-    "langchain-text-splitters",
    "numpy>=1.26.4; python_version < '3.13'",
    "numpy>=2.1.0; python_version >= '3.13'",
+    "langchain-core",
+    "langchain-text-splitters",
]
dev = [
    "jupyter>=1.0.0,<2.0.0",
@@ -125,7 +119,6 @@ langchain-text-splitters = { path = "../text-splitters", editable = true }
langchain-openai = { path = "../partners/openai", editable = true }

[tool.ruff]
-target-version = "py39"
exclude = ["tests/integration_tests/examples/non-utf8-encoding.py"]

[tool.mypy]
@@ -197,7 +190,7 @@ ignore-var-parameters = true # ignore missing documentation for *args and **kwa
"scripts/*.py" = [
    "INP001", # Not a package
]
-"langchain/chains/constitutional_ai/principles.py" = [
+"langchain_classic/chains/constitutional_ai/principles.py" = [
    "E501", # Line too long
]
"**/retrievers/*time_weighted_retriever.py" = [