[project]
name = "vector-mcp"
version = "1.1.20"
description = "Integrate RAG into AI Agents via MCP Server. Supports multiple Vector database technologies."
readme = "README.md"
authors = [{ name = "Audel Rouhi", email = "knucklessg1@gmail.com" }]
license = { text = "MIT" }
# NOTE(review): classifier aligned with the declared MIT `license` above
# (previously "License :: Public Domain", which contradicted it).
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "License :: OSI Approved :: MIT License",
    "Environment :: Console",
    "Operating System :: POSIX :: Linux",
    "Programming Language :: Python :: 3",
]
requires-python = ">=3.10"
# Core runtime dependencies (PEP 508 specifiers), sorted alphabetically.
dependencies = [
    "agent-utilities[embeddings,mcp]>=0.1.9",
    "beautifulsoup4>=4.14.3",
    "chromadb>=1.5.1",
    "ebooklib>=0.20",
    "html2text>=2025.4.15",
    "ipython>=9.10.0",
    "llama-index-core>=0.14.15",
    "llama-index-llms-langchain>=0.7.2",
    "llama-index-readers-file>=0.5.6",
    "llama-index-vector-stores-chroma>=0.5.5",
    "markdownify>=1.2.2",
    "protobuf>=6.33.5",
    "pypdf>=6.7.2",
]

# One extra per supported vector-store backend; each pulls in the backend
# client plus its matching llama-index vector-store integration.
[project.optional-dependencies]
postgres = [
    "psycopg>=3.3.3",
    "llama-index-vector-stores-postgres>=0.7.3",
]
chromadb = [
    "chromadb>=1.5.1",
    "llama-index-vector-stores-chroma>=0.5.5",
    "opentelemetry-api>=1.39.1",
    "opentelemetry-sdk>=1.39.1",
    "opentelemetry-exporter-otlp>=1.39.1",
]
couchbase = [
    "couchbase>=4.5.0",
    "llama-index-vector-stores-couchbase>=0.6.0",
]
qdrant = [
    "qdrant-client>=1.16.2",
    "fastembed>=0.7.4",
    "llama-index-vector-stores-qdrant>=0.9.1",
]
mongodb = [
    "pymongo>=4.16.0",
    "llama-index-vector-stores-mongodb>=0.9.1",
]
huggingface = [
    "llama-index-embeddings-huggingface>=0.6.1",
    "sentence-transformers>=5.2.2",
]
agent = [
    "agent-utilities[agent]>=0.1.9",
]
# Self-referential convenience extra. Deliberately unpinned: a version
# constraint here (e.g. ">=1.1.19") goes stale on every release bump,
# whereas the bare self-reference always resolves to the installed version.
all = [
    "vector-mcp[postgres,chromadb,couchbase,qdrant,mongodb,huggingface,agent]",
]

[project.scripts]
vector-mcp = "vector_mcp.mcp:mcp_server"