Skip to content

Commit 0b204d8

Browse files
Harrison/quadrant (#665)
Co-authored-by: Kacper Łukawski <[email protected]>
1 parent 983b73f commit 0b204d8

File tree

6 files changed

+1054
-344
lines changed

6 files changed

+1054
-344
lines changed

docs/modules/utils/combine_docs_examples/vectorstores.ipynb

Lines changed: 56 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
},
1717
{
1818
"cell_type": "code",
19-
"execution_count": 6,
19+
"execution_count": 1,
2020
"id": "965eecee",
2121
"metadata": {
2222
"pycharm": {
@@ -27,12 +27,12 @@
2727
"source": [
2828
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
2929
"from langchain.text_splitter import CharacterTextSplitter\n",
30-
"from langchain.vectorstores import ElasticVectorSearch, Pinecone, Weaviate, FAISS"
30+
"from langchain.vectorstores import ElasticVectorSearch, Pinecone, Weaviate, FAISS, Qdrant"
3131
]
3232
},
3333
{
3434
"cell_type": "code",
35-
"execution_count": 7,
35+
"execution_count": 2,
3636
"id": "68481687",
3737
"metadata": {
3838
"pycharm": {
@@ -514,10 +514,62 @@
514514
"docs[0]"
515515
]
516516
},
517+
{
518+
"cell_type": "markdown",
519+
"id": "9b852079",
520+
"metadata": {},
521+
"source": [
522+
"## Qdrant"
523+
]
524+
},
525+
{
526+
"cell_type": "code",
527+
"execution_count": null,
528+
"id": "e5ec70ce",
529+
"metadata": {},
530+
"outputs": [],
531+
"source": [
532+
"host = \"<---host name here --->\"\n",
533+
"api_key = \"<---api key here--->\"\n",
534+
"qdrant = Qdrant.from_texts(texts, embeddings, host=host, prefer_grpc=True, api_key=api_key)\n",
535+
"query = \"What did the president say about Ketanji Brown Jackson\""
536+
]
537+
},
538+
{
539+
"cell_type": "code",
540+
"execution_count": 21,
541+
"id": "9805ad1f",
542+
"metadata": {},
543+
"outputs": [],
544+
"source": [
545+
"docs = qdrant.similarity_search(query)"
546+
]
547+
},
548+
{
549+
"cell_type": "code",
550+
"execution_count": 22,
551+
"id": "bd097a0e",
552+
"metadata": {},
553+
"outputs": [
554+
{
555+
"data": {
556+
"text/plain": [
557+
"Document(page_content='In state after state, new laws have been passed, not only to suppress the vote, but to subvert entire elections. \\n\\nWe cannot let this happen. \\n\\nTonight. I call on the Senate to: Pass the Freedom to Vote Act. Pass the John Lewis Voting Rights Act. And while you’re at it, pass the Disclose Act so Americans can know who is funding our elections. \\n\\nTonight, I’d like to honor someone who has dedicated his life to serve this country: Justice Stephen Breyer—an Army veteran, Constitutional scholar, and retiring Justice of the United States Supreme Court. Justice Breyer, thank you for your service. \\n\\nOne of the most serious constitutional responsibilities a President has is nominating someone to serve on the United States Supreme Court. \\n\\nAnd I did that 4 days ago, when I nominated Circuit Court of Appeals Judge Ketanji Brown Jackson. One of our nation’s top legal minds, who will continue Justice Breyer’s legacy of excellence.', lookup_str='', metadata={}, lookup_index=0)"
558+
]
559+
},
560+
"execution_count": 22,
561+
"metadata": {},
562+
"output_type": "execute_result"
563+
}
564+
],
565+
"source": [
566+
"docs[0]"
567+
]
568+
},
517569
{
518570
"cell_type": "code",
519571
"execution_count": null,
520-
"id": "e7d74bd2",
572+
"id": "8ffd66e2",
521573
"metadata": {},
522574
"outputs": [],
523575
"source": []

langchain/vectorstores/__init__.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,14 @@
33
from langchain.vectorstores.elastic_vector_search import ElasticVectorSearch
44
from langchain.vectorstores.faiss import FAISS
55
from langchain.vectorstores.pinecone import Pinecone
6+
from langchain.vectorstores.qdrant import Qdrant
67
from langchain.vectorstores.weaviate import Weaviate
78

# Public interface of ``langchain.vectorstores`` — kept explicit so that
# ``from langchain.vectorstores import *`` exposes exactly these names.
__all__ = [
    "ElasticVectorSearch",
    "FAISS",
    "VectorStore",
    "Pinecone",
    "Weaviate",
    "Qdrant",
]

langchain/vectorstores/qdrant.py

Lines changed: 225 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,225 @@
1+
"""Wrapper around Qdrant vector database."""
2+
import uuid
3+
from operator import itemgetter
4+
from typing import Any, Callable, Iterable, List, Optional, Tuple
5+
6+
from langchain.docstore.document import Document
7+
from langchain.embeddings.base import Embeddings
8+
from langchain.utils import get_from_dict_or_env
9+
from langchain.vectorstores import VectorStore
10+
from langchain.vectorstores.utils import maximal_marginal_relevance
11+
12+
13+
class Qdrant(VectorStore):
    """Wrapper around Qdrant vector database.

    To use you should have the ``qdrant-client`` package installed.

    Example:
        .. code-block:: python

            from langchain import Qdrant

            client = QdrantClient()
            collection_name = "MyCollection"
            qdrant = Qdrant(client, collection_name, embedding_function)
    """

    def __init__(self, client: Any, collection_name: str, embedding_function: Callable):
        """Initialize with a qdrant client, a collection name and an embedder.

        Args:
            client: A pre-configured ``qdrant_client.QdrantClient`` instance.
            collection_name: Name of the Qdrant collection holding the vectors.
            embedding_function: Callable mapping a text to its embedding vector.

        Raises:
            ValueError: If ``qdrant-client`` is not installed, or ``client`` is
                not a ``qdrant_client.QdrantClient`` instance.
        """
        try:
            import qdrant_client
        except ImportError:
            raise ValueError(
                "Could not import qdrant-client python package. "
                "Please install it with `pip install qdrant-client`."
            )

        if not isinstance(client, qdrant_client.QdrantClient):
            raise ValueError(
                f"client should be an instance of qdrant_client.QdrantClient, "
                f"got {type(client)}"
            )

        self.client: qdrant_client.QdrantClient = client
        self.collection_name = collection_name
        self.embedding_function = embedding_function

    def add_texts(
        self, texts: Iterable[str], metadatas: Optional[List[dict]] = None
    ) -> List[str]:
        """Run more texts through the embeddings and add to the vectorstore.

        Args:
            texts: Iterable of strings to add to the vectorstore.
            metadatas: Optional list of metadatas associated with the texts.

        Returns:
            List of ids from adding the texts into the vectorstore.
        """
        from qdrant_client.http import models as rest

        # Materialize once: `texts` may be a one-shot iterator, but we need to
        # traverse it separately for ids, vectors and payloads.
        texts = list(texts)
        ids = [uuid.uuid4().hex for _ in texts]
        self.client.upsert(
            collection_name=self.collection_name,
            points=rest.Batch(
                ids=ids,
                vectors=[self.embedding_function(text) for text in texts],
                payloads=self._build_payloads(texts, metadatas),
            ),
        )

        return ids

    def similarity_search(
        self, query: str, k: int = 4, **kwargs: Any
    ) -> List[Document]:
        """Return docs most similar to query.

        Args:
            query: Text to look up documents similar to.
            k: Number of Documents to return. Defaults to 4.

        Returns:
            List of Documents most similar to the query.
        """
        # Delegate to the scored variant and strip the scores.
        results = self.similarity_search_with_score(query, k)
        return list(map(itemgetter(0), results))

    def similarity_search_with_score(
        self, query: str, k: int = 4
    ) -> List[Tuple[Document, float]]:
        """Return docs most similar to query.

        Args:
            query: Text to look up documents similar to.
            k: Number of Documents to return. Defaults to 4.

        Returns:
            List of Documents most similar to the query and score for each
        """
        embedding = self.embedding_function(query)
        results = self.client.search(
            collection_name=self.collection_name,
            query_vector=embedding,
            with_payload=True,
            limit=k,
        )
        return [
            (
                self._document_from_scored_point(result),
                result.score,
            )
            for result in results
        ]

    def max_marginal_relevance_search(
        self, query: str, k: int = 4, fetch_k: int = 20
    ) -> List[Document]:
        """Return docs selected using the maximal marginal relevance.

        Maximal marginal relevance optimizes for similarity to query AND diversity
        among selected documents.

        Args:
            query: Text to look up documents similar to.
            k: Number of Documents to return. Defaults to 4.
            fetch_k: Number of Documents to fetch to pass to MMR algorithm.

        Returns:
            List of Documents selected by maximal marginal relevance.
        """
        embedding = self.embedding_function(query)
        # Fetch a larger candidate pool (`fetch_k`, not `k`) so the MMR
        # reranking step actually has alternatives to diversify over.
        results = self.client.search(
            collection_name=self.collection_name,
            query_vector=embedding,
            with_payload=True,
            with_vectors=True,
            limit=fetch_k,
        )
        embeddings = [result.vector for result in results]
        mmr_selected = maximal_marginal_relevance(embedding, embeddings, k=k)
        return [self._document_from_scored_point(results[i]) for i in mmr_selected]

    @classmethod
    def from_texts(
        cls,
        texts: List[str],
        embedding: Embeddings,
        metadatas: Optional[List[dict]] = None,
        **kwargs: Any,
    ) -> "Qdrant":
        """Construct Qdrant wrapper from raw documents.

        This is a user friendly interface that:
            1. Embeds documents.
            2. Creates an in memory docstore
            3. Initializes the Qdrant database

        This is intended to be a quick way to get started.

        Args:
            texts: Texts to index; must be non-empty.
            embedding: Embeddings model used for both indexing and querying.
            metadatas: Optional per-text metadata dicts, parallel to ``texts``.
            **kwargs: ``host`` (or env var ``QDRANT_HOST``), optional
                ``collection_name`` and ``distance_func`` ("Cosine" by
                default); everything else is forwarded to ``QdrantClient``.

        Example:
            .. code-block:: python

                from langchain import Qdrant
                from langchain.embeddings import OpenAIEmbeddings
                embeddings = OpenAIEmbeddings()
                qdrant = Qdrant.from_texts(texts, embeddings)
        """
        try:
            import qdrant_client
        except ImportError:
            raise ValueError(
                "Could not import qdrant-client python package. "
                "Please install it with `pip install qdrant-client`."
            )

        from qdrant_client.http import models as rest

        if not texts:
            raise ValueError("texts must be a non-empty list of strings.")

        # Embed every document up front: the batch uploaded below must have
        # exactly one vector per id, and the first vector also determines the
        # dimensionality the new collection is created with.
        embeddings = embedding.embed_documents(texts)
        vector_size = len(embeddings[0])

        # Consume our own keyword arguments *before* constructing the client,
        # otherwise QdrantClient would reject them as unknown parameters.
        collection_name = kwargs.pop("collection_name", uuid.uuid4().hex)
        distance_func = kwargs.pop("distance_func", "Cosine").upper()
        qdrant_host = get_from_dict_or_env(kwargs, "host", "QDRANT_HOST")
        # The host may have come from the environment rather than kwargs.
        kwargs.pop("host", None)
        client = qdrant_client.QdrantClient(host=qdrant_host, **kwargs)

        client.recreate_collection(
            collection_name=collection_name,
            vectors_config=rest.VectorParams(
                size=vector_size,
                distance=rest.Distance[distance_func],
            ),
        )

        client.upsert(
            collection_name=collection_name,
            points=rest.Batch(
                ids=[uuid.uuid4().hex for _ in texts],
                vectors=embeddings,
                payloads=cls._build_payloads(texts, metadatas),
            ),
        )

        return cls(client, collection_name, embedding.embed_query)

    @classmethod
    def _build_payloads(
        cls, texts: Iterable[str], metadatas: Optional[List[dict]]
    ) -> List[dict]:
        # One payload dict per text; metadata is None when no metadatas given.
        return [
            {
                "page_content": text,
                "metadata": metadatas[i] if metadatas is not None else None,
            }
            for i, text in enumerate(texts)
        ]

    @classmethod
    def _document_from_scored_point(cls, scored_point: Any) -> Document:
        # Rebuild a Document from the payload stored by _build_payloads.
        return Document(
            page_content=scored_point.payload.get("page_content"),
            metadata=scored_point.payload.get("metadata") or {},
        )

0 commit comments

Comments
 (0)