
Commit f5fada9

feat: Vllm reranker model bge reranker v2 m3
1 parent b89c4e3 commit f5fada9

File tree

4 files changed: +107 −0 lines
apps/models_provider/impl/vllm_model_provider/credential/reranker.py

Lines changed: 50 additions & 0 deletions
@@ -0,0 +1,50 @@
import traceback
from typing import Dict

from langchain_core.documents import Document

from common import forms
from common.exception.app_exception import AppApiException
from common.forms import BaseForm
from models_provider.base_model_provider import BaseModelCredential, ValidCode
from django.utils.translation import gettext_lazy as _

from models_provider.impl.vllm_model_provider.model.reranker import VllmBgeReranker


class VllmRerankerCredential(BaseForm, BaseModelCredential):
    api_url = forms.TextInputField('API URL', required=True)
    api_key = forms.PasswordInputField('API Key', required=True)

    def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], model_params, provider,
                 raise_exception=True):
        model_type_list = provider.get_model_type_list()
        if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))):
            raise AppApiException(ValidCode.valid_error.value,
                                  _('{model_type} Model type is not supported').format(model_type=model_type))

        for key in ['api_url', 'api_key']:
            if key not in model_credential:
                if raise_exception:
                    raise AppApiException(ValidCode.valid_error.value, _('{key} is required').format(key=key))
                else:
                    return False
        try:
            model: VllmBgeReranker = provider.get_model(model_type, model_name, model_credential)
            model.compress_documents([Document(page_content=_('Hello'))], _('Hello'))
        except Exception as e:
            traceback.print_exc()
            if isinstance(e, AppApiException):
                raise e
            if raise_exception:
                raise AppApiException(
                    ValidCode.valid_error.value,
                    _('Verification failed, please check whether the parameters are correct: {error}').format(
                        error=str(e))
                )
            return False

        return True

    def encryption_dict(self, model_info: Dict[str, object]):
        return {**model_info, 'api_key': super().encryption(model_info.get('api_key', ''))}
apps/models_provider/impl/vllm_model_provider/model/reranker.py

Lines changed: 47 additions & 0 deletions
@@ -0,0 +1,47 @@
from typing import Sequence, Optional, Dict, Any

import cohere
from langchain_core.callbacks import Callbacks
from langchain_core.documents import BaseDocumentCompressor, Document

from models_provider.base_model_provider import MaxKBBaseModel


class VllmBgeReranker(MaxKBBaseModel, BaseDocumentCompressor):
    api_key: str
    api_url: str
    model: str
    params: dict
    client: Any = None

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.api_key = kwargs.get('api_key')
        self.model = kwargs.get('model')
        self.params = kwargs.get('params')
        self.api_url = kwargs.get('api_url')
        self.client = cohere.ClientV2(kwargs.get('api_key'), base_url=kwargs.get('api_url'))

    @staticmethod
    def is_cache_model():
        return False

    @staticmethod
    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
        return VllmBgeReranker(
            model=model_name,
            api_key=model_credential.get('api_key'),
            api_url=model_credential.get('api_url'),
            params=model_kwargs,
            **model_kwargs
        )

    def compress_documents(self, documents: Sequence[Document], query: str, callbacks: Optional[Callbacks] = None) -> \
            Sequence[Document]:
        if documents is None or len(documents) == 0:
            return []

        ds = [d.page_content for d in documents]
        result = self.client.rerank(model=self.model, query=query, documents=ds)
        return [Document(page_content=d.document.get('text'), metadata={'relevance_score': d.relevance_score}) for d in
                result.results]
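
For context, a minimal usage sketch of the new class (not part of this commit): the server address and API key below are hypothetical placeholders, and it assumes a vLLM instance serving bge-reranker-v2-m3 behind a rerank endpoint the Cohere client can talk to.

# Hypothetical usage sketch, not part of this commit.
# 'http://localhost:8000' and 'EMPTY' are placeholder credential values.
from langchain_core.documents import Document

from models_provider.impl.vllm_model_provider.model.reranker import VllmBgeReranker

reranker = VllmBgeReranker.new_instance(
    'RERANKER',                      # model_type; illustrative value, ignored by new_instance
    'bge-reranker-v2-m3',
    {'api_url': 'http://localhost:8000', 'api_key': 'EMPTY'},
)
docs = [
    Document(page_content='Paris is the capital of France.'),
    Document(page_content='The Great Wall is in northern China.'),
]
ranked = reranker.compress_documents(docs, 'What is the capital of France?')
for doc in ranked:
    print(doc.metadata['relevance_score'], doc.page_content)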

apps/models_provider/impl/vllm_model_provider/vllm_model_provider.py

Lines changed: 9 additions & 0 deletions
@@ -10,19 +10,22 @@
 from models_provider.impl.vllm_model_provider.credential.embedding import VllmEmbeddingCredential
 from models_provider.impl.vllm_model_provider.credential.image import VllmImageModelCredential
 from models_provider.impl.vllm_model_provider.credential.llm import VLLMModelCredential
+from models_provider.impl.vllm_model_provider.credential.reranker import VllmRerankerCredential
 from models_provider.impl.vllm_model_provider.credential.whisper_stt import VLLMWhisperModelCredential
 from models_provider.impl.vllm_model_provider.model.embedding import VllmEmbeddingModel
 from models_provider.impl.vllm_model_provider.model.image import VllmImage
 from models_provider.impl.vllm_model_provider.model.llm import VllmChatModel
 from maxkb.conf import PROJECT_DIR
 from django.utils.translation import gettext as _

+from models_provider.impl.vllm_model_provider.model.reranker import VllmBgeReranker
 from models_provider.impl.vllm_model_provider.model.whisper_sst import VllmWhisperSpeechToText

 v_llm_model_credential = VLLMModelCredential()
 image_model_credential = VllmImageModelCredential()
 embedding_model_credential = VllmEmbeddingCredential()
 whisper_model_credential = VLLMWhisperModelCredential()
+rerank_model_credential = VllmRerankerCredential()

 model_info_list = [
     ModelInfo('facebook/opt-125m', _('Facebook’s 125M parameter model'), ModelTypeConst.LLM, v_llm_model_credential,
@@ -50,6 +53,10 @@
     ModelInfo('whisper-large-v3', '', ModelTypeConst.STT, whisper_model_credential, VllmWhisperSpeechToText),
 ]

+reranker_model_info_list = [
+    ModelInfo('bge-reranker-v2-m3', '', ModelTypeConst.RERANKER, rerank_model_credential, VllmBgeReranker),
+]
+
 model_info_manage = (
     ModelInfoManage.builder()
     .append_model_info_list(model_info_list)
@@ -62,6 +69,8 @@
     .append_default_model_info(embedding_model_info_list[0])
     .append_model_info_list(whisper_model_info_list)
     .append_default_model_info(whisper_model_info_list[0])
+    .append_model_info_list(reranker_model_info_list)
+    .append_default_model_info(reranker_model_info_list[0])
     .build()
 )

pyproject.toml

Lines changed: 1 addition & 0 deletions
@@ -57,6 +57,7 @@ dependencies = [
     "python-daemon==3.1.2",
     "websockets==15.0.1",
     "pylint==3.3.7",
+    "cohere>=5.17.0",
 ]

 [tool.uv]
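
The new cohere dependency is pulled in only for the HTTP client that VllmBgeReranker wraps, not for Cohere's hosted API. A rough sketch of the underlying call, with a hypothetical local vLLM address and a placeholder key:

# Rough sketch of the client call behind VllmBgeReranker; not part of this commit.
# base_url points at an assumed local vLLM server; 'EMPTY' is a placeholder API key.
import cohere

client = cohere.ClientV2('EMPTY', base_url='http://localhost:8000')
response = client.rerank(
    model='bge-reranker-v2-m3',
    query='What is the capital of France?',
    documents=['Paris is the capital of France.', 'The Great Wall is in northern China.'],
)
for item in response.results:
    print(item.index, item.relevance_score)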

0 commit comments
