
Commit 7caaab8

feat: support Tencent Cloud
--story=1017838 --user=王孝刚 [Model Settings] Support large language models from Tencent Cloud issue#2230 https://www.tapd.cn/57709429/s/1653533
1 parent de1d6bd commit 7caaab8

9 files changed: +211 additions, -2 deletions


apps/locales/en_US/LC_MESSAGES/django.po

Lines changed: 3 additions & 0 deletions
@@ -6715,3 +6715,6 @@ msgstr ""
 #: .\apps\xpack\views\system_api_key_views.py:58
 msgid "Add personal system API_KEY"
 msgstr ""
+
+msgid "Tencent Cloud"
+msgstr ""

apps/locales/zh_CN/LC_MESSAGES/django.po

Lines changed: 3 additions & 0 deletions
@@ -6853,3 +6853,6 @@ msgstr "删除个人系统 API_KEY"
 #: .\apps\xpack\views\system_api_key_views.py:58
 msgid "Add personal system API_KEY"
 msgstr "添加个人系统 API_KEY"
+
+msgid "Tencent Cloud"
+msgstr "腾讯云"

apps/locales/zh_Hant/LC_MESSAGES/django.po

Lines changed: 3 additions & 0 deletions
@@ -6867,3 +6867,6 @@ msgstr "刪除個人系統 API_KEY"
 #: .\apps\xpack\views\system_api_key_views.py:58
 msgid "Add personal system API_KEY"
 msgstr "添加個人系統 API_KEY"
+
+msgid "Tencent Cloud"
+msgstr "腾訊云"

apps/setting/models_provider/constants/model_provider_constants.py

Lines changed: 3 additions & 0 deletions
@@ -21,6 +21,8 @@
 from setting.models_provider.impl.qwen_model_provider.qwen_model_provider import QwenModelProvider
 from setting.models_provider.impl.siliconCloud_model_provider.siliconCloud_model_provider import \
     SiliconCloudModelProvider
+from setting.models_provider.impl.tencent_cloud_model_provider.tencent_cloud_model_provider import \
+    TencentCloudModelProvider
 from setting.models_provider.impl.tencent_model_provider.tencent_model_provider import TencentModelProvider
 from setting.models_provider.impl.vllm_model_provider.vllm_model_provider import VllmModelProvider
 from setting.models_provider.impl.volcanic_engine_model_provider.volcanic_engine_model_provider import \
@@ -45,6 +47,7 @@ class ModelProvideConstants(Enum):
     model_gemini_provider = GeminiModelProvider()
     model_volcanic_engine_provider = VolcanicEngineModelProvider()
     model_tencent_provider = TencentModelProvider()
+    model_tencent_cloud_provider = TencentCloudModelProvider()
     model_aws_bedrock_provider = BedrockModelProvider()
     model_local_provider = LocalModelProvider()
     model_xinference_provider = XinferenceModelProvider()
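
With the enum entry in place, the rest of MaxKB can resolve the new provider by its key. A minimal sketch, assuming the usual Python Enum name lookup, that the LLM type code is the string 'LLM', and placeholder credential values; provider.get_model(...) is the same call the credential validation further down in this commit uses:

from setting.models_provider.constants.model_provider_constants import ModelProvideConstants

# Look up the provider instance registered above by its enum member name.
provider = ModelProvideConstants['model_tencent_cloud_provider'].value

# Placeholder credential values; in MaxKB they come from the model settings form.
model_credential = {'api_base': 'https://example.invalid/v1', 'api_key': 'sk-xxx'}

# Same call the credential's is_valid() makes below ('LLM' type code assumed).
chat_model = provider.get_model('LLM', 'deepseek-v3', model_credential)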
apps/setting/models_provider/impl/tencent_cloud_model_provider/__init__.py

Lines changed: 8 additions & 0 deletions

@@ -0,0 +1,8 @@
+# coding=utf-8
+"""
+@project: maxkb
+@Author:虎
+@file: __init__.py.py
+@date:2024/3/28 16:25
+@desc:
+"""
apps/setting/models_provider/impl/tencent_cloud_model_provider/credential/llm.py

Lines changed: 77 additions & 0 deletions

@@ -0,0 +1,77 @@
+# coding=utf-8
+"""
+@project: MaxKB
+@Author:虎
+@file: llm.py
+@date:2024/7/11 18:32
+@desc:
+"""
+from typing import Dict
+
+from django.utils.translation import gettext_lazy as _, gettext
+from langchain_core.messages import HumanMessage
+
+from common import forms
+from common.exception.app_exception import AppApiException
+from common.forms import BaseForm, TooltipLabel
+from setting.models_provider.base_model_provider import BaseModelCredential, ValidCode
+
+
+class TencentCloudLLMModelParams(BaseForm):
+    temperature = forms.SliderField(TooltipLabel(_('Temperature'),
+                                                 _('Higher values make the output more random, while lower values make it more focused and deterministic')),
+                                    required=True, default_value=0.7,
+                                    _min=0.1,
+                                    _max=1.0,
+                                    _step=0.01,
+                                    precision=2)
+
+    max_tokens = forms.SliderField(
+        TooltipLabel(_('Output the maximum Tokens'),
+                     _('Specify the maximum number of tokens that the model can generate')),
+        required=True, default_value=800,
+        _min=1,
+        _max=100000,
+        _step=1,
+        precision=0)
+
+
+class TencentCloudLLMModelCredential(BaseForm, BaseModelCredential):
+
+    def is_valid(self, model_type: str, model_name, model_credential: Dict[str, object], model_params, provider,
+                 raise_exception=False):
+        model_type_list = provider.get_model_type_list()
+        if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))):
+            raise AppApiException(ValidCode.valid_error.value,
+                                  gettext('{model_type} Model type is not supported').format(model_type=model_type))
+
+        for key in ['api_base', 'api_key']:
+            if key not in model_credential:
+                if raise_exception:
+                    raise AppApiException(ValidCode.valid_error.value, gettext('{key} is required').format(key=key))
+                else:
+                    return False
+        try:
+
+            model = provider.get_model(model_type, model_name, model_credential, **model_params)
+            model.invoke([HumanMessage(content=gettext('Hello'))])
+        except Exception as e:
+            if isinstance(e, AppApiException):
+                raise e
+            if raise_exception:
+                raise AppApiException(ValidCode.valid_error.value,
+                                      gettext(
+                                          'Verification failed, please check whether the parameters are correct: {error}').format(
+                                          error=str(e)))
+            else:
+                return False
+        return True
+
+    def encryption_dict(self, model: Dict[str, object]):
+        return {**model, 'api_key': super().encryption(model.get('api_key', ''))}
+
+    api_base = forms.TextInputField('API URL', required=True)
+    api_key = forms.PasswordInputField('API Key', required=True)
+
+    def get_model_params_setting_form(self, model_name):
+        return TencentCloudLLMModelParams()
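
For illustration, a minimal sketch of exercising this credential form directly; the endpoint, key, and the 'LLM' type code are assumptions, and is_valid() performs a live check by sending a 'Hello' message to the configured endpoint:

from setting.models_provider.impl.tencent_cloud_model_provider.credential.llm import TencentCloudLLMModelCredential
from setting.models_provider.impl.tencent_cloud_model_provider.tencent_cloud_model_provider import \
    TencentCloudModelProvider

credential_form = TencentCloudLLMModelCredential()
provider = TencentCloudModelProvider()

# Placeholder credential; both 'api_base' and 'api_key' are required by is_valid().
model_credential = {'api_base': 'https://example.invalid/v1', 'api_key': 'sk-xxx'}

# With raise_exception=False a failed check returns False instead of raising AppApiException.
ok = credential_form.is_valid('LLM', 'deepseek-v3', model_credential, {}, provider, raise_exception=False)

# The api_key is masked via encryption_dict() before the credential is persisted.
stored = credential_form.encryption_dict(model_credential)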
apps/setting/models_provider/impl/tencent_cloud_model_provider/model/llm.py

Lines changed: 53 additions & 0 deletions

@@ -0,0 +1,53 @@
+# coding=utf-8
+"""
+@project: maxkb
+@Author:虎
+@file: llm.py
+@date:2024/4/18 15:28
+@desc:
+"""
+from typing import List, Dict
+
+from langchain_core.messages import BaseMessage, get_buffer_string
+
+from common.config.tokenizer_manage_config import TokenizerManage
+from setting.models_provider.base_model_provider import MaxKBBaseModel
+from setting.models_provider.impl.base_chat_open_ai import BaseChatOpenAI
+
+
+def custom_get_token_ids(text: str):
+    tokenizer = TokenizerManage.get_tokenizer()
+    return tokenizer.encode(text)
+
+
+class TencentCloudChatModel(MaxKBBaseModel, BaseChatOpenAI):
+
+    @staticmethod
+    def is_cache_model():
+        return False
+
+    @staticmethod
+    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
+        optional_params = MaxKBBaseModel.filter_optional_params(model_kwargs)
+        azure_chat_open_ai = TencentCloudChatModel(
+            model=model_name,
+            openai_api_base=model_credential.get('api_base'),
+            openai_api_key=model_credential.get('api_key'),
+            **optional_params,
+            custom_get_token_ids=custom_get_token_ids
+        )
+        return azure_chat_open_ai
+
+    def get_num_tokens_from_messages(self, messages: List[BaseMessage]) -> int:
+        try:
+            return super().get_num_tokens_from_messages(messages)
+        except Exception as e:
+            tokenizer = TokenizerManage.get_tokenizer()
+            return sum([len(tokenizer.encode(get_buffer_string([m]))) for m in messages])
+
+    def get_num_tokens(self, text: str) -> int:
+        try:
+            return super().get_num_tokens(text)
+        except Exception as e:
+            tokenizer = TokenizerManage.get_tokenizer()
+            return len(tokenizer.encode(text))
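
A usage sketch for the wrapper above. The base URL is an assumption: Tencent Cloud exposes these deepseek models behind an OpenAI-compatible endpoint, but the exact URL depends on the service you enable, so treat both it and the key as placeholders:

from langchain_core.messages import HumanMessage

from setting.models_provider.impl.tencent_cloud_model_provider.model.llm import TencentCloudChatModel

# Assumed OpenAI-compatible endpoint and placeholder key; temperature/max_tokens are routed
# through MaxKBBaseModel.filter_optional_params() inside new_instance() above.
chat_model = TencentCloudChatModel.new_instance(
    'LLM',
    'deepseek-v3',
    {'api_base': 'https://api.lkeap.cloud.tencent.com/v1', 'api_key': 'sk-xxx'},
    temperature=0.7,
    max_tokens=800,
)

# Same smoke test the credential validation performs.
print(chat_model.invoke([HumanMessage(content='Hello')]).content)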
apps/setting/models_provider/impl/tencent_cloud_model_provider/tencent_cloud_model_provider.py

Lines changed: 61 additions & 0 deletions

@@ -0,0 +1,61 @@
+# coding=utf-8
+"""
+@project: maxkb
+@Author:虎
+@file: openai_model_provider.py
+@date:2024/3/28 16:26
+@desc:
+"""
+import os
+
+from common.util.file_util import get_file_content
+from setting.models_provider.base_model_provider import IModelProvider, ModelProvideInfo, ModelInfo, \
+    ModelTypeConst, ModelInfoManage
+from setting.models_provider.impl.openai_model_provider.credential.embedding import OpenAIEmbeddingCredential
+from setting.models_provider.impl.openai_model_provider.credential.image import OpenAIImageModelCredential
+from setting.models_provider.impl.openai_model_provider.credential.llm import OpenAILLMModelCredential
+from setting.models_provider.impl.openai_model_provider.credential.stt import OpenAISTTModelCredential
+from setting.models_provider.impl.openai_model_provider.credential.tti import OpenAITextToImageModelCredential
+from setting.models_provider.impl.openai_model_provider.credential.tts import OpenAITTSModelCredential
+from setting.models_provider.impl.openai_model_provider.model.embedding import OpenAIEmbeddingModel
+from setting.models_provider.impl.openai_model_provider.model.image import OpenAIImage
+from setting.models_provider.impl.openai_model_provider.model.llm import OpenAIChatModel
+from setting.models_provider.impl.openai_model_provider.model.stt import OpenAISpeechToText
+from setting.models_provider.impl.openai_model_provider.model.tti import OpenAITextToImage
+from setting.models_provider.impl.openai_model_provider.model.tts import OpenAITextToSpeech
+from setting.models_provider.impl.tencent_cloud_model_provider.credential.llm import TencentCloudLLMModelCredential
+from setting.models_provider.impl.tencent_cloud_model_provider.model.llm import TencentCloudChatModel
+from smartdoc.conf import PROJECT_DIR
+from django.utils.translation import gettext_lazy as _
+
+openai_llm_model_credential = TencentCloudLLMModelCredential()
+model_info_list = [
+    ModelInfo('deepseek-v3', '', ModelTypeConst.LLM,
+              openai_llm_model_credential, TencentCloudChatModel
+              ),
+    ModelInfo('deepseek-r1', '', ModelTypeConst.LLM,
+              openai_llm_model_credential, TencentCloudChatModel
+              ),
+]
+
+model_info_manage = (
+    ModelInfoManage.builder()
+    .append_model_info_list(model_info_list)
+    .append_default_model_info(
+        ModelInfo('deepseek-v3', _('The latest gpt-3.5-turbo, updated with OpenAI adjustments'), ModelTypeConst.LLM,
+                  openai_llm_model_credential, TencentCloudChatModel
+                  ))
+    .build()
+)
+
+
+class TencentCloudModelProvider(IModelProvider):
+
+    def get_model_info_manage(self):
+        return model_info_manage
+
+    def get_model_provide_info(self):
+        return ModelProvideInfo(provider='model_tencent_cloud_provider', name=_('Tencent Cloud'), icon=get_file_content(
+            os.path.join(PROJECT_DIR, "apps", "setting", 'models_provider', 'impl', 'tencent_cloud_model_provider',
+                         'icon',
+                         'tencent_cloud_icon_svg')))
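
A small sketch of poking at this provider directly, using only what is visible in this commit; get_model_type_list() is not defined here, so it is presumably inherited from IModelProvider (the credential's is_valid() above relies on it):

from setting.models_provider.impl.tencent_cloud_model_provider.tencent_cloud_model_provider import \
    TencentCloudModelProvider

provider = TencentCloudModelProvider()

# Model metadata registered above (deepseek-v3 / deepseek-r1) via ModelInfoManage.builder().
model_info_manage = provider.get_model_info_manage()

# Supported model types; is_valid() filters on the 'value' key of each entry.
print(provider.get_model_type_list())

# Display metadata (provider key, localized name, icon) shown in the model settings UI.
provide_info = provider.get_model_provide_info()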

apps/setting/models_provider/impl/volcanic_engine_model_provider/volcanic_engine_model_provider.py

Lines changed: 0 additions & 2 deletions
@@ -11,9 +11,7 @@
 from common.util.file_util import get_file_content
 from setting.models_provider.base_model_provider import IModelProvider, ModelProvideInfo, ModelInfo, ModelTypeConst, \
     ModelInfoManage
-from setting.models_provider.impl.openai_model_provider.credential.embedding import OpenAIEmbeddingCredential
 from setting.models_provider.impl.openai_model_provider.credential.llm import OpenAILLMModelCredential
-from setting.models_provider.impl.openai_model_provider.model.embedding import OpenAIEmbeddingModel
 from setting.models_provider.impl.volcanic_engine_model_provider.credential.embedding import VolcanicEmbeddingCredential
 from setting.models_provider.impl.volcanic_engine_model_provider.credential.image import \
     VolcanicEngineImageModelCredential
