diff --git a/apps/models_provider/constants/model_provider_constants.py b/apps/models_provider/constants/model_provider_constants.py
index 58c06fed9e7..80b8752ff8b 100644
--- a/apps/models_provider/constants/model_provider_constants.py
+++ b/apps/models_provider/constants/model_provider_constants.py
@@ -7,6 +7,7 @@
 from models_provider.impl.aws_bedrock_model_provider.aws_bedrock_model_provider import BedrockModelProvider
 from models_provider.impl.azure_model_provider.azure_model_provider import AzureModelProvider
 from models_provider.impl.deepseek_model_provider.deepseek_model_provider import DeepSeekModelProvider
+from models_provider.impl.bailing_model_provider.bailing_model_provider import BailingModelProvider
 from models_provider.impl.gemini_model_provider.gemini_model_provider import GeminiModelProvider
 from models_provider.impl.kimi_model_provider.kimi_model_provider import KimiModelProvider
 from models_provider.impl.local_model_provider.local_model_provider import LocalModelProvider
@@ -46,3 +47,4 @@ class ModelProvideConstants(Enum):
     model_anthropic_provider = AnthropicModelProvider()
     model_siliconCloud_provider = SiliconCloudModelProvider()
     model_regolo_provider = RegoloModelProvider()
+    model_bailing_provider = BailingModelProvider()
diff --git a/apps/models_provider/impl/bailing_model_provider/__init__.py b/apps/models_provider/impl/bailing_model_provider/__init__.py
new file mode 100644
index 00000000000..5b710f7161d
--- /dev/null
+++ b/apps/models_provider/impl/bailing_model_provider/__init__.py
@@ -0,0 +1,8 @@
+# coding=utf-8
+"""
+    @project: maxkb
+    @Author: Su Shi
+    @file: __init__.py
+    @date: 2025/11/25 18:00
+    @desc: Bailing Model Provider Package
+"""
diff --git a/apps/models_provider/impl/bailing_model_provider/bailing_model_provider.py b/apps/models_provider/impl/bailing_model_provider/bailing_model_provider.py
new file mode 100644
index 00000000000..8502f67d129
--- /dev/null
+++ b/apps/models_provider/impl/bailing_model_provider/bailing_model_provider.py
@@ -0,0 +1,48 @@
+# coding=utf-8
+"""
+    @project: maxkb
+    @Author: Su Shi
+    @file: bailing_model_provider.py
+    @date: 2025/11/25 18:00
+    @desc: Bailing Model Provider Implementation
+"""
+import os
+
+from common.utils.common import get_file_content
+from models_provider.base_model_provider import IModelProvider, ModelProvideInfo, ModelInfo, \
+    ModelTypeConst, ModelInfoManage
+from models_provider.impl.bailing_model_provider.credential.llm import BailingLLMModelCredential
+from models_provider.impl.bailing_model_provider.model.llm import BailingChatModel
+from maxkb.conf import PROJECT_DIR
+from django.utils.translation import gettext_lazy as _
+
+# LLM model credential
+bailing_llm_model_credential = BailingLLMModelCredential()
+
+# LLM model info list - only Ling-1T and Ring-1T are supported
+llm_model_info_list = [
+    ModelInfo('Ling-1T', _('Ling-1T is the flagship trillion-parameter large language model of the Bailing MoE architecture series, pre-trained on 20T+ tokens of high-quality corpora. It delivers strong results across a wide range of benchmarks, making it an ideal open-source foundation model for next-generation applications.'), ModelTypeConst.LLM,
+              bailing_llm_model_credential, BailingChatModel),
+    ModelInfo('Ring-1T', _("Ring-1T is the world's first open-source trillion-parameter reasoning model and the largest, most capable flagship of the Bailing MoE Ring reasoning series. Trained with RLVR using the icepop method, it has strong natural-language reasoning capabilities and achieves SOTA results on benchmarks such as AIME 25, CodeForces, HMMT25, LiveCodeBench, and ARC-AGI-v1, ranking first among open-source models on multiple metrics."), ModelTypeConst.LLM,
+              bailing_llm_model_credential, BailingChatModel),
+]
+
+# Model info manager
+model_info_manage = (
+    ModelInfoManage.builder()
+    .append_model_info_list(llm_model_info_list)
+    .append_default_model_info(llm_model_info_list[0])  # default LLM model
+    .build()
+)
+
+
+class BailingModelProvider(IModelProvider):
+
+    def get_model_info_manage(self):
+        return model_info_manage
+
+    def get_model_provide_info(self):
+        return ModelProvideInfo(provider='model_bailing_provider', name=_('Bailing'), icon=get_file_content(
+            os.path.join(PROJECT_DIR, 'apps', 'models_provider', 'impl', 'bailing_model_provider', 'icon',
+                         'bailing_icon_svg')))
+
diff --git a/apps/models_provider/impl/bailing_model_provider/credential/__init__.py b/apps/models_provider/impl/bailing_model_provider/credential/__init__.py
new file mode 100644
index 00000000000..8481001f6b8
--- /dev/null
+++ b/apps/models_provider/impl/bailing_model_provider/credential/__init__.py
@@ -0,0 +1,8 @@
+# coding=utf-8
+"""
+    @project: maxkb
+    @Author: Su Shi
+    @file: __init__.py
+    @date: 2025/11/25 18:00
+    @desc: Bailing Model Provider Credential Package
+"""
diff --git a/apps/models_provider/impl/bailing_model_provider/credential/llm.py b/apps/models_provider/impl/bailing_model_provider/credential/llm.py
new file mode 100644
index 00000000000..d5182822741
--- /dev/null
+++ b/apps/models_provider/impl/bailing_model_provider/credential/llm.py
@@ -0,0 +1,112 @@
+# coding=utf-8
+"""
+    @project: maxkb
+    @Author: Su Shi
+    @file: llm.py
+    @date: 2025/11/25 18:00
+    @desc: Bailing LLM Model Credential Implementation
+"""
+from typing import Dict
+
+from langchain_core.messages import HumanMessage
+from django.utils.translation import gettext_lazy as _, gettext
+
+from common import forms
+from common.exception.app_exception import AppApiException
+from common.forms import BaseForm, TooltipLabel
+from models_provider.base_model_provider import BaseModelCredential, ValidCode
+from common.utils.logger import maxkb_logger
+
+
+class BailingLLMModelParams(BaseForm):
+    temperature = forms.SliderField(
+        TooltipLabel(
+            _('Temperature'),
+            _('Higher values make the output more random, while lower values make it more focused and deterministic')
+        ),
+        required=True,
+        default_value=0.7,
+        _min=0.1,
+        _max=1.0,
+        _step=0.01,
+        precision=2
+    )
+
+    max_tokens = forms.SliderField(
+        TooltipLabel(
+            _('Output the maximum Tokens'),
+            _('Specify the maximum number of tokens that the model can generate.')
+        ),
+        required=True,
+        default_value=800,
+        _min=1,
+        _max=100000,
+        _step=1,
+        precision=0
+    )
+
+
+class BailingLLMModelCredential(BaseForm, BaseModelCredential):
+    api_base = forms.TextInputField(_('API Base'), required=True, default_value='https://api.tbox.cn/api/llm/v1')
+    api_key = forms.PasswordInputField(_('API Key'), required=True)
+
+    def is_valid(
+            self,
+            model_type: str,
+            model_name: str,
+            model_credential: Dict[str, object],
+            model_params: dict,
+            provider,
+            raise_exception: bool = False
+    ) -> bool:
+        model_type_list = provider.get_model_type_list()
+        if not any(mt.get('value') == model_type for mt in model_type_list):
+            raise AppApiException(
+                ValidCode.valid_error.value,
+                gettext('{model_type} Model type is not supported').format(model_type=model_type)
+            )
+
+        # Validate the model name against the two supported models
+        allowed_models = ['Ling-1T', 'Ring-1T']
+        if model_name not in allowed_models:
+            raise AppApiException(
+                ValidCode.valid_error.value,
+                gettext('{model_name} Model is not supported').format(model_name=model_name)
+            )
+
+        for key in ['api_base', 'api_key']:
+            if key not in model_credential:
+                if raise_exception:
+                    raise AppApiException(
+                        ValidCode.valid_error.value,
+                        gettext('{key} is required').format(key=key)
+                    )
+                return False
+
+        try:
+            model = provider.get_model(model_type, model_name, model_credential, **model_params)
+            if model_params.get('stream'):
+                for res in model.stream([HumanMessage(content=gettext('Hello'))]):
+                    pass
+            else:
+                model.invoke([HumanMessage(content=gettext('Hello'))])
+        except Exception as e:
+            maxkb_logger.error(f'Exception: {e}', exc_info=True)
+            if isinstance(e, AppApiException):
+                raise e
+            if raise_exception:
+                raise AppApiException(
+                    ValidCode.valid_error.value,
+                    gettext('Verification failed, please check whether the parameters are correct: {error}').format(
+                        error=str(e)
+                    )
+                )
+            return False
+
+        return True
+
+    def encryption_dict(self, model: Dict[str, object]) -> Dict[str, object]:
+        return {**model, 'api_key': super().encryption(model.get('api_key', ''))}
+
+    def get_model_params_setting_form(self, model_name: str) -> BailingLLMModelParams:
+        return BailingLLMModelParams()
diff --git a/apps/models_provider/impl/bailing_model_provider/model/__init__.py b/apps/models_provider/impl/bailing_model_provider/model/__init__.py
new file mode 100644
index 00000000000..ab0ceac3293
--- /dev/null
+++ b/apps/models_provider/impl/bailing_model_provider/model/__init__.py
@@ -0,0 +1,8 @@
+# coding=utf-8
+"""
+    @project: maxkb
+    @Author: Su Shi
+    @file: __init__.py
+    @date: 2025/11/25 18:00
+    @desc: Bailing Model Provider Model Package
+"""
diff --git a/apps/models_provider/impl/bailing_model_provider/model/llm.py b/apps/models_provider/impl/bailing_model_provider/model/llm.py
new file mode 100644
index 00000000000..22f9270de2a
--- /dev/null
+++ b/apps/models_provider/impl/bailing_model_provider/model/llm.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# -*- coding: UTF-8 -*-
+"""
+    @project: maxkb
+    @Author: Su Shi
+    @file: llm.py
+    @date: 2025/11/25 18:00
+    @desc: Bailing Chat Model Implementation
+"""
+from typing import Dict
+
+from models_provider.base_model_provider import MaxKBBaseModel
+from models_provider.impl.base_chat_open_ai import BaseChatOpenAI
+
+
+class BailingChatModel(MaxKBBaseModel, BaseChatOpenAI):
+    @staticmethod
+    def is_cache_model():
+        return False
+
+    @staticmethod
+    def new_instance(model_type, model_name, model_credential: Dict[str, object], **model_kwargs):
+        # Keep only the recognized optional params (e.g. temperature, max_tokens)
+        # and forward them inside the request body of the OpenAI-compatible API.
+        optional_params = MaxKBBaseModel.filter_optional_params(model_kwargs)
+        return BailingChatModel(
+            model=model_name,
+            openai_api_base=model_credential.get('api_base'),
+            openai_api_key=model_credential.get('api_key'),
+            streaming=True,
+            extra_body=optional_params
+        )
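
Reviewer notes (not part of the patch)
--------------------------------------

The constants hunk is the only registration step MaxKB needs: the enum member name is the provider id stored with each model record, so it must match the provider= string returned by get_model_provide_info(). A minimal lookup sketch, assuming a configured MaxKB/Django environment; it uses only the methods this patch defines:

from models_provider.constants.model_provider_constants import ModelProvideConstants

# Resolve the provider instance the way MaxKB does: by enum member name.
# 'model_bailing_provider' must equal the provider= value in ModelProvideInfo.
bailing = ModelProvideConstants['model_bailing_provider'].value
info = bailing.get_model_provide_info()   # name: 'Bailing', icon: SVG source
manage = bailing.get_model_info_manage()  # holds the Ling-1T / Ring-1T entries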
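
The credential check in credential/llm.py is not a dry run: it instantiates the model through the provider and sends a one-message completion. A hedged smoke test of that path; the 'LLM' type string and the call shape are assumptions based on how MaxKB drives is_valid elsewhere, and the key is a placeholder:

from models_provider.impl.bailing_model_provider.bailing_model_provider import bailing_llm_model_credential

credential = {
    'api_base': 'https://api.tbox.cn/api/llm/v1',  # default from this patch
    'api_key': 'sk-REPLACE_ME',                    # placeholder, not a real key
}
ok = bailing_llm_model_credential.is_valid(
    'LLM', 'Ling-1T', credential,
    {'temperature': 0.7, 'max_tokens': 800},  # defaults from BailingLLMModelParams
    bailing,                                  # provider instance from the sketch above
    raise_exception=False,
)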
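
model/llm.py keeps the adapter thin: new_instance() builds an OpenAI-compatible client and forwards the filtered optional params via extra_body, i.e. inside the request body rather than as top-level client kwargs, which suits an endpoint that only speaks the chat-completions wire format. An invocation sketch under the same assumptions (placeholder key, Django settings loaded):

from langchain_core.messages import HumanMessage
from models_provider.impl.bailing_model_provider.model.llm import BailingChatModel

# streaming=True is hard-coded in new_instance(); invoke() still returns a
# single message because LangChain aggregates the streamed chunks.
chat = BailingChatModel.new_instance('LLM', 'Ring-1T', credential,
                                     temperature=0.7, max_tokens=800)
print(chat.invoke([HumanMessage(content='Hello')]).content)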