
Commit 487355f

♻️ Refactor: Partial constant extraction in the backend
1 parent 81fd5c0 commit 487355f

File tree

2 files changed: +8 −4 lines changed


backend/consts/const.py

Lines changed: 4 additions & 0 deletions
@@ -188,6 +188,10 @@
     "SYSTEM": "system"
 }
 
+# Knowledge summary max token limits
+KNOWLEDGE_SUMMARY_MAX_TOKENS_ZH = 300
+KNOWLEDGE_SUMMARY_MAX_TOKENS_EN = 120
+
 # Host Configuration Constants
 LOCALHOST_IP = "127.0.0.1"
 LOCALHOST_NAME = "localhost"

backend/services/elasticsearch_service.py

Lines changed: 4 additions & 4 deletions
@@ -26,7 +26,7 @@
 from openai import OpenAI
 from openai.types.chat import ChatCompletionMessageParam
 
-from consts.const import ES_API_KEY, ES_HOST, LANGUAGE, MODEL_CONFIG_MAPPING
+from consts.const import ES_API_KEY, ES_HOST, LANGUAGE, MODEL_CONFIG_MAPPING, MESSAGE_ROLE, KNOWLEDGE_SUMMARY_MAX_TOKENS_ZH, KNOWLEDGE_SUMMARY_MAX_TOKENS_EN
 from database.attachment_db import delete_file
 from database.knowledge_db import (
     create_knowledge_record,
@@ -69,8 +69,8 @@ def generate_knowledge_summary_stream(keywords: str, language: str, tenant_id: s
 
     # Build messages
     messages: List[ChatCompletionMessageParam] = [
-        {"role": "system", "content": system_prompt},
-        {"role": "user", "content": user_prompt}
+        {"role": MESSAGE_ROLE["SYSTEM"], "content": system_prompt},
+        {"role": MESSAGE_ROLE["USER"], "content": user_prompt}
     ]
 
     # Get model configuration from tenant config manager
@@ -83,7 +83,7 @@ def generate_knowledge_summary_stream(keywords: str, language: str, tenant_id: s
 
     try:
         # Create stream chat completion request
-        max_tokens = 300 if language == LANGUAGE["ZH"] else 120
+        max_tokens = KNOWLEDGE_SUMMARY_MAX_TOKENS_ZH if language == LANGUAGE["ZH"] else KNOWLEDGE_SUMMARY_MAX_TOKENS_EN
         stream = client.chat.completions.create(
             model=get_model_name_from_config(model_config) if model_config.get(
                 "model_name") else "",  # use model name from config
