Skip to content

Commit 18b40f3

Browse files
committed
fix: remove unnecessary print statement from model invocation
1 parent 05b59a4 commit 18b40f3

File tree

2 files changed

+3
-4
lines changed

2 files changed

+3
-4
lines changed

apps/models_provider/impl/base_chat_open_ai.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
from langchain_openai.chat_models.base import _create_usage_metadata
1717

1818
from common.config.tokenizer_manage_config import TokenizerManage
19-
19+
from common.utils.logger import maxkb_logger
2020

2121
def custom_get_token_ids(text: str):
2222
tokenizer = TokenizerManage.get_tokenizer()
@@ -103,13 +103,13 @@ def get_num_tokens_from_messages(
103103
future = executor.submit(super().get_num_tokens_from_messages, messages, tools)
104104
try:
105105
response = future.result()
106-
print("请求成功(未超时)")
106+
maxkb_logger.info("请求成功(未超时)")
107107
return response
108108
except Exception as e:
109109
if isinstance(e, ReadTimeout):
110110
raise # 继续抛出
111111
else:
112-
print("except:", e)
112+
maxkb_logger.error("except:", e)
113113
tokenizer = TokenizerManage.get_tokenizer()
114114
return sum([len(tokenizer.encode(get_buffer_string([m]))) for m in messages])
115115

apps/models_provider/impl/xf_model_provider/model/stt.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,6 @@ async def send(self, ws, file):
159159
"audio": str(base64.b64encode(buf), 'utf-8'),
160160
"encoding": "lame"}
161161
}
162-
print(d)
163162
d = json.dumps(d)
164163
await ws.send(d)
165164
status = STATUS_CONTINUE_FRAME

0 commit comments

Comments (0)