Skip to content

Commit a5517a1

Browse files
committed
更新版本V0.0.66
更新版本V0.0.66
1 parent e8259eb commit a5517a1

File tree

29 files changed

+3677
-558
lines changed

29 files changed

+3677
-558
lines changed

README.md

Lines changed: 15 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,20 +28,31 @@
2828

2929
# 版本更新介绍
3030

31-
> 最新更新:2025-09-23
31+
> 最新更新:2025-10-12
3232
33-
> 0.0.65 公测版本介绍 如果你要使用本插件请务必进我们的官方 QQ 群(1018231382)!
33+
> 0.0.66 公测版本介绍 如果你要使用本插件请务必进我们的官方 QQ 群(1018231382)!
3434
>
3535
> 1. 修复了一些已知的BUG
3636
>
37-
> 2. 合并PR [#40](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/40) 感谢各位小伙伴的帮助
37+
> 2. 合并PR [#44](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/44) 感谢各位小伙伴的帮助
3838
>
39-
> 3. 合并PR [#43](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/43) 感谢各位小伙伴的帮助
39+
> 3. 新增功能:在Tag的控制栏中可以一键收藏Tag,还添加了添加换行符的功能
4040
>
41+
> 4. 翻译大修改!本次更新将翻译功能全部重改了,使用AI进行翻译,对接硅基AI进行调用API翻译,如果你想要对接其它的AI平台你可以进群提交建议,后续更新会添加新的平台
42+
>
4143
4244
<details>
4345
<summary>点击查看往期更多更新内容</summary>
4446

47+
> 0.0.65 公测版本介绍 2025-09-23
48+
>
49+
> 1. 修复了一些已知的BUG
50+
>
51+
> 2. 合并PR [#40](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/40) 感谢各位小伙伴的帮助
52+
>
53+
> 3. 合并PR [#43](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/43) 感谢各位小伙伴的帮助
54+
>
55+
4556
> 0.0.64 公测版本介绍 2025-09-14
4657
>
4758
> 1. 修复了已知BUG(Comfyui中执行栏拖拽图标错位问题)

README_EN.md

Lines changed: 16 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,20 +24,31 @@ Due to limited personal time, the frequency of updating plug-ins will not be ver
2424

2525
# Version update introduction
2626

27-
> Last updated: 2025-09-23
27+
> Last updated: 2025-10-12
2828
29-
> 0.0.65 Public Beta Version Introduction: If you want to use this plugin, please be sure to join our official QQ group (1018231382)!
29+
> 0.0.66 Public Beta Version Introduction: If you want to use this plugin, please be sure to join our official QQ group (1018231382)!
3030
>
31-
> 1. Fixed a known BUG
31+
> 1. Fixed some known bugs
3232
>
33-
> 2. Merge PR [#40](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/40) thank you for your friend's help
33+
> 2. Merge PR [#44](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/44) thank you for your friend's help
3434
>
35-
> 3. Merge PR [#43](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/43) thank you for your friend's help
35+
> 3. New features: Tags can be bookmarked with one click in the Tag control bar. The ability to add a newline character has also been added
36+
>
37+
> 4. Translation overhaul! This update reworks the translation feature entirely: translation is now performed by AI through the SiliconFlow API. If you would like support for other AI platforms, please submit suggestions in the QQ group, and new platforms will be added in future updates
3638
>
3739
3840
<details>
3941
<summary>Click here for more updates from the past</summary>
4042

43+
> 0.0.65 Public Beta Version 2025-09-23
44+
>
45+
> 1. Fixed a known BUG
46+
>
47+
> 2. Merge PR [#40](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/40) thank you for your friend's help
48+
>
49+
> 3. Merge PR [#43](https://github.com/weilin9999/WeiLin-Comfyui-Tools/pull/43) thank you for your friend's help
50+
>
51+
4152
> 0.0.64 Public Beta Version 2025-09-14
4253
>
4354
> 1. Fixed a known BUG (the issue of misaligned drag ICONS in the execution bar in Comfyui)

__init__.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -257,7 +257,7 @@ def INPUT_TYPES(self):
257257
"lora_str": ("STRING", {
258258
"multiline": True,
259259
"default": "",
260-
"placeholder": placeholder_lora_text,
260+
"placeholder": "lora info box",
261261
}),
262262
"temp_lora_str": ("STRING", {
263263
"multiline": True,
@@ -318,6 +318,8 @@ def load_lora_ing(self, clip=None, model=None, lora_str="", temp_lora_str=""):
318318

319319
model_lora_secondA, clip_lora_secondA = load_lora_for_models(
320320
model_lora_secondA, clip_lora_secondA, lora, strength_model, strength_clip)
321+
else:
322+
print("Lora堆没有可用的Lora信息")
321323

322324
return (clip_lora_secondA, model_lora_secondA)
323325
# return (model_lora_second, clip_lora_second)

app/server/ai_server/__init__.py

Whitespace-only changes.
Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
# 硅基AI对接
2+
import requests
3+
from ..user_init.user_init import get_ai_info_setting
4+
5+
_LANG_NAME = {
6+
"zh": "Chinese ", "zh_CN": " Chinese ", "zh_TW": "Chinese (traditional) ",
7+
"en": "English ", "ja": " Japanese ", "ko": "Korean ",
8+
"fr": "French ", "de": " German ", "es": "Spanish ",
9+
"ru": "Russian ", "it": " Italian ", "pt": "Portuguese"
10+
}
11+
12+
13+
def _lang_to_name(code: str) -> str:
14+
if not code:
15+
return "Chinese"
16+
return _LANG_NAME.get(code, code)
17+
18+
19+
# SiliconFlow AI translation endpoint.
async def translateObject(objectData: str, target_lang_code: str = "zh") -> str:
    """Translate the ``text`` fields of a JSON payload via the SiliconFlow chat API.

    Args:
        objectData: JSON string whose ``text`` fields the model translates,
            writing the results into the matching ``translate`` fields.
        target_lang_code: Target language code (see ``_LANG_NAME``).

    Returns:
        The model's message content — the JSON string with ``translate`` filled in.

    Raises:
        requests.exceptions.HTTPError: when the API returns a non-2xx status.
        requests.exceptions.RequestException: on network failure or timeout.
    """
    ai_info_setting = get_ai_info_setting()
    api_key = ai_info_setting.get("api_key", "")
    # Normalize the base URL so a user-supplied trailing slash does not
    # produce "…//chat/completions".
    base_url = ai_info_setting.get(
        "base_url", "https://api.siliconflow.cn/v1").rstrip("/")
    model = ai_info_setting.get("model", "THUDM/glm-4-9b-chat")

    print("💡[WeiLin-Comfyui-Tools] 硅基AI翻译接口-使用模型:", model, " 目标语种:", target_lang_code, " 正在翻译中...")

    url = f"{base_url}/chat/completions"

    payload = {
        "model": model,
        "messages": [
            {
                "role": "system",
                "content": f"You are an expert in data translation processing. translate the text field of the JSON string data passed by the user into {_lang_to_name(target_lang_code)} and fill it in the corresponding translate field. You only need to return the corresponding JSON string data and do not modify any other data or parameters"
            },
            {
                "content": objectData,
                "role": "user"
            }
        ],
        "stream": False,
        "max_tokens": 4096,
        "enable_thinking": False,
        "thinking_budget": 4096,
        "min_p": 0.05,
        "stop": None,
        "temperature": 0,
        "top_p": 0.7,
        "top_k": 50,
        "frequency_penalty": 0.5,
        "n": 1,
        # Force a JSON object back so the caller can parse the result directly.
        "response_format": {"type": "json_object"},
        "tools": []
    }
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json"
    }

    # NOTE(review): requests is a blocking client inside an async function,
    # so this stalls the event loop for the duration of the call. Consider
    # aiohttp or asyncio.to_thread if that becomes a problem.
    response = requests.post(url, json=payload, headers=headers, timeout=120)
    # Fail loudly on API errors instead of raising a confusing KeyError below.
    response.raise_for_status()
    dataResponse = response.json()
    usage = dataResponse.get("usage", {})
    # Bug fix: the PromptToken line previously printed total_tokens.
    print(f"💡[WeiLin-Comfyui-Tools] 硅基AI翻译接口-翻译完成,以下是使用信息:\n - PromptToken:{usage.get('prompt_tokens', 0)}\n - CompletionTokens:{usage.get('completion_tokens', 0)}\n - TotalTokens:{usage.get('total_tokens', 0)}")
    return dataResponse["choices"][0]["message"]["content"]
66+
67+
68+
# SiliconFlow model-list endpoint.
def getModelList() -> dict:
    """Fetch the list of text models available to the configured API key.

    Returns:
        The raw JSON response from the SiliconFlow ``/models`` endpoint.

    Raises:
        requests.exceptions.RequestException: on network failure or timeout.
    """
    ai_info_setting = get_ai_info_setting()
    # Use the same tolerant defaults as translateObject so a partially filled
    # settings dict (e.g. hand-edited config) does not raise KeyError.
    api_key = ai_info_setting.get("api_key", "")
    base_url = ai_info_setting.get(
        "base_url", "https://api.siliconflow.cn/v1").rstrip("/")

    url = f"{base_url}/models"

    headers = {"Authorization": f"Bearer {api_key}"}

    # Only text models are relevant for translation.
    querystring = {"type": "text"}

    response = requests.get(url, headers=headers, params=querystring, timeout=60)

    return response.json()

app/server/prompt_server.py

Lines changed: 44 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@
2323
)
2424
from .prompt_api.random_tag_template import *
2525
from .prompt_api.danbooru import *
26+
from .ai_server.siliconflow import translateObject,getModelList
2627

2728

2829
static_path = os.path.join(os.path.dirname(__file__), "../../dist/")
@@ -790,9 +791,14 @@ async def _tanslater_text(request):
790791
data_setting = get_translate_settings() # service/source_lang/target_lang
791792

792793
text = data['text']
794+
strObjectData = data['str_object']
793795
if current_setting == "openai":
794796
# OpenAI 只需要目标语种来写 prompt
795-
result = await openai_translate(text, data_setting['translate_target_lang'])
797+
result = await openai_translate(strObjectData, data_setting['translate_target_lang'])
798+
elif current_setting == "other_ai_plate":
799+
aiInfoData = get_ai_info_setting()
800+
if aiInfoData.get("base_url") == "https://api.siliconflow.cn/v1":
801+
result = await translateObject(strObjectData, data_setting['translate_target_lang'])
796802
else:
797803
# 仍然走 translators(含 'translater' 与历史的 'network')
798804
result = translateText(
@@ -801,7 +807,7 @@ async def _tanslater_text(request):
801807
data_setting['translate_source_lang'],
802808
data_setting['translate_target_lang']
803809
)
804-
return web.json_response({"text": result})
810+
return web.json_response({"data": result})
805811
except Exception as e:
806812
print(f"Error: {e}")
807813
return web.Response(status=500)
@@ -814,17 +820,22 @@ async def _tanslater_input_text(request):
814820
current_setting = get_translate_setting()
815821
data_setting = get_translate_settings()
816822
text = data['text']
823+
strObjectData = data['str_object']
817824
if current_setting == "openai":
818825
# 反向时把“源语种代码”当成目标,复用同一 prompt 模板
819-
result = await openai_translate(text, data_setting['translate_source_lang'])
826+
result = await openai_translate(strObjectData, data_setting['translate_source_lang'])
827+
elif current_setting == "other_ai_plate":
828+
aiInfoData = get_ai_info_setting()
829+
if aiInfoData.get("base_url") == "https://api.siliconflow.cn/v1":
830+
result = await translateObject(strObjectData, data_setting['translate_source_lang'])
820831
else:
821832
result = translateText(
822833
text,
823834
data_setting['translate_service'],
824835
data_setting['translate_target_lang'],
825836
data_setting['translate_source_lang']
826837
)
827-
return web.json_response({"text": result})
838+
return web.json_response({"data": result})
828839
except Exception as e:
829840
print(f"Error: {e}")
830841
return web.Response(status=500)
@@ -1036,6 +1047,35 @@ async def _run_danbooru_sql_text(request):
10361047
return web.json_response(result)
10371048
# ===================================================================================
10381049

1050+
# ============================================ AI平台对接 ============================================
1051+
1052+
@PromptServer.instance.routes.post(baseUrl+"ai_server/get_settings")
async def _get_ai_server_setting(request):
    """Return the stored AI-platform settings to the frontend."""
    settings = get_ai_info_setting()
    return web.json_response({"data": settings})
1055+
1056+
1057+
@PromptServer.instance.routes.post(baseUrl+"ai_server/update_settings")
async def _update_ai_server_settings(request):
    """Persist AI-platform settings posted by the frontend.

    Accepts either the bare settings dict or a wrapper of the form
    ``{"ai_info_setting": {...}}``. Responds 500 on any persistence error.
    """
    data = await request.json()
    try:
        # Unwrap the frontend's optional {"ai_info_setting": {...}} envelope.
        payload = data["ai_info_setting"] if "ai_info_setting" in data else data
        update_ai_info_setting(payload)
    except Exception as e:
        print(f"Error: {e}")
        return web.Response(status=500)
    return web.json_response({"info": 'ok'})
1070+
1071+
@PromptServer.instance.routes.post(baseUrl+"ai_server/get_ai_models")
async def _get_ai_server_get_ai_models(request):
    """Return the SiliconFlow model list for the settings UI.

    Bug fix: this was a plain ``def`` — aiohttp web handlers must be
    coroutines (as every sibling route in this file is).
    """
    # NOTE(review): getModelList() performs a blocking HTTP request on the
    # event-loop thread; acceptable for an occasional settings-page call.
    return web.json_response({"data": getModelList()})
1074+
1075+
1076+
1077+
# ============================================ AI平台对接 End ============================================
1078+
10391079
print("======== WeiLin插件服务已启动 ========")
10401080
print("======== WeiLin Server Init ========")
10411081

app/server/translate/openai_translate.py

Lines changed: 19 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -9,11 +9,13 @@
99
"ru": "俄语", "it": "意大利语", "pt": "葡萄牙语"
1010
}
1111

12+
1213
def _lang_to_name(code: str) -> str:
1314
if not code:
1415
return "中文"
1516
return _LANG_NAME.get(code, code)
1617

18+
1719
async def openai_translate(text: str, target_lang_code: str) -> str:
1820
cfg = initialize_config()
1921
api_key = cfg.get("api_key", "")
@@ -23,21 +25,26 @@ async def openai_translate(text: str, target_lang_code: str) -> str:
2325
if not api_key:
2426
raise RuntimeError("OpenAI api_key 未配置")
2527

26-
target_lang_name = _lang_to_name(target_lang_code)
27-
prompt = f"将以下AI绘画提示词翻译成{target_lang_name},只输出翻译结果:{text}"
28-
2928
url = f"{base_url}/chat/completions"
3029
headers = {
3130
"Authorization": f"Bearer {api_key}",
3231
"Content-Type": "application/json"
3332
}
3433
payload = {
3534
"model": model,
36-
"messages": [{"role": "user", "content": prompt}],
35+
"messages": [
36+
{
37+
"role": "system",
38+
"content": f"You are an expert in data translation processing. translate the text field of the JSON string data passed by the user into {_lang_to_name(target_lang_code)} and fill it in the corresponding translate field. You only need to return the corresponding JSON string data and do not modify any other data or parameters"
39+
},
40+
{"role": "user", "content": text}
41+
],
42+
"response_format": {"type": "json_object"},
3743
"temperature": 0
3844
}
3945

40-
async with aiohttp.ClientSession() as session:
46+
# 配置会话以使用系统环境变量中的代理设置
47+
async with aiohttp.ClientSession(trust_env=True) as session:
4148
async with session.post(url, headers=headers, json=payload, timeout=60) as resp:
4249
data = await resp.json()
4350
if resp.status != 200:
@@ -47,11 +54,14 @@ async def openai_translate(text: str, target_lang_code: str) -> str:
4754
usage = data.get("usage", {})
4855
prompt_tokens = usage.get("prompt_tokens", 0)
4956
completion_tokens = usage.get("completion_tokens", 0)
50-
total_tokens = usage.get("total_tokens", prompt_tokens + completion_tokens)
57+
total_tokens = usage.get(
58+
"total_tokens", prompt_tokens + completion_tokens)
5159

5260
# 控制台日志
53-
print(f"🤖 正在使用OpenAI翻译: {text}")
54-
print(f"📊 OpenAI翻译tokens使用: {prompt_tokens}+{completion_tokens}={total_tokens}")
55-
print(f"✅ OpenAI翻译成功: {text} -> {result}")
61+
# print(f"🤖 正在使用OpenAI翻译: {text}")
62+
print(f"🤖 正在使用OpenAI翻译")
63+
print(
64+
f"📊 OpenAI翻译tokens使用: {prompt_tokens}+{completion_tokens}={total_tokens}")
65+
# print(f"✅ OpenAI翻译成功: {text} -> {result}")
5666

5767
return result

app/server/user_init/user_init.py

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,14 @@
5252
"translate_target_lang": "zh",
5353
"show_auto_limit": 25,
5454
"random_template": "",
55+
"ai_info_setting": {
56+
"api_key": "",
57+
"base_url": "https://api.siliconflow.cn/v1",
58+
"model": "THUDM/glm-4-9b-chat",
59+
"temperature": 0,
60+
"top_p": 0.7,
61+
"max_tokens": 4096
62+
}
5563
}
5664

5765
def read_init_file():
@@ -176,6 +184,30 @@ def update_random_template_setting(new_setting):
176184
"""更新random_template参数"""
177185
data = read_init_file() or {}
178186
data['random_template'] = new_setting
187+
with open(init_file_path, 'w', encoding='utf-8') as f:
188+
json.dump(data, f, ensure_ascii=False, indent=4)
189+
return True
190+
191+
def get_ai_info_setting():
    """Return the ``ai_info_setting`` block, creating it with defaults if absent.

    When the key is missing, the defaults are written back to the init file so
    later reads see a fully populated settings dict.
    """
    data = read_init_file() or {}
    if 'ai_info_setting' not in data:
        defaults = {
            "api_key": "",
            "base_url": "https://api.siliconflow.cn/v1",
            "model": "THUDM/glm-4-9b-chat",
            "temperature": 0,
            "top_p": 0.7,
            "max_tokens": 4096
        }
        data['ai_info_setting'] = defaults
        with open(init_file_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)
    return data['ai_info_setting']
206+
207+
def update_ai_info_setting(new_setting: dict):
208+
"""更新ai_info_setting参数"""
209+
data = read_init_file() or {}
210+
data['ai_info_setting'] = new_setting
179211
with open(init_file_path, 'w', encoding='utf-8') as f:
180212
json.dump(data, f, ensure_ascii=False, indent=4)
181213
return True

0 commit comments

Comments
 (0)