Skip to content

Commit cb21b67

Browse files
committed
Launcher GUI add options
1 parent 1e67f45 commit cb21b67

File tree

2 files changed

+42
-24
lines changed

2 files changed

+42
-24
lines changed

builder/attachments/launcher.py

Lines changed: 16 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@ def save_config(args):
2424
"use_pytorch_cross_attention": args.use_pytorch_cross_attention,
2525
"use_sage_attention": args.use_sage_attention,
2626
"use_flash_attention": args.use_flash_attention,
27-
"use_xformers": args.use_xformers,
2827
"extra_args": args.extra_args,
2928
}
3029
with open(CONFIG_FILE, "w") as f:
@@ -48,7 +47,7 @@ def main():
4847
parser = GooeyParser(description="Customize settings before launching ComfyUI")
4948

5049
# Environment Variable Configuration Tab
51-
env_tab = parser.add_argument_group('Environment Variable Configuration',
50+
env_tab = parser.add_argument_group('Environment Variables',
5251
'Configure the environment variables for ComfyUI',
5352
gooey_options={'show_border': True})
5453
env_tab.add_argument('--http_proxy',
@@ -77,7 +76,7 @@ def main():
7776
default=saved_config.get("hf_endpoint", "") if saved_config else '')
7877

7978
# Launch Parameter Configuration Tab
80-
launch_tab = parser.add_argument_group('Launch Parameter Configuration',
79+
launch_tab = parser.add_argument_group('Launch Parameters',
8180
'Configure the launch parameters for ComfyUI',
8281
gooey_options={'show_border': True})
8382
launch_tab.add_argument('--disable_auto_launch',
@@ -100,32 +99,30 @@ def main():
10099
action='store_true',
101100
help='More conservative VRAM usage, reduce speed, recommended only when VRAM is insufficient (--lowvram)',
102101
default=saved_config.get("lowvram", False) if saved_config else False)
103-
# Mutually exclusive Attention Implementation options
104-
attention_group = launch_tab.add_mutually_exclusive_group()
105-
attention_group.add_argument('--use-xformers',
106-
metavar='Use xFormers',
107-
action='store_true',
108-
help='Choose xFormers as the attention implementation. Default option',
109-
default=saved_config.get("use_xformers", True) if saved_config else True)
110-
attention_group.add_argument('--use-pytorch-cross-attention',
111-
metavar='Use PyTorch native cross-attention',
102+
launch_tab.add_argument('--extra_args',
103+
metavar='Additional Launch Arguments',
104+
help='Refer to ComfyUI’s cli_args.py, add extra launch parameters (e.g., " --cpu" for CPU-only mode), mind spaces',
105+
default=saved_config.get("extra_args", "") if saved_config else '')
106+
107+
# Attention Implementation Configuration Tab
108+
attn_tab = parser.add_argument_group('Attention Implementation',
109+
'Options are mutually exclusive. If nothing is selected, xFormers will be used by default',
110+
gooey_options={'show_border': True})
111+
attn_tab.add_argument('--use-pytorch-cross-attention',
112+
metavar='Disable xFormers/FlashAttention/SageAttention',
112113
action='store_true',
113-
help='More stable (not better) image generation (--use-pytorch-cross-attention)',
114+
help='Use native PyTorch cross-attention. More stable (not better) image generation. Not recommended for videos (--use-pytorch-cross-attention)',
114115
default=saved_config.get("use_pytorch_cross_attention", False) if saved_config else False)
115-
attention_group.add_argument('--use-sage-attention',
116+
attn_tab.add_argument('--use-sage-attention',
116117
metavar='Use SageAttention',
117118
action='store_true',
118119
help='Better performance but less compatibility (--use-sage-attention)',
119120
default=saved_config.get("use_sage_attention", False) if saved_config else False)
120-
attention_group.add_argument('--use-flash-attention',
121+
attn_tab.add_argument('--use-flash-attention',
121122
metavar='Use FlashAttention',
122123
action='store_true',
123124
help='On par with xFormers (--use-flash-attention)',
124125
default=saved_config.get("use_flash_attention", False) if saved_config else False)
125-
launch_tab.add_argument('--extra_args',
126-
metavar='Additional Launch Arguments',
127-
help='Refer to ComfyUI’s cli_args.py, add extra launch parameters (e.g., " --cpu" for CPU-only mode), mind spaces',
128-
default=saved_config.get("extra_args", "") if saved_config else '')
129126

130127
args = parser.parse_args()
131128

builder/attachments/launcher_cn.py

Lines changed: 26 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ def save_config(args):
2222
"disable_smart_memory": args.disable_smart_memory,
2323
"lowvram": args.lowvram,
2424
"use_pytorch_cross_attention": args.use_pytorch_cross_attention,
25+
"use_sage_attention": args.use_sage_attention,
26+
"use_flash_attention": args.use_flash_attention,
2527
"extra_args": args.extra_args,
2628
}
2729
with open(CONFIG_FILE, "w") as f:
@@ -132,16 +134,31 @@ def main():
132134
action='store_true',
133135
help='更“节约”地使用显存, 牺牲速度, 仅建议显存不足时开启 (--lowvram)',
134136
default=saved_config.get("lowvram", False) if saved_config else False)
135-
launch_tab.add_argument('--use-pytorch-cross-attention',
136-
metavar='禁用 xFormers/FlashAttention/SageAttention',
137-
action='store_true',
138-
help='禁用后,会启用 PyTorch 原生交叉注意力机制。 如需生成视频, 建议不要勾选 (--use-pytorch-cross-attention)',
139-
default=saved_config.get("use_pytorch_cross_attention", False) if saved_config else False)
140137
launch_tab.add_argument('--extra_args',
141138
metavar='额外启动参数',
142139
help='参数列表在 ComfyUI 的 cli_args.py, 注意添加空格 (例如 " --cpu" 启用仅 CPU 模式)',
143140
default=saved_config.get("extra_args", "") if saved_config else '')
144141

142+
# 注意力实现配置 Tab
143+
attn_tab = parser.add_argument_group('注意力实现配置 ',
144+
'各选项互斥,请勿多选,不选则默认使用 xFormers',
145+
gooey_options={'show_border': True})
146+
attn_tab.add_argument('--use-pytorch-cross-attention',
147+
metavar='禁用 xFormers/FlashAttention/SageAttention',
148+
action='store_true',
149+
help='使用 PyTorch 原生交叉注意力实现, 图像生成更稳定(不是更好)。 不适合视频生成 (--use-pytorch-cross-attention)',
150+
default=saved_config.get("use_pytorch_cross_attention", False) if saved_config else False)
151+
attn_tab.add_argument('--use-sage-attention',
152+
metavar='使用 SageAttention',
153+
action='store_true',
154+
help='性能更佳, 但可能有兼容性问题 (--use-sage-attention)',
155+
default=saved_config.get("use_sage_attention", False) if saved_config else False)
156+
attn_tab.add_argument('--use-flash-attention',
157+
metavar='使用 FlashAttention',
158+
action='store_true',
159+
help='理论上与 xFormers 相当 (--use-flash-attention)',
160+
default=saved_config.get("use_flash_attention", False) if saved_config else False)
161+
145162
args = parser.parse_args()
146163

147164
# 保存当前配置
@@ -192,6 +209,10 @@ def main():
192209
command.append('--lowvram')
193210
if args.use_pytorch_cross_attention:
194211
command.append('--use-pytorch-cross-attention')
212+
if args.use_sage_attention:
213+
command.append('--use-sage-attention')
214+
if args.use_flash_attention:
215+
command.append('--use-flash-attention')
195216

196217
# 添加用户自定义的额外参数
197218
if args.extra_args:

0 commit comments

Comments (0)