Commit 26c5bbb

Move nodes from previous PR into their own file. (#12066)
1 parent: a97c980

2 files changed: +80 -67 lines

comfy_extras/nodes_lora_debug.py

Lines changed: 79 additions & 0 deletions
@@ -0,0 +1,79 @@
import folder_paths
import comfy.utils
import comfy.sd


class LoraLoaderBypass:
    """
    Apply LoRA in bypass mode without modifying base model weights.

    Bypass mode computes: output = base_forward(x) + lora_path(x)
    This is useful for training and when model weights are offloaded.
    """

    def __init__(self):
        self.loaded_lora = None

    @classmethod
    def INPUT_TYPES(s):
        return {
            "required": {
                "model": ("MODEL", {"tooltip": "The diffusion model the LoRA will be applied to."}),
                "clip": ("CLIP", {"tooltip": "The CLIP model the LoRA will be applied to."}),
                "lora_name": (folder_paths.get_filename_list("loras"), {"tooltip": "The name of the LoRA."}),
                "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the diffusion model. This value can be negative."}),
                "strength_clip": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the CLIP model. This value can be negative."}),
            }
        }

    RETURN_TYPES = ("MODEL", "CLIP")
    OUTPUT_TOOLTIPS = ("The modified diffusion model.", "The modified CLIP model.")
    FUNCTION = "load_lora"

    CATEGORY = "loaders"
    DESCRIPTION = "Apply LoRA in bypass mode. Unlike regular LoRA, this doesn't modify model weights - instead it injects the LoRA computation during forward pass. Useful for training scenarios."
    EXPERIMENTAL = True

    def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
        if strength_model == 0 and strength_clip == 0:
            return (model, clip)

        lora_path = folder_paths.get_full_path_or_raise("loras", lora_name)
        lora = None
        if self.loaded_lora is not None:
            if self.loaded_lora[0] == lora_path:
                lora = self.loaded_lora[1]
            else:
                self.loaded_lora = None

        if lora is None:
            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
            self.loaded_lora = (lora_path, lora)

        model_lora, clip_lora = comfy.sd.load_bypass_lora_for_models(model, clip, lora, strength_model, strength_clip)
        return (model_lora, clip_lora)


class LoraLoaderBypassModelOnly(LoraLoaderBypass):
    @classmethod
    def INPUT_TYPES(s):
        return {"required": { "model": ("MODEL",),
                              "lora_name": (folder_paths.get_filename_list("loras"), ),
                              "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01}),
                              }}
    RETURN_TYPES = ("MODEL",)
    FUNCTION = "load_lora_model_only"

    def load_lora_model_only(self, model, lora_name, strength_model):
        return (self.load_lora(model, None, lora_name, strength_model, 0)[0],)


NODE_CLASS_MAPPINGS = {
    "LoraLoaderBypass": LoraLoaderBypass,
    "LoraLoaderBypassModelOnly": LoraLoaderBypassModelOnly,
}

NODE_DISPLAY_NAME_MAPPINGS = {
    "LoraLoaderBypass": "Load LoRA (Bypass) (For debugging)",
    "LoraLoaderBypassModelOnly": "Load LoRA (Bypass, Model Only) (for debugging)",
}
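
The docstring above sums bypass mode up as output = base_forward(x) + lora_path(x): the LoRA branch is evaluated alongside the frozen base layer at forward time instead of being merged into its weights. The real wiring is done by comfy.sd.load_bypass_lora_for_models, which is not part of this diff; the snippet below is only a minimal, self-contained sketch of that idea for a single torch.nn.Linear, and every name in it (BypassLoraLinear, rank, alpha, strength) is illustrative rather than taken from ComfyUI.

import torch
import torch.nn as nn

class BypassLoraLinear(nn.Module):
    """Illustrative only: wrap one nn.Linear and add a LoRA branch at forward time."""
    def __init__(self, base: nn.Linear, rank: int = 16, alpha: float = 16.0, strength: float = 1.0):
        super().__init__()
        self.base = base                                   # base weights stay untouched
        self.down = nn.Linear(base.in_features, rank, bias=False)
        self.up = nn.Linear(rank, base.out_features, bias=False)
        nn.init.zeros_(self.up.weight)                     # start as a no-op, like standard LoRA init
        self.scale = strength * alpha / rank

    def forward(self, x):
        # output = base_forward(x) + lora_path(x); nothing is ever merged into base.weight
        return self.base(x) + self.up(self.down(x)) * self.scale

layer = nn.Linear(64, 64)
wrapped = BypassLoraLinear(layer, strength=1.0)
y = wrapped(torch.randn(2, 64))   # identical to layer(x) until the LoRA branch is trained

Because the base weights are never modified, they can stay offloaded or quantized while only the small down/up matrices need gradients, which is what the docstring means by "useful for training and when model weights are offloaded."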

nodes.py

Lines changed: 1 addition & 67 deletions
@@ -722,69 +722,6 @@ def INPUT_TYPES(s):
     def load_lora_model_only(self, model, lora_name, strength_model):
         return (self.load_lora(model, None, lora_name, strength_model, 0)[0],)
 
-class LoraLoaderBypass:
-    """
-    Apply LoRA in bypass mode without modifying base model weights.
-
-    Bypass mode computes: output = base_forward(x) + lora_path(x)
-    This is useful for training and when model weights are offloaded.
-    """
-
-    def __init__(self):
-        self.loaded_lora = None
-
-    @classmethod
-    def INPUT_TYPES(s):
-        return {
-            "required": {
-                "model": ("MODEL", {"tooltip": "The diffusion model the LoRA will be applied to."}),
-                "clip": ("CLIP", {"tooltip": "The CLIP model the LoRA will be applied to."}),
-                "lora_name": (folder_paths.get_filename_list("loras"), {"tooltip": "The name of the LoRA."}),
-                "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the diffusion model. This value can be negative."}),
-                "strength_clip": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01, "tooltip": "How strongly to modify the CLIP model. This value can be negative."}),
-            }
-        }
-
-    RETURN_TYPES = ("MODEL", "CLIP")
-    OUTPUT_TOOLTIPS = ("The modified diffusion model.", "The modified CLIP model.")
-    FUNCTION = "load_lora"
-
-    CATEGORY = "loaders"
-    DESCRIPTION = "Apply LoRA in bypass mode. Unlike regular LoRA, this doesn't modify model weights - instead it injects the LoRA computation during forward pass. Useful for training scenarios."
-
-    def load_lora(self, model, clip, lora_name, strength_model, strength_clip):
-        if strength_model == 0 and strength_clip == 0:
-            return (model, clip)
-
-        lora_path = folder_paths.get_full_path_or_raise("loras", lora_name)
-        lora = None
-        if self.loaded_lora is not None:
-            if self.loaded_lora[0] == lora_path:
-                lora = self.loaded_lora[1]
-            else:
-                self.loaded_lora = None
-
-        if lora is None:
-            lora = comfy.utils.load_torch_file(lora_path, safe_load=True)
-            self.loaded_lora = (lora_path, lora)
-
-        model_lora, clip_lora = comfy.sd.load_bypass_lora_for_models(model, clip, lora, strength_model, strength_clip)
-        return (model_lora, clip_lora)
-
-
-class LoraLoaderBypassModelOnly(LoraLoaderBypass):
-    @classmethod
-    def INPUT_TYPES(s):
-        return {"required": { "model": ("MODEL",),
-                              "lora_name": (folder_paths.get_filename_list("loras"), ),
-                              "strength_model": ("FLOAT", {"default": 1.0, "min": -100.0, "max": 100.0, "step": 0.01}),
-                              }}
-    RETURN_TYPES = ("MODEL",)
-    FUNCTION = "load_lora_model_only"
-
-    def load_lora_model_only(self, model, lora_name, strength_model):
-        return (self.load_lora(model, None, lora_name, strength_model, 0)[0],)
-
 class VAELoader:
     video_taes = ["taehv", "lighttaew2_2", "lighttaew2_1", "lighttaehy1_5", "taeltx_2"]
     image_taes = ["taesd", "taesdxl", "taesd3", "taef1"]
@@ -2130,8 +2067,6 @@ def expand_image(self, image, left, top, right, bottom, feathering):
     "LatentFlip": LatentFlip,
     "LatentCrop": LatentCrop,
     "LoraLoader": LoraLoader,
-    "LoraLoaderBypass": LoraLoaderBypass,
-    "LoraLoaderBypassModelOnly": LoraLoaderBypassModelOnly,
     "CLIPLoader": CLIPLoader,
     "UNETLoader": UNETLoader,
     "DualCLIPLoader": DualCLIPLoader,
@@ -2171,8 +2106,6 @@ def expand_image(self, image, left, top, right, bottom, feathering):
     "CheckpointLoaderSimple": "Load Checkpoint",
     "VAELoader": "Load VAE",
     "LoraLoader": "Load LoRA",
-    "LoraLoaderBypass": "Load LoRA (Bypass)",
-    "LoraLoaderBypassModelOnly": "Load LoRA (Bypass, Model Only)",
     "CLIPLoader": "Load CLIP",
     "ControlNetLoader": "Load ControlNet Model",
     "DiffControlNetLoader": "Load ControlNet Model (diff)",
@@ -2498,6 +2431,7 @@ async def init_builtin_extra_nodes():
         "nodes_wanmove.py",
         "nodes_image_compare.py",
         "nodes_zimage.py",
+        "nodes_lora_debug.py"
     ]
 
     import_failed = []
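
On the nodes.py side, the only addition is in the last hunk: listing "nodes_lora_debug.py" in init_builtin_extra_nodes() so the new file is imported at startup and its NODE_CLASS_MAPPINGS / NODE_DISPLAY_NAME_MAPPINGS are merged into the global node registries. The snippet below sketches that general pattern in isolation; the helper name and importlib plumbing are assumptions for illustration, not ComfyUI's actual loader code.

import importlib.util
import os

NODE_CLASS_MAPPINGS = {}            # stand-ins for the global registries kept in nodes.py
NODE_DISPLAY_NAME_MAPPINGS = {}

def load_extra_node_module(extras_dir: str, filename: str) -> None:
    """Import one comfy_extras file and merge its node mappings into the registries."""
    path = os.path.join(extras_dir, filename)
    spec = importlib.util.spec_from_file_location(filename[:-3], path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    NODE_CLASS_MAPPINGS.update(getattr(module, "NODE_CLASS_MAPPINGS", {}))
    NODE_DISPLAY_NAME_MAPPINGS.update(getattr(module, "NODE_DISPLAY_NAME_MAPPINGS", {}))

# For example, load_extra_node_module("comfy_extras", "nodes_lora_debug.py") would register
# LoraLoaderBypass and LoraLoaderBypassModelOnly under the display names defined in the new file.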
