Commit 3ce0c6e

Symbiomatrix authored and woct0rdho committed
Fix.
1 parent 63ec59f commit 3ce0c6e

File tree

1 file changed: +4 -4 lines changed

networks/resize_lora.py

Lines changed: 4 additions & 4 deletions
@@ -240,15 +240,15 @@ def resize_lora_model(lora_sd, new_rank, new_conv_rank, save_dtype, device, dyna
     for key, value in tqdm(lora_sd.items()):
         weight_name = None
         if LORAFMT[0] in key:
-            block_down_name = key.rsplit(f".LORAFMT[0]", 1)[0]
+            block_down_name = key.rsplit(f".{LORAFMT[0]}", 1)[0]
             weight_name = key.rsplit(".", 1)[-1]
             lora_down_weight = value
         else:
             continue
 
         # find corresponding lora_up and alpha
         block_up_name = block_down_name
-        lora_up_weight = lora_sd.get(block_up_name + f".LORAFMT[1]." + weight_name, None)
+        lora_up_weight = lora_sd.get(block_up_name + f".{LORAFMT[1]}." + weight_name, None)
         lora_alpha = lora_sd.get(block_down_name + ".alpha", None)
 
         weights_loaded = lora_down_weight is not None and lora_up_weight is not None
@@ -286,8 +286,8 @@ def resize_lora_model(lora_sd, new_rank, new_conv_rank, save_dtype, device, dyna
         verbose_str += "\n"
 
         new_alpha = param_dict["new_alpha"]
-        o_lora_sd[block_down_name + f".LORAFMT[0].weight"] = param_dict[LORAFMT[0]].to(save_dtype).contiguous()
-        o_lora_sd[block_up_name + f".LORAFMT[1].weight"] = param_dict[LORAFMT[1]].to(save_dtype).contiguous()
+        o_lora_sd[block_down_name + f".{LORAFMT[0]}.weight"] = param_dict[LORAFMT[0]].to(save_dtype).contiguous()
+        o_lora_sd[block_up_name + f".{LORAFMT[1]}.weight"] = param_dict[LORAFMT[1]].to(save_dtype).contiguous()
         o_lora_sd[block_up_name + ".alpha"] = torch.tensor(param_dict["new_alpha"]).to(save_dtype)
 
         block_down_name = None
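
Why the change works: inside an f-string, LORAFMT[0] without braces is literal text, so the old pattern ".LORAFMT[0]" never occurred in any state-dict key, the block-name split returned the whole key, and the lora_up lookup silently failed. A minimal sketch of the before/after behavior, assuming hypothetical values LORAFMT = ("lora_down", "lora_up") (the real tuple is defined elsewhere in networks/resize_lora.py):

# Hypothetical stand-in for the module-level constant used in the diff;
# the actual values live elsewhere in networks/resize_lora.py.
LORAFMT = ("lora_down", "lora_up")

key = "lora_unet_block.lora_down.weight"

# Before the fix: no braces, so the f-string is the literal ".LORAFMT[0]".
# It never occurs in the key, rsplit() finds no separator, and the "block
# name" comes back as the whole key.
assert key.rsplit(f".LORAFMT[0]", 1)[0] == key

# After the fix: braces interpolate LORAFMT[0] ("lora_down"), so the block
# name is extracted correctly.
assert key.rsplit(f".{LORAFMT[0]}", 1)[0] == "lora_unet_block"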
