diff --git a/convert_lora_to_gguf.py b/convert_lora_to_gguf.py
index 439a78de108ca..26de8b1ca69b0 100755
--- a/convert_lora_to_gguf.py
+++ b/convert_lora_to_gguf.py
@@ -339,27 +339,28 @@ def get_tensors(self) -> Iterator[tuple[str, Tensor]]:
                 tensor_map: dict[str, PartialLoraTensor] = {}
 
                 for name, tensor in lora_model.items():
-                    if self.lazy:
-                        tensor = LazyTorchTensor.from_eager(tensor)
-                    base_name = get_base_tensor_name(name)
-                    is_lora_a = ".lora_A.weight" in name
-                    is_lora_b = ".lora_B.weight" in name
-                    if not is_lora_a and not is_lora_b:
-                        if ".base_layer.weight" in name:
-                            continue
-                        logger.error(f"Unexpected name '{name}': Not a lora_A or lora_B tensor")
-                        sys.exit(1)
-
-                    if base_name in tensor_map:
-                        if is_lora_a:
-                            tensor_map[base_name].A = tensor
+                    if ("lora_" in name) or (".base_layer.weight" in name):
+                        if self.lazy:
+                            tensor = LazyTorchTensor.from_eager(tensor)
+                        base_name = get_base_tensor_name(name)
+                        is_lora_a = ".lora_A.weight" in name
+                        is_lora_b = ".lora_B.weight" in name
+                        if not is_lora_a and not is_lora_b:
+                            if ".base_layer.weight" in name:
+                                continue
+
+                        if base_name in tensor_map:
+                            if is_lora_a:
+                                tensor_map[base_name].A = tensor
+                            else:
+                                tensor_map[base_name].B = tensor
                         else:
-                            tensor_map[base_name].B = tensor
+                            if is_lora_a:
+                                tensor_map[base_name] = PartialLoraTensor(A=tensor)
+                            else:
+                                tensor_map[base_name] = PartialLoraTensor(B=tensor)
                     else:
-                        if is_lora_a:
-                            tensor_map[base_name] = PartialLoraTensor(A=tensor)
-                        else:
-                            tensor_map[base_name] = PartialLoraTensor(B=tensor)
+                        pass
 
                 for name, tensor in tensor_map.items():
                     assert tensor.A is not None
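
For context, a minimal standalone sketch of the filtering behaviour this patch introduces (the tensor names below are hypothetical examples, not taken from a real adapter): a tensor whose name contains neither "lora_" nor ".base_layer.weight" is now silently ignored, whereas the old code logged an error and called sys.exit(1).

# Sketch of the new name filter; tensor names are made-up examples.
tensor_names = [
    "base_model.model.layers.0.self_attn.q_proj.lora_A.weight",      # handled (lora_A)
    "base_model.model.layers.0.self_attn.q_proj.lora_B.weight",      # handled (lora_B)
    "base_model.model.layers.0.self_attn.q_proj.base_layer.weight",  # matched, then skipped via continue
    "base_model.model.embed_tokens.weight",                          # neither pattern: ignored instead of aborting
]

for name in tensor_names:
    if ("lora_" in name) or (".base_layer.weight" in name):
        print(f"handle: {name}")
    else:
        print(f"ignore: {name}")  # old behaviour: logger.error(...) followed by sys.exit(1)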