We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 77c8627 commit 2e11ea2 — Copy full SHA for 2e11ea2
convert_lora_to_gguf.py
@@ -359,7 +359,6 @@ def set_type(self):
359
360
def set_gguf_parameters(self):
361
self.gguf_writer.add_float32(gguf.Keys.Adapter.LORA_ALPHA, self.lora_alpha)
362
- super().set_gguf_parameters()
363
364
def generate_extra_tensors(self) -> Iterable[tuple[str, Tensor]]:
365
# Never add extra tensors (e.g. rope_freqs) for LoRA adapters
0 commit comments