1 parent f9e8bf6 commit fbc0278
examples/llm_qat/export.py
@@ -44,7 +44,7 @@ def get_lora_model(
     model = AutoModelForCausalLM.from_pretrained(ckpt_path, device_map=device_map)
 
     # Restore modelopt state
-    modelopt_state = torch.load(f"{ckpt_path}/modelopt_state_calibration.pth", weights_only=False)
+    modelopt_state = torch.load(f"{ckpt_path}/modelopt_state_calib.pth", weights_only=False)
     restore_from_modelopt_state(model, modelopt_state)
 
     # Restore modelopt quantizer state dict
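For context, a minimal sketch of how this restore path is typically used, assuming the checkpoint directory layout implied by the changed line. The helper name load_calibrated_model and the import path for restore_from_modelopt_state are assumptions for illustration, not the full export.py.

    # Sketch only: load a HF model and restore its ModelOpt (quantization) state
    # saved during calibration, using the renamed checkpoint file from this commit.
    import torch
    from transformers import AutoModelForCausalLM
    from modelopt.torch.opt import restore_from_modelopt_state  # assumed import path

    def load_calibrated_model(ckpt_path: str, device_map: str = "auto"):
        # Load the base model weights from the checkpoint directory.
        model = AutoModelForCausalLM.from_pretrained(ckpt_path, device_map=device_map)

        # Restore the ModelOpt state captured at calibration time.
        modelopt_state = torch.load(
            f"{ckpt_path}/modelopt_state_calib.pth", weights_only=False
        )
        restore_from_modelopt_state(model, modelopt_state)
        return model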