2 parents b928cc7 + 11041ae commit b08f22b
examples/models/llama/model.py
@@ -98,7 +98,7 @@ def __init__(self, llm_config: Optional[LlmConfig] = None):
 checkpoint = torch.load(checkpoint_path, map_location=device, mmap=True)

 # If given checkpoint is fairseq, convert to llama checkpoint.
-fairseq2_checkpoint = llm_config.base.fairseq2
+fairseq2_checkpoint = self.llm_config.base.fairseq2
 if fairseq2_checkpoint:
     print("Using fairseq2 checkpoint")
     checkpoint = convert_to_llama_checkpoint(checkpoint=checkpoint)
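The change reads the fairseq2 flag from the instance attribute instead of the constructor parameter: `llm_config` is declared `Optional[LlmConfig]` and may be `None` at this point, whereas the constructor presumably normalizes it onto `self.llm_config` earlier. The sketch below is a simplified, hypothetical illustration of that pattern; the `LlmConfig`/`BaseCfg` shapes and the class name are assumptions, not the real ExecuTorch definitions.

# Minimal sketch, not the actual ExecuTorch code: shows why reading the
# config from the instance attribute is safer than using the parameter.
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class BaseCfg:
    fairseq2: bool = False  # assumed stand-in for llm_config.base.fairseq2


@dataclass
class LlmConfig:
    base: BaseCfg = field(default_factory=BaseCfg)


class Llama2Model:
    def __init__(self, llm_config: Optional[LlmConfig] = None):
        # The optional argument is normalized onto the instance, so later
        # code should read self.llm_config; the local parameter may be None.
        self.llm_config = llm_config if llm_config is not None else LlmConfig()

        # Before the fix, `llm_config.base.fairseq2` raised AttributeError
        # when the caller passed no config; `self.llm_config` always exists.
        fairseq2_checkpoint = self.llm_config.base.fairseq2
        if fairseq2_checkpoint:
            print("Using fairseq2 checkpoint")


Llama2Model()  # constructing without a config no longer dereferences None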