Skip to content
This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit 1078c89

Browse files
committed
solve llama3.2 import issue
1 parent f7aef8a commit 1078c89

File tree

1 file changed

+2
-2
lines changed

1 file changed

+2
-2
lines changed

torchchat/model.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@
3131
)
3232
from torch.nn import functional as F
3333

34-
from torchtune.models.flamingo import flamingo_decoder, flamingo_vision_encoder
34+
from torchtune.models.llama3_2_vision import llama3_2_vision_decoder, llama3_2_vision_encoder
3535
from torchtune.models.llama3_1._component_builders import llama3_1 as llama3_1_builder
3636
from torchtune.modules.model_fusion import DeepFusionModel
3737
from torchtune.models.clip import clip_vision_encoder
@@ -213,7 +213,7 @@ def _llama3_1(cls):
213213
def _flamingo(cls):
214214
return cls(
215215
model_type=ModelType.Flamingo,
216-
modules={"encoder": flamingo_vision_encoder, "decoder": flamingo_decoder},
216+
modules={"encoder": llama3_2_vision_encoder, "decoder": llama3_2_vision_decoder},
217217
fusion_class=DeepFusionModel,
218218
)
219219

0 commit comments

Comments (0)