This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit 5b201a5: torchchat

1 parent: ddc4555

1 file changed (+1 / -4 lines)

torchchat/model.py: 1 addition, 4 deletions
@@ -30,9 +30,6 @@
     SequenceParallel,
 )
 from torch.nn import functional as F
-# TODO: remove this after we figure out where in torchtune an `evaluate` module
-# is being imported, which is being confused with huggingface's `evaluate``.
-import lm_eval  # noqa
 from torchtune.models.clip import clip_vision_encoder
 from torchtune.models.llama3_1._component_builders import llama3_1 as llama3_1_builder
 from torchtune.models.llama3_2_vision._component_builders import (
@@ -466,7 +463,7 @@ def build_model(self) -> nn.Module:
             modules[name] = module_class(TransformerArgs.from_params(config_args))
         else:
             modules[name] = module_class(**config_args)
-
+
         # Temporary add extra params to the DeepFusionModel.
         # TODO: Remove it once we can make fusion model configurable in model_param.
         if recipe.fusion_class == DeepFusionModel:
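
For context on the workaround this commit removes: the deleted comment describes relying on import-time side effects, importing `lm_eval` early so that a later import of the ambiguously named `evaluate` module resolves to huggingface's package rather than torchtune's module of the same name. Below is a minimal sketch of that generic pre-import pattern; it assumes, per the removed comment, that importing `lm_eval` loads huggingface's `evaluate` into the module cache, and the final check is illustrative only, not part of the original code.

import sys

# Pre-import the package whose side effects load the intended `evaluate`
# implementation into sys.modules (per the removed comment, huggingface's
# `evaluate` rather than torchtune's module of the same name).
import lm_eval  # noqa: F401

# Later `import evaluate` statements elsewhere in the process would then reuse
# the cached module instead of re-resolving the ambiguous name.
print("evaluate" in sys.modules)  # expected True if lm_eval pulls in huggingface's evaluate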
