This repository was archived by the owner on Sep 10, 2025. It is now read-only.

Commit ecb5664

OK, can't remove import lm_eval. Move it inside the eval condition.
1 parent: 4778332

2 files changed (+4, -3 lines)


torchchat/cli/cli.py

Lines changed: 2 additions & 1 deletion
@@ -13,8 +13,9 @@
 
 import torch
 
-from torchchat.cli.download import download_and_convert, is_model_downloaded
+import lm_eval # noqa
 
+from torchchat.cli.download import download_and_convert, is_model_downloaded
 from torchchat.utils.build_utils import (
     allowable_dtype_names,
     allowable_params_table,
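
The cli.py hunk adds the import even though the noqa marker suggests nothing in cli.py references lm_eval by name; the commit does not say exactly why the eager import is needed, only that it could not be removed. A minimal, hypothetical illustration of the noqa convention (not the actual cli.py contents):

# Hypothetical module illustrating an import kept under "# noqa": the name is
# never used directly, so flake8 would report F401 ("imported but unused")
# unless the warning is suppressed.
import lm_eval  # noqa: F401


def main() -> None:
    # Nothing below refers to lm_eval by name; the import is retained for
    # whatever import-time behavior the rest of the program relies on.
    print("lm_eval imported eagerly; noqa silences the unused-import warning")


if __name__ == "__main__":
    main()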

torchchat/usages/eval.py

Lines changed: 2 additions & 2 deletions
@@ -10,8 +10,6 @@
 import torch._dynamo.config
 import torch._inductor.config
 
-import lm_eval # noqa
-
 from torchchat.cli.builder import (
     _initialize_model,
     _initialize_tokenizer,
@@ -30,6 +28,8 @@
 torch._inductor.config.triton.cudagraphs = True
 torch._dynamo.config.cache_size_limit = 100000
 
+import lm_eval
+
 from lm_eval.evaluator import evaluate
 from lm_eval.models.huggingface import HFLM as eval_wrapper
 from lm_eval.tasks import get_task_dict
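
The commit message frames the eval.py side as moving the import inside the eval condition, i.e., deferring lm_eval so it is only loaded when evaluation actually runs. Below is a sketch of that general deferred-import pattern, assuming illustrative names (run_eval, model_name, tasks) rather than torchchat's actual entry points; exact lm_eval call signatures can also differ between harness versions.

# Sketch of deferring a heavy optional dependency until the code path that
# needs it runs. run_eval, model_name, and tasks are illustrative, not
# torchchat's API.
def run_eval(model_name: str, tasks: list[str]) -> dict:
    # Deferred imports: only paid for when evaluation is actually requested,
    # so other commands do not load lm_eval at all.
    import lm_eval  # noqa: F401
    from lm_eval.evaluator import evaluate
    from lm_eval.models.huggingface import HFLM as eval_wrapper
    from lm_eval.tasks import get_task_dict

    lm = eval_wrapper(pretrained=model_name)  # wrap the model for the harness
    task_dict = get_task_dict(tasks)          # resolve task names to task objects
    return evaluate(lm, task_dict)


if __name__ == "__main__":
    print(run_eval("gpt2", ["hellaswag"]))

The payoff is that importing the surrounding module, or running unrelated CLI subcommands, no longer pays lm_eval's import cost, while the eval path still gets the harness when it needs it.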
