
Commit be480df

Move import to top of file (#1209)
1 parent: f39f0d7


litgpt/pretrain.py (1 addition, 1 deletion)

@@ -20,6 +20,7 @@
 
 from litgpt import Tokenizer
 from litgpt.args import EvalArgs, TrainArgs
+from litgpt.config import name_to_config
 from litgpt.data import DataModule, TinyLlama
 from litgpt.model import GPT, Block, CausalSelfAttention, Config, LLaMAMLP
 from litgpt.utils import (
@@ -91,7 +92,6 @@ def setup(
     if model_config is not None and model_name is not None:
         raise ValueError("Only one of `model_name` or `model_config` can be set.")
     elif model_config is None and model_name is None:
-        from litgpt.config import name_to_config
         available_models = "\n".join(sorted(name_to_config))
         raise ValueError(f"Please specify --model_name <model_name>. Available values:\n{available_models}")
     config = Config.from_name(model_name) if model_config is None else model_config
