Skip to content

Commit 4b93dc6

Browse files
committed
Use RuntimeError instead of assert
1 parent d872772 commit 4b93dc6

File tree

1 file changed

+8
-11
lines changed

1 file changed

+8
-11
lines changed

src/llama_recipes/utils/config_utils.py

Lines changed: 8 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -45,17 +45,14 @@ def generate_peft_config(train_config, kwargs):
4545
peft_configs = (LoraConfig, AdaptionPromptConfig, PrefixTuningConfig)
4646
names = tuple(c.__name__.rstrip("_config") for c in configs)
4747

48-
assert (
49-
train_config.peft_method in names
50-
), f"Peft config not found: {train_config.peft_method}"
51-
52-
assert (
53-
train_config.peft_method != "prefix"
54-
), "PrefixTuning is currently not supported (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089350811)"
55-
if train_config.enable_fsdp:
56-
assert (
57-
train_config.peft_method != "llama_adapter"
58-
), "Llama_adapter is currently not supported in combination with FSDP (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089274425)"
48+
if train_config.peft_method not in names:
49+
raise RuntimeError(f"Peft config not found: {train_config.peft_method}")
50+
51+
if train_config.peft_method == "prefix":
52+
raise RuntimeError("PrefixTuning is currently not supported (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089350811)")
53+
54+
if train_config.enable_fsdp and train_config.peft_method == "llama_adapter":
55+
raise RuntimeError("Llama_adapter is currently not supported in combination with FSDP (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089274425)")
5956

6057
config = configs[names.index(train_config.peft_method)]()
6158

0 commit comments

Comments
 (0)