
Commit 530f755

Update on "CI test for lora"
Add a CI test for LoRA. Note: it is not program-data separated yet.

Differential Revision: [D78751767](https://our.internmc.facebook.com/intern/diff/D78751767/)

[ghstack-poisoned]
2 parents baf77b5 + 9a76d16 commit 530f755

File tree

1 file changed: +5 -1 lines changed
extension/llm/export/config/llm_config.py

Lines changed: 5 additions & 1 deletion
```diff
@@ -60,7 +60,7 @@ class PreqMode(str, Enum):
 @dataclass
 class BaseConfig:
     """
-    Configurations specific to the model, e.g. whether its Qwen3 or Phi-4-mini,
+    Configurations specific to the model, e.g. whether it's Qwen3 or Phi-4-mini,
     and are the minimal set of parameters needed to load the pretrained
     eager model and its weights.

@@ -479,6 +479,10 @@ def from_args(cls, args: argparse.Namespace) -> "LlmConfig": # noqa: C901
             llm_config.base.checkpoint = args.checkpoint
         if hasattr(args, "checkpoint_dir"):
             llm_config.base.checkpoint_dir = args.checkpoint_dir
+        if hasattr(args, "adapter_checkpoint"):
+            llm_config.base.adapter_checkpoint = args.adapter_checkpoint
+        if hasattr(args, "adapter_config"):
+            llm_config.base.adapter_config = args.adapter_config
         if hasattr(args, "tokenizer_path"):
             llm_config.base.tokenizer_path = args.tokenizer_path
         if hasattr(args, "metadata"):
```
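For context, the added lines follow the same `hasattr` guard already used for `checkpoint` and `tokenizer_path`: an optional CLI argument is copied onto `llm_config.base` only if the argument parser actually defined it. Below is a minimal, self-contained sketch of that pattern; the simplified `BaseConfig`, the standalone `base_config_from_args` helper, and the loop over field names are illustrative assumptions (the real `LlmConfig.from_args` uses explicit per-field `if` statements, as the diff shows).

```python
import argparse
from dataclasses import dataclass
from typing import Optional


@dataclass
class BaseConfig:
    # Only the fields touched in this diff; the real BaseConfig has more.
    checkpoint: Optional[str] = None
    checkpoint_dir: Optional[str] = None
    adapter_checkpoint: Optional[str] = None
    adapter_config: Optional[str] = None
    tokenizer_path: Optional[str] = None


def base_config_from_args(args: argparse.Namespace) -> BaseConfig:
    """Copy an optional CLI argument only when the parser defines it."""
    base = BaseConfig()
    for name in (
        "checkpoint",
        "checkpoint_dir",
        "adapter_checkpoint",
        "adapter_config",
        "tokenizer_path",
    ):
        if hasattr(args, name):
            setattr(base, name, getattr(args, name))
    return base


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--adapter_checkpoint", default=None)
    parser.add_argument("--adapter_config", default=None)
    cfg = base_config_from_args(
        parser.parse_args(["--adapter_checkpoint", "adapter.pt"])
    )
    print(cfg)  # adapter_checkpoint is set; undefined fields stay None
```

The `hasattr` guard keeps `from_args` usable with argument parsers that do not register the new LoRA flags, so callers that never add `--adapter_checkpoint` or `--adapter_config` are unaffected.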
