@@ -363,6 +363,7 @@ def test_parse_arguments(job_config):
         _,
         _,
         _,
+        _,
     ) = sft_trainer.parse_arguments(parser, job_config_copy)
     assert str(model_args.torch_dtype) == "torch.bfloat16"
     assert data_args.dataset_text_field == "output"
@@ -390,6 +391,7 @@ def test_parse_arguments_defaults(job_config):
         _,
         _,
         _,
+        _,
     ) = sft_trainer.parse_arguments(parser, job_config_defaults)
     assert str(model_args.torch_dtype) == "torch.bfloat16"
     assert model_args.use_flash_attn is False
@@ -400,14 +402,14 @@ def test_parse_arguments_peft_method(job_config):
     parser = sft_trainer.get_parser()
     job_config_pt = copy.deepcopy(job_config)
     job_config_pt["peft_method"] = "pt"
-    _, _, _, _, tune_config, _, _, _, _, _, _, _, _ = sft_trainer.parse_arguments(
+    _, _, _, _, tune_config, _, _, _, _, _, _, _, _, _ = sft_trainer.parse_arguments(
         parser, job_config_pt
     )
     assert isinstance(tune_config, peft_config.PromptTuningConfig)

     job_config_lora = copy.deepcopy(job_config)
     job_config_lora["peft_method"] = "lora"
-    _, _, _, _, tune_config, _, _, _, _, _, _, _, _ = sft_trainer.parse_arguments(
+    _, _, _, _, tune_config, _, _, _, _, _, _, _, _, _ = sft_trainer.parse_arguments(
         parser, job_config_lora
     )
     assert isinstance(tune_config, peft_config.LoraConfig)
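
Because these tests unpack every positional return value of sft_trainer.parse_arguments, each new return value forces an extra placeholder into every call site. A minimal sketch of an alternative, assuming only tune_config (still the fifth return value) is needed here and the trailing values stay unused, is Python's extended unpacking, which absorbs however many values follow:

    # Sketch only: assumes tune_config remains the fifth value returned by
    # sft_trainer.parse_arguments. The starred placeholder collects all
    # remaining return values, so adding another one would not require
    # editing this unpacking again.
    _, _, _, _, tune_config, *_ = sft_trainer.parse_arguments(parser, job_config_pt)
    assert isinstance(tune_config, peft_config.PromptTuningConfig)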