Commit d71bf6b

tastelikefeet authored and Jintao-Huang committed
Fix test bugs (#5484)
1 parent 7d3d9b7 commit d71bf6b

File tree

4 files changed: +7 -3 lines changed

swift/llm/argument/base_args/base_args.py
Lines changed: 2 additions & 1 deletion

@@ -103,7 +103,8 @@ def _prepare_training_args(self, training_args: Dict[str, Any]) -> None:

     def _init_lazy_tokenize(self):
         if self.lazy_tokenize is None:
-            if self.model_meta.is_multimodal and not self.streaming and not self.packing:
+            if (self.model_meta is not None and self.model_meta.is_multimodal and not self.streaming
+                    and not self.packing):
                 self.lazy_tokenize = True
             else:
                 self.lazy_tokenize = False
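
The change above makes _init_lazy_tokenize check that model_meta is set before reading is_multimodal. A minimal, hypothetical sketch of why the guard matters (a simplified stand-in class, not the real swift.llm BaseArguments):

# Hypothetical stand-in showing the effect of the `model_meta is not None` guard:
# without it, resolving lazy_tokenize with no model metadata raises AttributeError.
from dataclasses import dataclass
from typing import Optional


@dataclass
class ModelMeta:
    is_multimodal: bool = True


@dataclass
class Args:
    model_meta: Optional[ModelMeta] = None
    lazy_tokenize: Optional[bool] = None
    streaming: bool = False
    packing: bool = False

    def init_lazy_tokenize(self):
        if self.lazy_tokenize is None:
            # Guard: only read model_meta.is_multimodal when model_meta is set.
            if (self.model_meta is not None and self.model_meta.is_multimodal
                    and not self.streaming and not self.packing):
                self.lazy_tokenize = True
            else:
                self.lazy_tokenize = False


args = Args()               # model_meta is None, e.g. in a test that never loads a model
args.init_lazy_tokenize()
print(args.lazy_tokenize)   # False, instead of an AttributeError with the old check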

swift/llm/argument/base_args/template_args.py
Lines changed: 1 addition & 1 deletion

@@ -47,7 +47,7 @@ class TemplateArguments:
     template_backend: Literal['swift', 'jinja'] = 'swift'

     def __post_init__(self):
-        if self.template is None and hasattr(self, 'model_meta'):
+        if self.template is None and getattr(self, 'model_meta', None):
             self.template = self.model_meta.template
         if self.use_chat_template is None:
             self.use_chat_template = True
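
The hasattr-to-getattr switch is subtle: hasattr(self, 'model_meta') is True even when the attribute exists but is None, so the old check could still enter the branch and fail on self.model_meta.template. getattr(self, 'model_meta', None) is falsy both when the attribute is missing and when it is None. A standalone illustration (plain Python, not the real TemplateArguments):

# Standalone illustration of hasattr vs. getattr when the attribute exists but is None.
class Cfg:
    def __init__(self):
        self.model_meta = None  # attribute is present, but holds no metadata


cfg = Cfg()
print(hasattr(cfg, 'model_meta'))              # True  -> old check would enter the branch
print(bool(getattr(cfg, 'model_meta', None)))  # False -> new check skips it safely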

swift/llm/argument/sampling_args.py
Lines changed: 3 additions & 0 deletions

@@ -56,6 +56,9 @@ class SamplingArguments(BaseArguments):
     def _init_model_info(self):
         if self.sampler_engine != 'client':
             return super()._init_model_info()
+        else:
+            self.model_info = None
+            self.model_meta = None
         self.task_type = 'causal_lm'
         return
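
When sampler_engine is 'client', the parent's _init_model_info is skipped, so model_info and model_meta would otherwise never be set; assigning None explicitly keeps the attributes present, and None-checks such as the guards added above take the safe path instead of raising.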

swift/llm/train/tuner.py
Lines changed: 1 addition & 1 deletion

@@ -243,7 +243,7 @@ def prepare_adapter(args: TrainArguments, model, *, template=None, train_dataset
         logger.info(f'adalora_config: {adalora_config}')
     elif args.train_type == 'llamapro':
         llamapro_config = LLaMAProConfig(
-            model_type=model.model_meta.model_arch,
+            model_type=model.model_meta.model_arch.arch_name,
             num_new_blocks=args.llamapro_num_new_blocks,
             num_groups=args.llamapro_num_groups)
         model = Swift.prepare_model(model, llamapro_config)
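
Here LLaMAProConfig receives model_arch.arch_name rather than the ModelArch object itself, presumably because model_type is expected to be the architecture name as a plain string.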
