We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
2 parents 3393361 + 8a128f2 — commit 7e16e00 (copy full SHA for 7e16e00)
swift/trainers/mixin.py
@@ -590,7 +590,14 @@ def create_optimizer(self):
590
opt_model = self.model
591
592
if self.optimizer is None:
593
- decay_parameters = self.get_decay_parameter_names(opt_model)
+ if version.parse(
594
+ transformers.__version__) < version.parse('4.34.0'):
595
+ logger.warning(
596
+ f'If you are using lora+, please remember using transformers>=4.34.0, '
597
+ f'but now is {transformers.__version__}')
598
+ return super().create_optimizer()
599
+ else:
600
+ decay_parameters = self.get_decay_parameter_names(opt_model)
601
if isinstance(self.model, SwiftModel):
602
optimizer_grouped_parameters = self.model.create_optimizer_param_groups(
603
lr=self.args.learning_rate,
0 commit comments