@@ -3666,9 +3666,11 @@ export class CLIPModel extends CLIPPreTrainedModel { }
export class CLIPTextModel extends CLIPPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'text_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'text_model',
        });
    }
}
36743676
@@ -3701,9 +3703,11 @@ export class CLIPTextModel extends CLIPPreTrainedModel {
export class CLIPTextModelWithProjection extends CLIPPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'text_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'text_model',
        });
    }
}
37093713
@@ -3713,9 +3717,11 @@ export class CLIPTextModelWithProjection extends CLIPPreTrainedModel {
export class CLIPVisionModel extends CLIPPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'vision_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'vision_model',
        });
    }
}
37213727
@@ -3748,9 +3754,11 @@ export class CLIPVisionModel extends CLIPPreTrainedModel {
export class CLIPVisionModelWithProjection extends CLIPPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'vision_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'vision_model',
        });
    }
}
37563764//////////////////////////////////////////////////
@@ -3834,9 +3842,11 @@ export class SiglipModel extends SiglipPreTrainedModel { }
export class SiglipTextModel extends SiglipPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'text_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'text_model',
        });
    }
}
38423852
@@ -3869,9 +3879,11 @@ export class SiglipTextModel extends SiglipPreTrainedModel {
// NOTE(review): this class extends CLIPPreTrainedModel, not
// SiglipPreTrainedModel, unlike its SiglipTextModel sibling — confirm
// this is intentional upstream before "fixing" it.
export class SiglipVisionModel extends CLIPPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'vision_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'vision_model',
        });
    }
}
38773889//////////////////////////////////////////////////
@@ -3926,18 +3938,22 @@ export class JinaCLIPModel extends JinaCLIPPreTrainedModel {
export class JinaCLIPTextModel extends JinaCLIPPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'text_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'text_model',
        });
    }
}
39343948
export class JinaCLIPVisionModel extends JinaCLIPPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'vision_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'vision_model',
        });
    }
}
39433959//////////////////////////////////////////////////
@@ -6159,9 +6175,11 @@ export class ClapModel extends ClapPreTrainedModel { }
export class ClapTextModelWithProjection extends ClapPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'text_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'text_model',
        });
    }
}
61676185
@@ -6194,9 +6212,11 @@ export class ClapTextModelWithProjection extends ClapPreTrainedModel {
export class ClapAudioModelWithProjection extends ClapPreTrainedModel {
    /** @type {typeof PreTrainedModel.from_pretrained} */
    static async from_pretrained(pretrained_model_name_or_path, options = {}) {
        // Build a fresh options object instead of mutating the caller's one.
        // `??` preserves the earlier `options.model_file_name ??= 'audio_model'`
        // semantics: an explicitly-undefined (or null) value still falls back
        // to the default, which a bare `...options` spread placed after the
        // default would otherwise clobber.
        return super.from_pretrained(pretrained_model_name_or_path, {
            ...options,
            model_file_name: options.model_file_name ?? 'audio_model',
        });
    }
}
62026222//////////////////////////////////////////////////
0 commit comments