@@ -69,9 +69,6 @@ class AutoModelForCausalLMWithValueHead(PreTrainedModelWrapper):
     Class attributes:
         - **transformers_parent_class** (`transformers.PreTrainedModel`) -- The parent class of the wrapped model. This
            should be set to `transformers.AutoModelForCausalLM` for this class.
-        - **lm_head_namings** (`tuple`) -- A tuple of strings that are used to identify the language model head of the
-            wrapped model. This is set to `("lm_head", "embed_out", "output_layer")` for this class but can be changed
-            for other models in the future
         - **supported_args** (`tuple`) -- A tuple of strings that are used to identify the arguments that are supported
            by the `ValueHead` class. Currently, the supported args are:
            - **summary_dropout_prob** (`float`, `optional`, defaults to `None`) -- The dropout probability for the
@@ -86,7 +83,6 @@ class AutoModelForCausalLMWithValueHead(PreTrainedModelWrapper):
     """

     transformers_parent_class = AutoModelForCausalLM
-    lm_head_namings = ["lm_head", "embed_out", "output_layer"]
     supported_args = (
         "summary_dropout_prob",
         "v_head_initializer_range",
@@ -106,12 +102,7 @@ def __init__(self, pretrained_model, **kwargs):
         """
         super().__init__(pretrained_model, **kwargs)
         v_head_kwargs, _, _ = self._split_kwargs(kwargs)
-
-        if not any(hasattr(self.pretrained_model, attribute) for attribute in self.lm_head_namings):
-            raise ValueError("The model does not have a language model head, please use a model that has one.")
-
         self.v_head = ValueHead(self.pretrained_model.config, **v_head_kwargs)
-
         self._init_weights(**v_head_kwargs)

     def _init_weights(self, **kwargs):
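
For reference, here is a minimal caller-side sketch of the guard this commit removes. It assumes the `transformers` API shown and uses `gpt2` purely as a placeholder checkpoint; neither comes from the diff itself:

```python
# Sketch only: replicates the deleted lm-head guard outside the wrapper.
from transformers import AutoModelForCausalLM

# Attribute names the removed check used to identify a language model head.
lm_head_namings = ("lm_head", "embed_out", "output_layer")

# "gpt2" is a placeholder checkpoint, not something this commit prescribes.
pretrained_model = AutoModelForCausalLM.from_pretrained("gpt2")

# Equivalent of the check removed from __init__, now a caller-side concern:
if not any(hasattr(pretrained_model, attribute) for attribute in lm_head_namings):
    raise ValueError("The model does not have a language model head, please use a model that has one.")
```

With the guard gone, construction no longer fails fast for models that expose none of these head attributes; any check of this kind now has to happen before wrapping.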