2 files changed, +5 −5 lines changed

autointent/modules/scoring/_lora

@@ -87,13 +87,13 @@ def fit(
         model_name = self.model_config.model_name
         self._tokenizer = AutoTokenizer.from_pretrained(model_name)
         self._model = AutoModelForSequenceClassification.from_pretrained(
-            model_name,
+            model_name,
             num_labels=self._n_classes,
             problem_type="multi_label_classification" if self._multilabel else "single_label_classification"
         )
         self._model = get_peft_model(self._model, self._lora_config)

-        device = torch.device(self.model_config.device if self.model_config.device else 'cpu')
+        device = torch.device(self.model_config.device if self.model_config.device else "cpu")
         self._model = self._model.to(device)

         use_cpu = self.model_config.device == "cpu"
@@ -136,8 +136,8 @@ def predict(self, utterances: list[str]) -> npt.NDArray[Any]:
         if not hasattr(self, "_model") or not hasattr(self, "_tokenizer"):
             msg = "Model is not trained. Call fit() first."
             raise RuntimeError(msg)
-
-        device = torch.device(self.model_config.device if self.model_config.device else 'cpu')
+
+        device = torch.device(self.model_config.device if self.model_config.device else "cpu")
         self._model = self._model.to(device)

         all_predictions = []
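
For context, the lines touched above boil down to a common Transformers + PEFT pattern: load a sequence-classification backbone, wrap it with LoRA adapters, and move it to the configured device with a CPU fallback. Below is a minimal standalone sketch of that pattern, not the module's actual API; the model name, label count, and LoRA hyperparameters are illustrative assumptions.

# Minimal sketch of the load-wrap-move pattern seen in the diff.
# Assumptions: any HF sequence-classification backbone works here;
# "bert-base-uncased", num_labels=3, and the LoRA settings are placeholders.
import torch
from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_name = "bert-base-uncased"   # illustrative backbone
configured_device = None           # e.g. "cuda:0"; falsy values fall back to CPU

tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(
    model_name,
    num_labels=3,                                  # illustrative number of classes
    problem_type="single_label_classification",    # or "multi_label_classification"
)

# Wrap the base classifier with LoRA adapters.
lora_config = LoraConfig(task_type=TaskType.SEQ_CLS, r=8, lora_alpha=16, lora_dropout=0.1)
model = get_peft_model(model, lora_config)

# Same device-selection fallback the diff normalizes: use the configured
# device if set, otherwise default to "cpu".
device = torch.device(configured_device if configured_device else "cpu")
model = model.to(device)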

@@ -53,4 +53,4 @@ def test_bert_cache_clearing(dataset):
     assert not hasattr(scorer, "_tokenizer") or scorer._tokenizer is None

     with pytest.raises(RuntimeError):
-        scorer.predict(test_data)
+        scorer.predict(test_data)