@@ -50,7 +50,7 @@ def decision_accuracy(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> f
     :return: Score of the decision accuracy
     """
     y_true_, y_pred_ = transform(y_true, y_pred)
-    return np.mean(y_true_ == y_pred_)  # type: ignore[no-any-return]
+    return float(np.mean(y_true_ == y_pred_))
 
 
 def _decision_roc_auc_multiclass(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> float:
@@ -83,7 +83,7 @@ def _decision_roc_auc_multiclass(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE
         binarized_pred = (y_pred_ == k).astype(int)
         roc_auc_scores.append(roc_auc_score(binarized_true, binarized_pred))
 
-    return np.mean(roc_auc_scores)  # type: ignore[return-value]
+    return float(np.mean(roc_auc_scores))
 
 
 def _decision_roc_auc_multilabel(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> float:
@@ -98,7 +98,7 @@ def _decision_roc_auc_multilabel(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE
     :param y_pred: Predicted values of labels
     :return: Score of the decision accuracy
     """
-    return roc_auc_score(y_true, y_pred, average="macro")  # type: ignore[no-any-return]
+    return float(roc_auc_score(y_true, y_pred, average="macro"))
 
 
 def decision_roc_auc(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> float:
@@ -135,7 +135,7 @@ def decision_precision(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) ->
     :param y_pred: Predicted values of labels
     :return: Score of the decision precision
     """
-    return precision_score(y_true, y_pred, average="macro")  # type: ignore[no-any-return]
+    return float(precision_score(y_true, y_pred, average="macro"))
 
 
 def decision_recall(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> float:
@@ -150,7 +150,7 @@ def decision_recall(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> flo
     :param y_pred: Predicted values of labels
     :return: Score of the decision recall
     """
-    return recall_score(y_true, y_pred, average="macro")  # type: ignore[no-any-return]
+    return float(recall_score(y_true, y_pred, average="macro"))
 
 
 def decision_f1(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> float:
@@ -165,4 +165,4 @@ def decision_f1(y_true: LABELS_VALUE_TYPE, y_pred: LABELS_VALUE_TYPE) -> float:
     :param y_pred: Predicted values of labels
     :return: Score of the decision accuracy
     """
-    return f1_score(y_true, y_pred, average="macro")  # type: ignore[no-any-return]
+    return float(f1_score(y_true, y_pred, average="macro"))
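
For reference, a minimal sketch of the pattern this commit applies (the `accuracy` helper below is illustrative, not taken from the repository): `np.mean(...)` is typed as a NumPy scalar (`np.floating`) rather than a built-in `float`, so wrapping the result in `float(...)` satisfies the declared `-> float` return type and makes the `# type: ignore` comments unnecessary.

```python
import numpy as np


def accuracy(y_true: np.ndarray, y_pred: np.ndarray) -> float:
    # np.mean returns a NumPy floating scalar; the explicit cast keeps the
    # annotated return type honest without a "# type: ignore" comment.
    return float(np.mean(y_true == y_pred))


print(accuracy(np.array([0, 1, 1]), np.array([0, 1, 0])))  # 0.666...
```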