@@ -188,27 +188,30 @@ def infer(self, dataset: DatasetEntity, inference_parameters: InferenceParameter
                 label = self.anomalous_label if image_result.pred_score >= 0.5 else self.normal_label
             elif self.task_type == TaskType.ANOMALY_SEGMENTATION:
                 annotations = create_annotation_from_segmentation_map(
-                    pred_mask, image_result.anomaly_map.squeeze(), {0: self.normal_label, 1: self.anomalous_label}
+                    pred_mask,
+                    image_result.anomaly_map.squeeze() / 255.0,
+                    {0: self.normal_label, 1: self.anomalous_label},
                 )
                 dataset_item.append_annotations(annotations)
                 label = self.normal_label if len(annotations) == 0 else self.anomalous_label
             elif self.task_type == TaskType.ANOMALY_DETECTION:
                 annotations = create_detection_annotation_from_anomaly_heatmap(
-                    pred_mask, image_result.anomaly_map.squeeze(), {0: self.normal_label, 1: self.anomalous_label}
+                    pred_mask,
+                    image_result.anomaly_map.squeeze() / 255.0,
+                    {0: self.normal_label, 1: self.anomalous_label},
                 )
                 dataset_item.append_annotations(annotations)
                 label = self.normal_label if len(annotations) == 0 else self.anomalous_label
             else:
                 raise ValueError(f"Unknown task type: {self.task_type}")

             dataset_item.append_labels([ScoredLabel(label=label, probability=float(probability))])
-            anomaly_map = (image_result.anomaly_map * 255).astype(np.uint8)
             heatmap_media = ResultMediaEntity(
                 name="Anomaly Map",
                 type="anomaly_map",
                 label=label,
                 annotation_scene=dataset_item.annotation_scene,
-                numpy=anomaly_map,
+                numpy=image_result.anomaly_map,
             )
             dataset_item.append_metadata_item(heatmap_media)
             update_progress_callback(int((idx + 1) / len(dataset) * 100))
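Taken together, the two added `/ 255.0` rescalings and the removed `(image_result.anomaly_map * 255).astype(np.uint8)` conversion suggest that the post-processed `image_result.anomaly_map` now arrives as a `uint8` map in `[0, 255]`: it can be attached to the `ResultMediaEntity` directly, while the annotation helpers still receive a float soft prediction in `[0, 1]`. A minimal sketch of that rescaling, assuming such a uint8 map (the random array below is a hypothetical stand-in for the model output, not the real `image_result`):

```python
import numpy as np

# Assumption from this diff: the post-processed anomaly map is uint8 in [0, 255],
# with a leading singleton dimension that .squeeze() removes.
anomaly_map_uint8 = np.random.randint(0, 256, size=(1, 256, 256), dtype=np.uint8)

# The annotation helpers consume a float soft prediction in [0, 1] ...
soft_prediction = anomaly_map_uint8.squeeze() / 255.0
assert soft_prediction.min() >= 0.0 and soft_prediction.max() <= 1.0

# ... while the heatmap media can take the uint8 map as-is, so the old
# `* 255` / astype(np.uint8) conversion step is no longer needed.
heatmap_for_media = anomaly_map_uint8
```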