Commit b0eac19

Delete mem cache handler after training is done (#2535)
release mem cache handler after training is done
1 parent 88deb2d commit b0eac19
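
For context, the memory cache handler keeps pre-loaded dataset items in RAM during training; this commit releases that process-wide cache as soon as each task's train() no longer needs it, so the cached data does not linger for the rest of the process lifetime. The sketch below is illustrative only: it shows the general "singleton cache with an explicit delete()" pattern that the added MemCacheHandlerSingleton.delete() calls rely on. The names SimpleMemCache and run_training are hypothetical and are not the OTX implementation.

# Illustrative sketch only, not the OTX implementation. It mirrors the
# pattern this commit uses: a process-wide in-memory cache that must be
# released explicitly once training is done with it.
from typing import Dict, Optional


class SimpleMemCache:
    """Hypothetical process-wide cache of pre-loaded dataset items."""

    _instance: Optional["SimpleMemCache"] = None

    def __init__(self) -> None:
        self._items: Dict[str, bytes] = {}

    @classmethod
    def get(cls) -> "SimpleMemCache":
        # Lazily create the single shared instance.
        if cls._instance is None:
            cls._instance = cls()
        return cls._instance

    @classmethod
    def delete(cls) -> None:
        # Drop the cached items and the instance itself so the memory
        # can be reclaimed by the garbage collector.
        if cls._instance is not None:
            cls._instance._items.clear()
            cls._instance = None

    def put(self, key: str, value: bytes) -> None:
        self._items[key] = value

    def get_item(self, key: str) -> Optional[bytes]:
        return self._items.get(key)


def run_training() -> None:
    """Hypothetical training entry point following the commit's pattern."""
    cache = SimpleMemCache.get()
    cache.put("sample_0", b"decoded image bytes")
    # ... training loop reads samples from the cache ...
    # Release the cache once training is done, as the three task.py files
    # in this commit now do via MemCacheHandlerSingleton.delete().
    SimpleMemCache.delete()


if __name__ == "__main__":
    run_training()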

File tree

src/otx/algorithms/classification/task.py
src/otx/algorithms/detection/task.py
src/otx/algorithms/segmentation/task.py

3 files changed: +9 -0 lines changed

src/otx/algorithms/classification/task.py

Lines changed: 3 additions & 0 deletions
@@ -80,6 +80,7 @@
 from otx.api.utils.dataset_utils import add_saliency_maps_to_dataset_item
 from otx.api.utils.labels_utils import get_empty_label
 from otx.cli.utils.multi_gpu import is_multigpu_child_process
+from otx.core.data.caching.mem_cache_handler import MemCacheHandlerSingleton
 
 logger = get_logger()
 RECIPE_TRAIN_TYPE = {
@@ -215,6 +216,8 @@ def train(
 
         results = self._train_model(dataset)
 
+        MemCacheHandlerSingleton.delete()
+
         # Check for stop signal when training has stopped. If should_stop is true, training was cancelled and no new
         if self._should_stop:
             logger.info("Training cancelled.")
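
A possible hardening of this placement, shown here as an assumption rather than something the commit does: calling delete() in a finally block would release the cache even when training raises. The helper name train_with_guaranteed_cleanup and the free-standing task/dataset parameters are hypothetical; only MemCacheHandlerSingleton.delete() and _train_model(dataset) appear in the diff above.

# Sketch of an alternative placement (an assumption, not part of this commit):
# releasing the cache in a finally block guarantees cleanup even when
# training fails with an exception.
from otx.core.data.caching.mem_cache_handler import MemCacheHandlerSingleton


def train_with_guaranteed_cleanup(task, dataset):
    """Run training and always release the shared memory cache afterwards."""
    try:
        results = task._train_model(dataset)
    finally:
        # Free the singleton cache whether training succeeded or failed.
        MemCacheHandlerSingleton.delete()
    return results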

src/otx/algorithms/detection/task.py

Lines changed: 3 additions & 0 deletions
@@ -65,6 +65,7 @@
 from otx.api.usecases.tasks.interfaces.export_interface import ExportType
 from otx.api.utils.dataset_utils import add_saliency_maps_to_dataset_item
 from otx.cli.utils.multi_gpu import is_multigpu_child_process
+from otx.core.data.caching.mem_cache_handler import MemCacheHandlerSingleton
 
 logger = get_logger()
 
@@ -231,6 +232,8 @@ def train(
         val_dataset.purpose = DatasetPurpose.INFERENCE
         val_preds, val_map = self._infer_model(val_dataset, InferenceParameters(is_evaluation=True))
 
+        MemCacheHandlerSingleton.delete()
+
         preds_val_dataset = val_dataset.with_empty_annotations()
         if self._hyperparams.postprocessing.result_based_confidence_threshold:
             confidence_threshold = 0.0  # Use all predictions to compute best threshold

src/otx/algorithms/segmentation/task.py

Lines changed: 3 additions & 0 deletions
@@ -70,6 +70,7 @@
     create_hard_prediction_from_soft_prediction,
 )
 from otx.cli.utils.multi_gpu import is_multigpu_child_process
+from otx.core.data.caching.mem_cache_handler import MemCacheHandlerSingleton
 
 logger = get_logger()
 RECIPE_TRAIN_TYPE = {
@@ -171,6 +172,8 @@ def train(
 
         results = self._train_model(dataset)
 
+        MemCacheHandlerSingleton.delete()
+
         # Check for stop signal when training has stopped. If should_stop is true, training was cancelled and no new
         if self._should_stop:
            logger.info("Training cancelled.")
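
As a rough, generic way to see the effect of dropping a large in-memory cache (illustrative only, not taken from this PR), the snippet below watches the process's resident set size before and after the cache is released. It assumes the third-party psutil package; exact numbers depend on the OS and the Python allocator, so treat the output as a trend rather than an exact accounting.

# Generic sanity check, not from this commit: observe process RSS before and
# after a large cache is dropped, analogous to MemCacheHandlerSingleton.delete().
import gc

import psutil


def rss_mib() -> float:
    """Resident set size of the current process, in MiB."""
    return psutil.Process().memory_info().rss / (1024 * 1024)


def main() -> None:
    print(f"baseline:        {rss_mib():8.1f} MiB")

    # Stand-in for a memory cache holding roughly 256 MiB of decoded images.
    cache = {i: b"x" * (1024 * 1024) for i in range(256)}
    print(f"cache populated: {rss_mib():8.1f} MiB")

    # Drop all references so the memory can be reclaimed once training is done.
    cache.clear()
    del cache
    gc.collect()
    print(f"cache released:  {rss_mib():8.1f} MiB")


if __name__ == "__main__":
    main()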
