
Commit 0e26106

Author: Soobee Lee

Move all hooks in MPA into OTX common mmcv adapter (#1922)

1 parent beb807b · commit 0e26106

File tree

67 files changed: +928 additions, -1136 deletions


docs/source/guide/reference/mpa/modules/hooks.rst

Lines changed: 0 additions & 58 deletions
This file was deleted.

docs/source/guide/reference/mpa/modules/index.rst

Lines changed: 0 additions & 1 deletion
@@ -6,6 +6,5 @@ Modules

    models/index
    datasets
-   hooks
    ov/index
    utils

otx/algorithms/classification/adapters/mmcls/models/classifiers/sam_classifier.py

Lines changed: 1 addition & 1 deletion
@@ -275,7 +275,7 @@ def extract_feat(self, img):
 if is_mmdeploy_enabled():
     from mmdeploy.core import FUNCTION_REWRITER
 
-    from otx.mpa.modules.hooks.recording_forward_hooks import (  # pylint: disable=ungrouped-imports
+    from otx.algorithms.common.adapters.mmcv.hooks.recording_forward_hook import (  # pylint: disable=ungrouped-imports
         FeatureVectorHook,
         ReciproCAMHook,
     )
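Downstream code that imported these explain-result hooks from the old MPA path has to switch to the adapter path shown above. A minimal compatibility sketch, not part of this commit, for code that must run against both pre- and post-refactor OTX trees; both module paths are taken verbatim from this diff:

# Hypothetical compatibility shim: prefer the new adapter location,
# fall back to the legacy MPA location on older OTX releases.
try:
    from otx.algorithms.common.adapters.mmcv.hooks.recording_forward_hook import (
        FeatureVectorHook,
        ReciproCAMHook,
    )
except ImportError:  # older OTX: hooks still live under otx.mpa
    from otx.mpa.modules.hooks.recording_forward_hooks import (
        FeatureVectorHook,
        ReciproCAMHook,
    )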
Lines changed: 55 additions & 19 deletions
@@ -1,6 +1,6 @@
 """Adapters for mmcv support."""
 
-# Copyright (C) 2021-2023 Intel Corporation
+# Copyright (C) 2022-2023 Intel Corporation
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,40 +14,76 @@
 # See the License for the specific language governing permissions
 # and limitations under the License.
 
-from .base_hook import (
-    CancelTrainingHook,
-    EarlyStoppingHook,
-    EMAMomentumUpdateHook,
+from .adaptive_training_hook import AdaptiveTrainSchedulingHook
+from .cancel_hook import CancelInterfaceHook, CancelTrainingHook
+from .checkpoint_hook import (
+    CheckpointHookWithValResults,
     EnsureCorrectBestCheckpointHook,
-    OTXLoggerHook,
-    OTXProgressHook,
+    SaveInitialWeightHook,
+)
+from .composed_dataloaders_hook import ComposedDataLoadersHook
+from .custom_model_ema_hook import CustomModelEMAHook, EMAMomentumUpdateHook
+from .dual_model_ema_hook import DualModelEMAHook
+from .early_stopping_hook import (
+    EarlyStoppingHook,
+    LazyEarlyStoppingHook,
     ReduceLROnPlateauLrUpdaterHook,
     StopLossNanTrainingHook,
-    TwoCropTransformHook,
 )
-from .checkpoint_hook import CheckpointHookWithValResults
-from .eval_hook import CustomEvalHook
+from .eval_hook import CustomEvalHook, DistCustomEvalHook
+from .force_train_hook import ForceTrainModeHook
 from .fp16_sam_optimizer_hook import Fp16SAMOptimizerHook
 from .ib_loss_hook import IBLossHook
+from .logger_hook import LoggerReplaceHook, OTXLoggerHook
+from .model_ema_v2_hook import ModelEmaV2Hook
 from .no_bias_decay_hook import NoBiasDecayHook
+from .progress_hook import OTXProgressHook
+from .recording_forward_hook import (
+    ActivationMapHook,
+    BaseRecordingForwardHook,
+    EigenCamHook,
+    FeatureVectorHook,
+)
 from .sam_optimizer_hook import SAMOptimizerHook
 from .semisl_cls_hook import SemiSLClsHook
+from .task_adapt_hook import TaskAdaptHook
+from .two_crop_transform_hook import TwoCropTransformHook
+from .unbiased_teacher_hook import UnbiasedTeacherHook
+from .workflow_hook import WorkflowHook
 
 __all__ = [
+    "AdaptiveTrainSchedulingHook",
+    "CancelInterfaceHook",
+    "CancelTrainingHook",
     "CheckpointHookWithValResults",
+    "EnsureCorrectBestCheckpointHook",
+    "ComposedDataLoadersHook",
     "CustomEvalHook",
+    "DistCustomEvalHook",
+    "EarlyStoppingHook",
+    "LazyEarlyStoppingHook",
+    "ReduceLROnPlateauLrUpdaterHook",
+    "EMAMomentumUpdateHook",
+    "ForceTrainModeHook",
+    "Fp16SAMOptimizerHook",
+    "StopLossNanTrainingHook",
     "IBLossHook",
+    "OTXLoggerHook",
+    "LoggerReplaceHook",
+    "CustomModelEMAHook",
+    "DualModelEMAHook",
+    "ModelEmaV2Hook",
     "NoBiasDecayHook",
+    "OTXProgressHook",
+    "BaseRecordingForwardHook",
+    "EigenCamHook",
+    "ActivationMapHook",
+    "FeatureVectorHook",
     "SAMOptimizerHook",
-    "Fp16SAMOptimizerHook",
+    "SaveInitialWeightHook",
     "SemiSLClsHook",
-    "CancelTrainingHook",
-    "OTXLoggerHook",
-    "OTXProgressHook",
-    "EarlyStoppingHook",
-    "ReduceLROnPlateauLrUpdaterHook",
-    "EnsureCorrectBestCheckpointHook",
-    "StopLossNanTrainingHook",
-    "EMAMomentumUpdateHook",
+    "TaskAdaptHook",
     "TwoCropTransformHook",
+    "UnbiasedTeacherHook",
+    "WorkflowHook",
 ]
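This package __init__ (its file path is not shown in this excerpt; the relative imports and the rename below point to otx/algorithms/common/adapters/mmcv/hooks/) re-exports every hook through __all__, so downstream code can import hooks from the package root rather than from individual submodules. A minimal sketch, assuming that package path:

# Illustrative only: any name listed in __all__ above should be importable this way.
from otx.algorithms.common.adapters.mmcv.hooks import (
    AdaptiveTrainSchedulingHook,
    EarlyStoppingHook,
    OTXLoggerHook,
)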

otx/mpa/modules/hooks/adaptive_training_hooks.py renamed to otx/algorithms/common/adapters/mmcv/hooks/adaptive_training_hook.py

Lines changed: 12 additions & 3 deletions
@@ -1,4 +1,5 @@
-# Copyright (C) 2022 Intel Corporation
+"""Adaptive training schedule hook."""
+# Copyright (C) 2023 Intel Corporation
 # SPDX-License-Identifier: Apache-2.0
 #
 
@@ -8,15 +9,19 @@
 from mmcv.runner.hooks.checkpoint import CheckpointHook
 from mmcv.runner.hooks.evaluation import EvalHook
 
-from otx.mpa.modules.hooks.early_stopping_hook import EarlyStoppingHook
+from otx.algorithms.common.adapters.mmcv.hooks.early_stopping_hook import (
+    EarlyStoppingHook,
+)
 from otx.mpa.utils.logger import get_logger
 
 logger = get_logger()
 
+# pylint: disable=too-many-arguments, too-many-instance-attributes
+
 
 @HOOKS.register_module()
 class AdaptiveTrainSchedulingHook(Hook):
-    """Adaptive Training Scheduling Hook
+    """Adaptive Training Scheduling Hook.
 
     Depending on the size of iteration per epoch, adaptively update the validation interval and related values.
 
@@ -58,6 +63,7 @@ def __init__(
         self._original_interval = None
 
     def before_run(self, runner):
+        """Before run."""
         if self.enable_eval_before_run:
             hook = self.get_evalhook(runner)
             if hook is None:
@@ -68,6 +74,7 @@ def before_run(self, runner):
             hook.start = 0
 
     def before_train_iter(self, runner):
+        """Before train iter."""
         if self.enable_eval_before_run and self._original_interval is not None:
             hook = self.get_evalhook(runner)
             hook.interval = self._original_interval
@@ -110,10 +117,12 @@ def before_train_iter(self, runner):
         self._initialized = True
 
     def get_adaptive_interval(self, iter_per_epoch):
+        """Get adaptive interval."""
         adaptive_interval = max(round(math.exp(self.decay * iter_per_epoch) * self.max_interval), 1)
         return adaptive_interval
 
     def get_evalhook(self, runner):
+        """Get evaluation hook."""
         target_hook = None
         for hook in runner.hooks:
             if isinstance(hook, EvalHook):
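Because the renamed hook keeps its @HOOKS.register_module() decoration, it stays usable from mmcv-style training configs via custom_hooks. A hedged usage sketch, not part of this commit: the parameter names mirror attributes visible in the diff (max_interval, decay, enable_eval_before_run), and treating them as constructor arguments with these illustrative values is an assumption.

# Hypothetical config snippet: register the hook through mmcv's custom_hooks list.
custom_hooks = [
    dict(
        type="AdaptiveTrainSchedulingHook",
        max_interval=5,               # illustrative ceiling scaled down by get_adaptive_interval()
        decay=-0.025,                 # negative, so exp(decay * iter_per_epoch) shrinks the interval
        enable_eval_before_run=True,  # triggers the evaluation handling in before_run()
    ),
]

With a negative decay, get_adaptive_interval() computes max(round(exp(decay * iter_per_epoch) * max_interval), 1), so the validation interval contracts toward 1 as the number of iterations per epoch grows.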
