Skip to content

Commit c0ca809

Browse files
committed
refactor: gc to gradient_centralization
1 parent 524cb00 commit c0ca809

File tree

9 files changed: +8 additions, −8 deletions

pytorch_optimizer/optimizer/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@
 from pytorch_optimizer.optimizer.fromage import Fromage
 from pytorch_optimizer.optimizer.ftrl import FTRL
 from pytorch_optimizer.optimizer.galore import GaLore
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient
 from pytorch_optimizer.optimizer.grams import Grams
 from pytorch_optimizer.optimizer.gravity import Gravity
 from pytorch_optimizer.optimizer.grokfast import GrokFastAdamW

pytorch_optimizer/optimizer/adai.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
 from pytorch_optimizer.base.exception import NoSparseGradientError, ZeroParameterSizeError
 from pytorch_optimizer.base.optimizer import BaseOptimizer
 from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient


 class Adai(BaseOptimizer):

pytorch_optimizer/optimizer/adamp.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
 from pytorch_optimizer.base.exception import NoSparseGradientError
 from pytorch_optimizer.base.optimizer import BaseOptimizer
 from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient
 from pytorch_optimizer.optimizer.utils import projection


pytorch_optimizer/optimizer/adan.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
 from pytorch_optimizer.base.exception import NoSparseGradientError
 from pytorch_optimizer.base.optimizer import BaseOptimizer
 from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient
 from pytorch_optimizer.optimizer.utils import get_global_gradient_norm


pytorch_optimizer/optimizer/lion.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
 from pytorch_optimizer.base.exception import NoSparseGradientError
 from pytorch_optimizer.base.optimizer import BaseOptimizer
 from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient


 class Lion(BaseOptimizer):

pytorch_optimizer/optimizer/ranger.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
 from pytorch_optimizer.base.exception import NoSparseGradientError
 from pytorch_optimizer.base.optimizer import BaseOptimizer
 from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient


 class Ranger(BaseOptimizer):

pytorch_optimizer/optimizer/ranger21.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
 from pytorch_optimizer.base.optimizer import BaseOptimizer
 from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, LOSS, PARAMETERS
 from pytorch_optimizer.optimizer.agc import agc
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient
 from pytorch_optimizer.optimizer.utils import normalize_gradient, unit_norm


pytorch_optimizer/optimizer/sam.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
 from pytorch_optimizer.base.exception import NoClosureError
 from pytorch_optimizer.base.optimizer import BaseOptimizer
 from pytorch_optimizer.base.type import BETAS, CLOSURE, DEFAULTS, OPTIMIZER, PARAMETERS
-from pytorch_optimizer.optimizer.gc import centralize_gradient
+from pytorch_optimizer.optimizer.gradient_centralization import centralize_gradient
 from pytorch_optimizer.optimizer.utils import disable_running_stats, enable_running_stats


Comments (0)