Commit 02320fa

style: fix PLW1508
1 parent 3f87063 commit 02320fa

File tree

2 files changed: +4 −4 lines changed


pytorch_optimizer/optimizer/lomo.py

Lines changed: 2 additions & 2 deletions

@@ -41,7 +41,7 @@ def __init__(
         self.clip_grad_norm = clip_grad_norm
         self.clip_grad_value = clip_grad_value

-        self.local_rank: int = int(os.environ.get('LOCAL_RANK', 0))
+        self.local_rank: int = int(os.environ.get('LOCAL_RANK', '0'))

         self.gather_norm: bool = False
         self.grad_norms: List[torch.Tensor] = []
@@ -256,7 +256,7 @@ def __init__(
         self.grad_norms: List[torch.Tensor] = []
         self.clip_coef: Optional[float] = None

-        self.local_rank: int = int(os.environ.get('LOCAL_RANK', 0))
+        self.local_rank: int = int(os.environ.get('LOCAL_RANK', '0'))
         self.zero3_enabled: bool = is_deepspeed_zero3_enabled()

         self.grad_func: Callable[[Any], Any] = self.fuse_update_zero3() if self.zero3_enabled else self.fuse_update()

pytorch_optimizer/optimizer/muon.py

Lines changed: 2 additions & 2 deletions

@@ -73,8 +73,8 @@ def __init__(
         adamw_params = self.get_parameters(adamw_params) if adamw_params is not None else []
         params.extend(adamw_params)

-        self.world_size: int = int(os.environ.get('WORLD_SIZE', 1))
-        self.rank: int = int(os.environ.get('RANK', 0))
+        self.world_size: int = int(os.environ.get('WORLD_SIZE', '1'))
+        self.rank: int = int(os.environ.get('RANK', '0'))

         defaults: DEFAULTS = {
             'lr': lr,
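For context: Ruff's PLW1508 (invalid-envvar-default) flags non-string defaults passed to os.environ.get, since environment variable values are always strings and a non-string default makes the expression's type inconsistent. A minimal sketch of the pattern this commit applies (standalone illustration only; the surrounding optimizer code is not shown):

import os

# Flagged by PLW1508: the default (0) is an int, but os.environ values are
# always strings, so the .get() call can return either type.
# local_rank = int(os.environ.get('LOCAL_RANK', 0))

# Fixed: keep the default as a string and convert the result once.
local_rank: int = int(os.environ.get('LOCAL_RANK', '0'))
world_size: int = int(os.environ.get('WORLD_SIZE', '1'))
rank: int = int(os.environ.get('RANK', '0'))

The runtime behavior is unchanged; the string default simply passes through the same int() conversion as a real environment value would.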

0 commit comments
