File tree (Expand/Collapse file tree) — 2 files changed: +4 −4 lines changed
pytorch_optimizer/optimizer — 2 files changed: +4 −4 lines changed

Original file line number | Diff line number | Diff line change
@@ -41,7 +41,7 @@ def __init__(
4141 self .clip_grad_norm = clip_grad_norm
4242 self .clip_grad_value = clip_grad_value
4343
44- self .local_rank : int = int (os .environ .get ('LOCAL_RANK' , 0 ))
44+ self .local_rank : int = int (os .environ .get ('LOCAL_RANK' , '0' ))
4545
4646 self .gather_norm : bool = False
4747 self .grad_norms : List [torch .Tensor ] = []
@@ -256,7 +256,7 @@ def __init__(
256256 self .grad_norms : List [torch .Tensor ] = []
257257 self .clip_coef : Optional [float ] = None
258258
259- self .local_rank : int = int (os .environ .get ('LOCAL_RANK' , 0 ))
259+ self .local_rank : int = int (os .environ .get ('LOCAL_RANK' , '0' ))
260260 self .zero3_enabled : bool = is_deepspeed_zero3_enabled ()
261261
262262 self .grad_func : Callable [[Any ], Any ] = self .fuse_update_zero3 () if self .zero3_enabled else self .fuse_update ()
Original file line number | Diff line number | Diff line change
@@ -73,8 +73,8 @@ def __init__(
7373 adamw_params = self .get_parameters (adamw_params ) if adamw_params is not None else []
7474 params .extend (adamw_params )
7575
76- self .world_size : int = int (os .environ .get ('WORLD_SIZE' , 1 ))
77- self .rank : int = int (os .environ .get ('RANK' , 0 ))
76+ self .world_size : int = int (os .environ .get ('WORLD_SIZE' , '1' ))
77+ self .rank : int = int (os .environ .get ('RANK' , '0' ))
7878
7979 defaults : DEFAULTS = {
8080 'lr' : lr ,
You can’t perform that action at this time.
0 commit comments