
Commit e6ba090

fix tricks Conv1dFlatWeights
Signed-off-by: Hao Wu <[email protected]>
1 parent 1cbdcbb

1 file changed, +7 −7 lines changed

emerging_optimizers/utils/modules.py

Lines changed: 7 additions & 7 deletions
@@ -118,16 +118,16 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
     def extra_repr(self) -> str:
         s = "{in_channels}, {out_channels}, kernel_size={kernel_size}, stride={stride}"
         if self.padding != (0,) * len(self.padding):
-            s += ", padding={padding}"
+            s += f", padding={self.padding}"
         if self.dilation != (1,) * len(self.dilation):
-            s += ", dilation={dilation}"
+            s += f", dilation={self.dilation}"
         if self.output_padding != (0,) * len(self.output_padding):
-            s += ", output_padding={output_padding}"
+            s += f", output_padding={self.output_padding}"
         if self.groups != 1:
-            s += ", groups={groups}"
+            s += f", groups={self.groups}"
         if not self.has_bias:
             s += ", bias=False"
         if self.padding_mode != "zeros":
-            s += ", padding_mode={padding_mode}"
-        s += ", flattened_param_shape={tuple(self.weight.shape)}"
-        return s.format(**self.__dict__)
+            s += f", padding_mode={self.padding_mode}"
+        s += f", flattened_param_shape={self.weight.shape}"
+        return s
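For reference, a minimal standalone sketch of the behaviour this commit fixes. The _FakeConv class below is hypothetical (it only stands in for Conv1dFlatWeights to hold the attributes the diff touches); it shows why an expression such as tuple(self.weight.shape) inside a str.format template fails, and what the replacement f-string renders.

# Minimal sketch, not from the repository: _FakeConv is a hypothetical stand-in
# used only to demonstrate the old and new extra_repr patterns from this commit.
import torch


class _FakeConv:
    def __init__(self) -> None:
        self.padding_mode = "circular"
        self.weight = torch.empty(16, 48)  # flattened weight with an arbitrary shape


m = _FakeConv()

# Old pattern: str.format only resolves plain attribute/key names, so a template
# field containing an expression like tuple(self.weight.shape) raises a KeyError
# when formatted against the instance __dict__.
old = ", flattened_param_shape={tuple(self.weight.shape)}"
try:
    old.format(**m.__dict__)
except KeyError as err:
    print(f"old pattern fails: KeyError({err})")

# New pattern: the f-string evaluates the attribute access directly.
new = f", flattened_param_shape={m.weight.shape}"
print(f"new pattern renders: {new}")
# -> new pattern renders: , flattened_param_shape=torch.Size([16, 48])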
