@@ -118,16 +118,16 @@ def forward(self, x: torch.Tensor) -> torch.Tensor:
     def extra_repr(self) -> str:
         s = "{in_channels}, {out_channels}, kernel_size={kernel_size}, stride={stride}"
         if self.padding != (0,) * len(self.padding):
-            s += ", padding={padding}"
+            s += f", padding={self.padding}"
         if self.dilation != (1,) * len(self.dilation):
-            s += ", dilation={dilation}"
+            s += f", dilation={self.dilation}"
         if self.output_padding != (0,) * len(self.output_padding):
-            s += ", output_padding={output_padding}"
+            s += f", output_padding={self.output_padding}"
         if self.groups != 1:
-            s += ", groups={groups}"
+            s += f", groups={self.groups}"
         if not self.has_bias:
             s += ", bias=False"
         if self.padding_mode != "zeros":
-            s += ", padding_mode={padding_mode}"
-        s += ", flattened_param_shape={tuple(self.weight.shape)}"
-        return s.format(**self.__dict__)
+            s += f", padding_mode={self.padding_mode}"
+        s += f", flattened_param_shape={self.weight.shape}"
+        return s