Skip to content

Commit 1e85c50

Browse files
remove duplicate kl_loss definition in Conv1dReparameterization layer
Signed-off-by: Ranganath Krishnan <[email protected]>
1 parent da68be8 commit 1e85c50

File tree

1 file changed: 0 additions, 9 deletions

bayesian_torch/layers/variational_layers/conv_variational.py

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -169,15 +169,6 @@ def kl_loss(self):
 
         return kl
 
-    def kl_loss(self):
-        sigma_weight = torch.log1p(torch.exp(self.rho_kernel))
-        kl = self.kl_div(self.mu_kernel, sigma_weight, self.prior_weight_mu, self.prior_weight_sigma)
-        if self.bias:
-            sigma_bias = torch.log1p(torch.exp(self.rho_bias))
-            kl += self.kl_div(self.mu_bias, sigma_bias, self.prior_bias_mu, self.prior_bias_sigma)
-        return kl
-
     def forward(self, input, return_kl=True):
         if self.dnn_to_bnn_flag:
             return_kl = False

0 commit comments

Comments (0)