Skip to content

Commit daca23c

Browse files
committed
refactor: remove unused entropy_parameters (usually identity)
1 parent fda2fd6 commit daca23c

File tree

1 file changed

+0
-6
lines changed

1 file changed

+0
-6
lines changed

compressai/latent_codecs/checkerboard.py

Lines changed: 0 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -217,10 +217,6 @@ def _forward_twopass_step(
217217
# Save params for current step. This is later used for entropy estimation.
218218
self._copy(params, params_i, step)
219219

220-
# Apply latent_codec's "entropy_parameters()", if it exists. Usually identity.
221-
func = getattr(self.latent_codec["y"], "entropy_parameters", lambda x: x)
222-
params_i = func(params_i)
223-
224220
# Keep only elements needed for current step.
225221
# It's not necessary to mask the rest out just yet, but it doesn't hurt.
226222
params_i = self._keep_only(params_i, step)
@@ -243,8 +239,6 @@ def _forward_twopass_faster(self, y: Tensor, side_params: Tensor) -> Dict[str, A
243239
"""
244240
y_ctx = self._y_ctx_zero(y)
245241
params = self.entropy_parameters(self.merge(y_ctx, side_params))
246-
func = getattr(self.latent_codec["y"], "entropy_parameters", lambda x: x)
247-
params = func(params)
248242
params = self._keep_only(params, "anchor") # Probably unnecessary.
249243
_, means_hat = self.latent_codec["y"]._chunk(params)
250244
y_hat_anchors = quantize_ste(y - means_hat) + means_hat

0 commit comments

Comments (0)