1 parent 47b8c10 commit fd6dca0
src/aging_gan/train.py
@@ -138,9 +138,6 @@ def initialize_optimizers(
     cfg, G, F, DX, DY
 ) -> tuple[optim.Optimizer, optim.Optimizer, optim.Optimizer, optim.Optimizer]:
     """Create Adam optimizers for all models."""
-    # track all generator params (even frozen encoder params during initial training).
-    # This would allow us to transition easily to the full fine-tuning later on by simply toggling requires_grad=True
-    # since the optimizers already track all the parameters from the start.
     opt_G = optim.Adam(
         G.parameters(),
         lr=cfg.gen_lr,
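For context, the deleted comments described a common staged fine-tuning pattern: register every generator parameter with Adam up front, even while the encoder is frozen, so that switching to full fine-tuning later only requires flipping `requires_grad=True` rather than rebuilding the optimizer. Below is a minimal sketch of that pattern; the `TinyGenerator` module, its layers, and the learning rate are illustrative assumptions, not code from this repository.

```python
from torch import nn, optim

class TinyGenerator(nn.Module):
    """Illustrative stand-in for a generator with a pretrained encoder."""
    def __init__(self):
        super().__init__()
        self.encoder = nn.Linear(8, 8)  # imagine this is pretrained
        self.decoder = nn.Linear(8, 8)

    def forward(self, x):
        return self.decoder(self.encoder(x))

G = TinyGenerator()

# Phase 1: freeze the encoder, but still hand *all* parameters to the optimizer.
for p in G.encoder.parameters():
    p.requires_grad = False
opt_G = optim.Adam(G.parameters(), lr=1e-4)  # tracks frozen params too

# ... train with the encoder frozen ...

# Phase 2: enable full fine-tuning by toggling the flag; no need to
# recreate the optimizer, since it already tracks these parameters.
for p in G.encoder.parameters():
    p.requires_grad = True
```

This works because Adam skips parameters whose gradient is `None`, so the frozen encoder weights remain untouched until they are unfrozen.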