Skip to content
This repository was archived by the owner on Jan 2, 2021. It is now read-only.

Commit 149ae4e

Browse files
committed
Update training parameters.
1 parent de047d6 commit 149ae4e

File tree

1 file changed: +3 additions, −3 deletions

enhance.py

Lines changed: 3 additions & 3 deletions
Diff of enhance.py (unified format, lines 48–58):

@@ -48,11 +48,11 @@
 add_arg('--epoch-size', default=72, type=int, help='Number of batches trained in an epoch.')
 add_arg('--save-every', default=10, type=int, help='Save generator after every training epoch.')
 add_arg('--batch-shape', default=192, type=int, help='Resolution of images in training batch.')
-add_arg('--batch-size', default=10, type=int, help='Number of images per training batch.')
+add_arg('--batch-size', default=15, type=int, help='Number of images per training batch.')
 add_arg('--buffer-size', default=1500, type=int, help='Total image fragments kept in cache.')
 add_arg('--buffer-similar', default=5, type=int, help='Fragments cached for each image loaded.')
-add_arg('--learning-rate', default=5E-4, type=float, help='Parameter for the ADAM optimizer.')
-add_arg('--learning-period', default=100, type=int, help='How often to decay the learning rate.')
+add_arg('--learning-rate', default=1E-4, type=float, help='Parameter for the ADAM optimizer.')
+add_arg('--learning-period', default=50, type=int, help='How often to decay the learning rate.')
 add_arg('--learning-decay', default=0.5, type=float, help='How much to decay the learning rate.')
 add_arg('--generator-upscale', default=2, type=int, help='Steps of 2x up-sampling as post-process.')
 add_arg('--generator-downscale',default=0, type=int, help='Steps of 2x down-sampling as preprocess.')

0 commit comments

Comments (0)