@@ -25,6 +25,7 @@ def __init__(self, sess, input_height=108, input_width=108, crop=True,
          batch_size=64, sample_num=64, output_height=64, output_width=64,
          y_dim=None, z_dim=100, gf_dim=64, df_dim=64,
          gfc_dim=1024, dfc_dim=1024, c_dim=3, dataset_name='default',
+         max_to_keep=1,
          input_fname_pattern='*.jpg', checkpoint_dir=None, sample_dir=None, data_dir='./data'):
     """
@@ -77,6 +78,7 @@ def __init__(self, sess, input_height=108, input_width=108, crop=True,
     self.input_fname_pattern = input_fname_pattern
     self.checkpoint_dir = checkpoint_dir
     self.data_dir = data_dir
+    self.max_to_keep = max_to_keep

     if self.dataset_name == 'mnist':
       self.data_X, self.data_y = self.load_mnist()
@@ -155,7 +157,7 @@ def sigmoid_cross_entropy_with_logits(x, y):
     self.d_vars = [var for var in t_vars if 'd_' in var.name]
     self.g_vars = [var for var in t_vars if 'g_' in var.name]

-    self.saver = tf.train.Saver()
+    self.saver = tf.train.Saver(max_to_keep=self.max_to_keep)

   def train(self, config):
     d_optim = tf.train.AdamOptimizer(config.learning_rate, beta1=config.beta1) \
0 commit comments