1 file changed, +7 −4 lines

@@ -173,7 +173,9 @@ def load_model_weights(model, checkpoint_info):
             print(f"Global Step: {pl_sd['global_step']}")
 
         sd = get_state_dict_from_checkpoint(pl_sd)
-        missing, extra = model.load_state_dict(sd, strict=False)
+        del pl_sd
+        model.load_state_dict(sd, strict=False)
+        del sd
 
         if shared.cmd_opts.opt_channelslast:
             model.to(memory_format=torch.channels_last)
@@ -197,9 +199,10 @@ def load_model_weights(model, checkpoint_info):
 
         model.first_stage_model.to(devices.dtype_vae)
 
-        checkpoints_loaded[checkpoint_info] = model.state_dict().copy()
-        while len(checkpoints_loaded) > shared.opts.sd_checkpoint_cache:
-            checkpoints_loaded.popitem(last=False)  # LRU
+        if shared.opts.sd_checkpoint_cache > 0:
+            checkpoints_loaded[checkpoint_info] = model.state_dict().copy()
+            while len(checkpoints_loaded) > shared.opts.sd_checkpoint_cache:
+                checkpoints_loaded.popitem(last=False)  # LRU
 
     else:
         print(f"Loading weights [{sd_model_hash}] from cache")
        checkpoints_loaded.move_to_end(checkpoint_info)
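The first hunk releases the full checkpoint dict as soon as the state dict has been extracted, and releases the state dict once its weights have been copied into the model, so two full sets of references are not held at peak while switching checkpoints. Below is a minimal standalone sketch of the same pattern, not the webui's actual code; strip_wrapper_keys and load_weights_low_peak_memory are hypothetical names standing in for get_state_dict_from_checkpoint and load_model_weights.

import torch

def strip_wrapper_keys(pl_sd):
    # Hypothetical stand-in for get_state_dict_from_checkpoint(): unwrap a
    # Lightning-style checkpoint dict to the bare weight state dict.
    return pl_sd.get("state_dict", pl_sd)

def load_weights_low_peak_memory(model, checkpoint_file):
    # Deserialize the full checkpoint (weights plus training metadata such
    # as "global_step") onto the CPU.
    pl_sd = torch.load(checkpoint_file, map_location="cpu")

    # Keep only the inner state dict and drop the wrapper dict right away;
    # the tensors are still referenced by `sd`, but the metadata and the
    # extra container are released before the weights are loaded.
    sd = strip_wrapper_keys(pl_sd)
    del pl_sd

    # Copy the weights into the model; strict=False tolerates missing or
    # unexpected keys, as in the diff. The (missing, extra) return value
    # was unused, so it is no longer bound to names.
    model.load_state_dict(sd, strict=False)

    # Drop the checkpoint tensors once they have been copied into the
    # model's parameters, lowering peak memory during a model switch.
    del sd
    return model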
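The second hunk stops copying the freshly loaded state dict into checkpoints_loaded when sd_checkpoint_cache is 0: previously the copy was made unconditionally and then discarded again by the eviction loop, costing a transient full copy of the weights even with caching disabled. A sketch of the gated LRU cache follows, using OrderedDict as the diff does; the function names and the cache_size parameter are illustrative, not the webui's API.

from collections import OrderedDict

# Standalone sketch of the checkpoint cache behaviour after this change.
checkpoints_loaded = OrderedDict()

def cache_state_dict(checkpoint_info, state_dict, cache_size):
    # When caching is disabled (cache_size == 0), nothing is stored, so no
    # extra copy of the weights stays resident after loading.
    if cache_size > 0:
        checkpoints_loaded[checkpoint_info] = state_dict
        # Evict the least recently used entries once the cache exceeds its
        # configured size, matching popitem(last=False) in the diff.
        while len(checkpoints_loaded) > cache_size:
            checkpoints_loaded.popitem(last=False)

def get_cached_state_dict(checkpoint_info):
    # Cache hit: mark the entry as most recently used, mirroring the
    # move_to_end() call in the unchanged else-branch of the diff.
    if checkpoint_info in checkpoints_loaded:
        checkpoints_loaded.move_to_end(checkpoint_info)
        return checkpoints_loaded[checkpoint_info]
    return None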