2 files changed, +10 -8 lines changed

@@ -233,14 +233,14 @@ def load_model_dict_into_meta(
     empty_state_dict = model.state_dict()
     expanded_device_map = {}
 
-    if device_map is not None:
-        for param_name, param in state_dict.items():
-            if param_name not in empty_state_dict:
-                continue
-            param_device = _determine_param_device(param_name, device_map)
-            expanded_device_map[param_name] = param_device
-        print(expanded_device_map)
-        _caching_allocator_warmup(model, expanded_device_map, dtype)
+    # if device_map is not None:
+    #     for param_name, param in state_dict.items():
+    #         if param_name not in empty_state_dict:
+    #             continue
+    #         param_device = _determine_param_device(param_name, device_map)
+    #         expanded_device_map[param_name] = param_device
+    #     print(expanded_device_map)
+    #     _caching_allocator_warmup(model, expanded_device_map, dtype)
 
     for param_name, param in state_dict.items():
         if param_name not in empty_state_dict:
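The removed block expanded the caller's `device_map` into a per-parameter `expanded_device_map` and handed it to `_caching_allocator_warmup`; the change disables that warmup (along with its debug `print`) by commenting the block out rather than deleting it. As a rough, hypothetical sketch of the idea behind such a warmup (an assumption about the technique, not the library's actual `_caching_allocator_warmup`): sum the bytes headed for each accelerator device and make one throwaway allocation of that size, so the caching allocator reserves a large block once instead of growing its pool piecemeal while weights are copied in.

```python
import torch
import torch.nn as nn

def warmup_caching_allocator_sketch(model: nn.Module, expanded_device_map: dict, dtype: torch.dtype) -> None:
    # Hypothetical stand-in, not the library's _caching_allocator_warmup.
    itemsize = torch.empty((), dtype=dtype).element_size()
    named_params = dict(model.named_parameters())
    bytes_per_device: dict = {}
    for param_name, device in expanded_device_map.items():
        param = named_params.get(param_name)
        if param is None or str(device) in ("cpu", "disk"):
            continue  # warming up only makes sense for accelerator devices
        bytes_per_device[device] = bytes_per_device.get(device, 0) + param.numel() * itemsize
    for device, num_bytes in bytes_per_device.items():
        # Allocate and immediately drop the tensor; the CUDA caching allocator
        # keeps the freed block in its pool for the upcoming weight copies.
        torch.empty(num_bytes, dtype=torch.uint8, device=device)
```

With the warmup disabled, the loop below simply copies each parameter onto its target device without the up-front reservation.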
@@ -1557,6 +1557,8 @@ def _find_mismatched_keys(
 
     error_msgs += _load_state_dict_into_model(model, state_dict, assign_to_params_buffers)
 
+    torch.cuda.synchronize()
+
     if offload_index is not None and len(offload_index) > 0:
         save_offload_index(offload_index, offload_folder)
         offload_index = None
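The second hunk adds a `torch.cuda.synchronize()` right after the state dict has been loaded into the model and before the offload-index bookkeeping. Weight transfers to the GPU can be queued asynchronously, so without a synchronization point, code that runs (or is timed) immediately after the load may observe copies that are still in flight. A minimal, self-contained illustration of that behavior (assumed example code, not part of this PR) follows.

```python
import torch

if torch.cuda.is_available():
    # Pinned host memory lets copy_(..., non_blocking=True) return before the
    # transfer has actually finished on the device.
    cpu_weights = torch.randn(4096, 4096, pin_memory=True)
    gpu_weights = torch.empty_like(cpu_weights, device="cuda")
    gpu_weights.copy_(cpu_weights, non_blocking=True)  # enqueued, possibly still running
    torch.cuda.synchronize()  # block the host until all queued GPU work completes
    # Only after synchronize() is it safe to time the load or assume the data has landed.
```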