3 files changed: +9 −0 lines changed

@@ -1557,6 +1557,8 @@ int main(int argc, char ** argv) {
                 return 1;
             }
             prev_inst = &inst;
+        } else {
+            llama_model_reset_time(lmodel);
         }
 
         llama_context * ctx = llama_new_context_with_model(lmodel, inst.to_llama_cparams());
@@ -414,6 +414,8 @@ extern "C" {
                              const char * path_model,
                     struct llama_model_params params);
 
+    LLAMA_API void llama_model_reset_time(struct llama_model * model);
+
     LLAMA_API void llama_free_model(struct llama_model * model);
 
     // TODO: rename to llama_init_from_model
@@ -8809,6 +8809,11 @@ static bool llm_load_tensors(
     return true;
 }
 
+void llama_model_reset_time(llama_model * model) {
+    model->t_start_us = ggml_time_us();
+    model->t_load_us  = ggml_time_us() - model->t_start_us;
+}
+
 // Returns 0 on success, -1 on error, and -2 on cancellation via llama_progress_callback
 static int llama_model_load(const std::string & fname, llama_model & model, llama_model_params & params) {
     model.t_start_us = ggml_time_us();
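
Taken together, the three hunks add a small public API for clearing a model's load-time counters and call it from the benchmark whenever an already-loaded model is reused, so per-run timings stop carrying the original load time. Below is a minimal usage sketch, not part of the patch: bench_model and run_benchmark_pass are hypothetical names, backend init/teardown is omitted for brevity, and the llama_print_timings call assumes that is still the timing-report entry point in this revision.

// Sketch: load a model once, reuse it for several benchmark passes, and call
// the new llama_model_reset_time() before every pass after the first so the
// reported load time reflects the reuse (~0 us) rather than the first load.
#include "llama.h"

static void run_benchmark_pass(llama_context * ctx) {
    (void) ctx; // prompt processing / decoding work would go here
}

static int bench_model(const char * model_path, int n_passes) {
    llama_model * model = llama_load_model_from_file(model_path, llama_model_default_params());
    if (model == NULL) {
        return 1;
    }

    for (int i = 0; i < n_passes; i++) {
        if (i > 0) {
            // model reused: without this, later passes would still report
            // the load time measured for the very first pass
            llama_model_reset_time(model);
        }

        llama_context * ctx = llama_new_context_with_model(model, llama_context_default_params());
        run_benchmark_pass(ctx);
        llama_print_timings(ctx); // assumption: this revision still reports timings this way
        llama_free(ctx);
    }

    llama_free_model(model);
    return 0;
}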