@@ -595,10 +595,11 @@ struct server_task_result_cmpl_final : server_task_result {
595595        std::time_t t = std::time(0);
596596
597597        json res = json {
598-             {"choices", json::array({choice})},
599-             {"created", t},
600-             {"model", oaicompat_model},
601-             {"object", "chat.completion"},
598+             {"choices",            json::array({choice})},
599+             {"created",            t},
600+             {"model",              oaicompat_model},
601+             {"system_fingerprint", build_info},
602+             {"object",             "chat.completion"},
602603            {"usage", json {
603604                {"completion_tokens", n_decoded},
604605                {"prompt_tokens",     n_prompt_tokens},
@@ -632,11 +633,12 @@ struct server_task_result_cmpl_final : server_task_result {
632633        };
633634
634635        json ret = json {
635-             {"choices", json::array({choice})},
636-             {"created", t},
637-             {"id",      oaicompat_cmpl_id},
638-             {"model",   oaicompat_model},
639-             {"object",  "chat.completion.chunk"},
636+             {"choices",            json::array({choice})},
637+             {"created",            t},
638+             {"id",                 oaicompat_cmpl_id},
639+             {"model",              oaicompat_model},
640+             {"system_fingerprint", build_info},
641+             {"object",             "chat.completion.chunk"},
640642            {"usage", json {
641643                {"completion_tokens", n_decoded},
642644                {"prompt_tokens",     n_prompt_tokens},
@@ -761,11 +763,12 @@ struct server_task_result_cmpl_partial : server_task_result {
761763        }
762764
763765        json ret = json {
764-             {"choices", choices},
765-             {"created", t},
766-             {"id",      oaicompat_cmpl_id},
767-             {"model",   oaicompat_model},
768-             {"object",  "chat.completion.chunk"}
766+             {"choices",            choices},
767+             {"created",            t},
768+             {"id",                 oaicompat_cmpl_id},
769+             {"model",              oaicompat_model},
770+             {"system_fingerprint", build_info},
771+             {"object",             "chat.completion.chunk"}
769772        };
770773
771774        if (timings.prompt_n >= 0) {
@@ -3476,6 +3479,7 @@ int main(int argc, char ** argv) {
34763479            { "total_slots",                 ctx_server.params_base.n_parallel },
34773480            { "model_path",                  ctx_server.params_base.model },
34783481            { "chat_template",               llama_get_chat_template(ctx_server.model) },
3482+             { "build_info",                  build_info },
34793483        };
34803484

34813485        res_ok(res, data);
0 commit comments