
Commit 8925da2

test passed
1 parent 8f1adf1 commit 8925da2

2 files changed: +21 additions, −61 deletions

common/arg.cpp

Lines changed: 11 additions & 1 deletion
@@ -374,6 +374,8 @@ static bool common_download_file_single(const std::string & url, const std::stri
 
     // display download progress
     curl_easy_setopt(curl.get(), CURLOPT_NOPROGRESS, 0L);
+    curl_easy_setopt(curl.get(), CURLOPT_USERAGENT, "llama.cpp/1.0");
+
 
     // helper function to hide password in URL
     auto llama_download_hide_password_in_url = [](const std::string & url) -> std::string {
@@ -544,7 +546,15 @@ static struct common_hf_file_res common_get_hf_file(const std::string & hf_repo_
     curl_ptr curl(curl_easy_init(), &curl_easy_cleanup);
     curl_slist_ptr http_headers;
     std::string res_str;
-    std::string url = "https://huggingface.co/v2/" + hf_repo + "/manifests/" + tag;
+
+    std::string hf_endpoint = "https://huggingface.co/";
+    const char * hf_endpoint_env = getenv("HF_ENDPOINT");
+    if (hf_endpoint_env) {
+        hf_endpoint = hf_endpoint_env;
+        if (hf_endpoint.back() != '/') hf_endpoint += '/';
+    }
+
+    std::string url = hf_endpoint + "v2/" + hf_repo + "/manifests/" + tag;
     curl_easy_setopt(curl.get(), CURLOPT_URL, url.c_str());
     curl_easy_setopt(curl.get(), CURLOPT_NOPROGRESS, 1L);
     typedef size_t(*CURLOPT_WRITEFUNCTION_PTR)(void * ptr, size_t size, size_t nmemb, void * data);
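The second hunk (and the matching one in examples/run/run.cpp below) reads the optional HF_ENDPOINT environment variable, falls back to https://huggingface.co/ when it is unset, and appends a trailing slash if the override lacks one. A minimal standalone sketch of that resolution follows; get_hf_endpoint is a hypothetical helper used only for illustration, since the commit inlines this logic at each call site.

// Minimal standalone sketch of the HF_ENDPOINT resolution shown above.
// get_hf_endpoint() is a hypothetical helper, not part of this commit.
#include <cstdlib>
#include <iostream>
#include <string>

static std::string get_hf_endpoint() {
    std::string hf_endpoint = "https://huggingface.co/";        // default endpoint
    const char * hf_endpoint_env = std::getenv("HF_ENDPOINT");  // optional override, e.g. a mirror
    if (hf_endpoint_env && *hf_endpoint_env) {                  // extra empty-string guard, for the sketch
        hf_endpoint = hf_endpoint_env;
        if (hf_endpoint.back() != '/') {
            hf_endpoint += '/';                                 // normalize the trailing slash
        }
    }
    return hf_endpoint;
}

int main() {
    // With HF_ENDPOINT=https://hf-mirror.com this prints a
    // https://hf-mirror.com/v2/<repo>/manifests/<tag> style URL.
    std::string hf_repo = "org/model-GGUF"; // placeholder repository name
    std::string tag     = "latest";
    std::cout << get_hf_endpoint() + "v2/" + hf_repo + "/manifests/" + tag << "\n";
    return 0;
}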

examples/run/run.cpp

Lines changed: 10 additions & 60 deletions
@@ -697,8 +697,15 @@ class LlamaData {
         std::vector<std::string> headers = { "User-Agent: llama-cpp", "Accept: application/json" };
         std::string url;
 
+        std::string hf_endpoint = "https://huggingface.co/";
+        const char * hf_endpoint_env = getenv("HF_ENDPOINT");
+        if (hf_endpoint_env) {
+            hf_endpoint = hf_endpoint_env;
+            if (hf_endpoint.back() != '/') hf_endpoint += '/';
+        }
+
         if (pos == std::string::npos) {
-            auto [model_name, manifest_url] = extract_model_and_tag(model, "https://huggingface.co/v2/");
+            auto [model_name, manifest_url] = extract_model_and_tag(model, hf_endpoint + "v2/");
             hfr = model_name;
 
             nlohmann::json manifest;
@@ -713,59 +720,8 @@ class LlamaData {
             hff = model.substr(pos + 1);
         }
 
-        url = "https://huggingface.co/" + hfr + "/resolve/main/" + hff;
-
-        return download(url, bn, true, headers);
-    }
-
-    int modelscope_dl(std::string & model, const std::string & bn) {
-        //Download from ModelScope model repository, quant is optional and case-insensitive.
-        //default to the input tag or Q4_K_M, will fall back to first GGUF file in the repo if quant is not specified and tag is not found.
-        size_t pos = model.find('/');
-        pos = model.find('/', pos + 1);
-        std::string msr;
-        std::string msf;
-        std::vector<std::string> headers = { "User-Agent: llama-cpp", "Accept: application/json"};
-        std::string url;
-        auto endpoint = MODELSCOPE_DOMAIN_DEFINITION;
-
-        if (pos == std::string::npos) {
-            auto [model_name, tag] = extract_model_and_tag(model, "");
-            msr = model_name;
-            rm_until_substring(tag, "/manifests/");
-            std::transform(tag.begin(), tag.end(), std::begin(tag), ::tolower);
-            if (tag == "latest" || tag.empty()) {
-                //ModelScope does not support latest tag
-                tag = "q4_k_m";
-            }
-            std::string manifest_str;
-            url = endpoint + "/api/v1/models/" + msr + "/repo/files?Revision=master&Recursive=True";
-            if (int ret = download(url, "", false, headers, &manifest_str)) {
-                return ret;
-            }
-            auto all_files = nlohmann::json::parse(manifest_str)["Data"]["Files"];
-
-            std::vector<std::string> all_available_files;
-            for (const auto & _file : all_files) {
-                auto file = _file["Path"].get<std::string>();
-                std::transform(file.begin(), file.end(), std::begin(file), ::tolower);
-                if (!string_ends_with(file, ".gguf")) {
-                    continue;
-                }
-                if (file.find(tag) != std::string::npos) {
-                    msf = file;
-                }
-                all_available_files.push_back(file);
-            }
-            if (msf.empty()) {
-                msf = all_available_files[0];
-            }
+        url = hf_endpoint + hfr + "/resolve/main/" + hff;
 
-        } else {
-            msr = model.substr(0, pos);
-            msf = model.substr(pos + 1);
-        }
-        url = endpoint + "/models/" + msr + "/resolve/master/" + msf;
         return download(url, bn, true, headers);
     }

@@ -886,12 +842,6 @@ class LlamaData {
             rm_until_substring(model_, "hf.co/");
             rm_until_substring(model_, "://");
             ret = huggingface_dl(model_, bn);
-        } else if (string_starts_with(model_, "ms://") || string_starts_with(model_, "modelscope://") ||
-                   model_.find("modelscope") != std::string::npos || LLAMACPP_USE_MODELSCOPE_DEFINITION) {
-            rm_until_substring(model_, "modelscope.cn/");
-            rm_until_substring(model_, "modelscope.ai/");
-            rm_until_substring(model_, "://");
-            ret = modelscope_dl(model_, bn);
         } else if ((string_starts_with(model_, "https://") || string_starts_with(model_, "http://")) &&
                    !string_starts_with(model_, "https://ollama.com/library/")) {
             ret = download(model_, bn, true);
@@ -1299,4 +1249,4 @@ int main(int argc, const char ** argv) {
     }
 
     return 0;
-}
+}
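The commit message says "test passed", but the test itself is not part of this diff. A check of the trailing-slash normalization and URL assembly could look like the sketch below; normalize_endpoint is a hypothetical stand-in for the logic the commit inlines at each call site, and the repository and file names are placeholders.

// Hypothetical check of the endpoint normalization introduced in this commit;
// this is not the test referenced by the commit message, just an illustration.
#include <cassert>
#include <string>

static std::string normalize_endpoint(std::string ep) {
    if (!ep.empty() && ep.back() != '/') {
        ep += '/';  // the commit appends '/' when the override lacks one
    }
    return ep;
}

int main() {
    assert(normalize_endpoint("https://hf-mirror.com")  == "https://hf-mirror.com/");
    assert(normalize_endpoint("https://hf-mirror.com/") == "https://hf-mirror.com/");
    // Manifest and file URLs are then built against the normalized endpoint.
    std::string url = normalize_endpoint("https://huggingface.co/") +
                      "org/model-GGUF" + "/resolve/main/" + "model.gguf";
    assert(url == "https://huggingface.co/org/model-GGUF/resolve/main/model.gguf");
    return 0;
}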
