@@ -1200,13 +1200,6 @@ static std::filesystem::path create_credential_path() {
     const char * home_dir = nullptr;
 #ifdef _WIN32
     home_dir = getenv("USERPROFILE");
-    if (!home_dir) {
-        const char * homeDrive = getenv("HOMEDRIVE");
-        const char * homePath = getenv("HOMEPATH");
-        if (homeDrive && homePath) return std::string(homeDrive) + homePath;
-        char documentsPath[MAX_PATH];
-        if (SUCCEEDED(SHGetFolderPathA(NULL, CSIDL_PERSONAL, NULL, SHGFP_TYPE_CURRENT, documentsPath))) return std::string(documentsPath);
-    }
 #else
     home_dir = getenv("HOME");
 #endif
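
A minimal standalone sketch (not part of the patch) of what the simplified lookup now does: only USERPROFILE (Windows) or HOME (POSIX) is consulted, and the HOMEDRIVE/HOMEPATH and SHGetFolderPathA fallbacks, with their shlobj.h dependency, are gone. The name home_path is hypothetical, chosen for illustration.

    // Sketch only: environment-based home lookup mirroring the code that
    // remains after this hunk; yields an empty path when the variable is unset.
    #include <cstdlib>
    #include <filesystem>

    static std::filesystem::path home_path() {
    #ifdef _WIN32
        const char * home = std::getenv("USERPROFILE");
    #else
        const char * home = std::getenv("HOME");
    #endif
        return home ? std::filesystem::path(home) : std::filesystem::path();
    }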
@@ -1610,8 +1603,8 @@ bool ms_login(const std::string & token) {
     json response_json = json::parse(response_string);
     json data = response_json["Data"];
     auto access_token = data["AccessToken"].get<std::string>();
-    save_to_file(git_token_file.c_str(), access_token);
-    save_to_file(user_file.c_str(), data["Username"].get<std::string>() + ":" + data["Email"].get<std::string>());
+    save_to_file(git_token_file.generic_string(), access_token);
+    save_to_file(user_file.generic_string(), data["Username"].get<std::string>() + ":" + data["Email"].get<std::string>());
     return true;
 }
 
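The switch from c_str() to generic_string() is a portability fix, presumably because save_to_file() takes a std::string: path::c_str() returns const path::value_type*, and value_type is wchar_t on Windows, so the old calls would not compile there. A small illustration, not from the patch:

    // Illustration only: generic_string() always yields a std::string with
    // '/' separators, safe to pass where narrow strings are expected on
    // every platform.
    #include <filesystem>
    #include <iostream>

    int main() {
        std::filesystem::path p = std::filesystem::path("home") / "user" / ".ms_token";
        std::cout << p.generic_string() << "\n"; // prints home/user/.ms_token everywhere
    }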
@@ -1802,6 +1795,26 @@ struct llama_model * common_load_model_from_url(
     return nullptr;
 }
 
+struct llama_model * common_load_model_from_ms(
+    const std::string & /*repo*/,
+    const std::string & /*remote_path*/,
+    const std::string & /*local_path*/,
+    const std::string & /*ms_token*/,
+    const struct llama_model_params & /*params*/) {
+    LOG_WRN("%s: llama.cpp built without libcurl, downloading from ModelScope not supported.\n", __func__);
+    return nullptr;
+}
+
+bool ms_login(const std::string & /*token*/) {
+    LOG_WRN("%s: llama.cpp built without libcurl, downloading from ModelScope not supported.\n", __func__);
+    return false;
+}
+
+std::pair<std::string, std::string> common_get_ms_file(const std::string & /*ms_repo_with_tag*/, const std::string & /*ms_token*/) {
+    LOG_WRN("%s: llama.cpp built without libcurl, downloading from ModelScope not supported.\n", __func__);
+    return std::make_pair("", "");
+}
+
 struct llama_model * common_load_model_from_hf(
     const std::string & /*repo*/,
     const std::string & /*remote_path*/,
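
One way a caller might use these stubs (hypothetical, not in this commit; assumes common.h and llama.h are included): because no-curl builds return nullptr/false/empty rather than failing to link, call sites can branch on the result instead of on the build configuration. The helper load_ms_or_local and its local-file fallback are illustrative assumptions.

    // Hypothetical caller sketch, not part of the commit: relies only on the
    // stubbed return values shown above.
    static llama_model * load_ms_or_local(const std::string & repo,
                                          const std::string & remote_path,
                                          const std::string & local_path,
                                          const std::string & ms_token) {
        llama_model_params params = llama_model_default_params();
        llama_model * model = common_load_model_from_ms(repo, remote_path, local_path, ms_token, params);
        if (model == nullptr) {
            // built without libcurl, or the download failed: fall back to a local copy
            model = llama_load_model_from_file(local_path.c_str(), params);
        }
        return model;
    }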