Skip to content

Commit 2e80737

Browse files
dscho and derrickstolee committed
scalar clone: support GVFS-enabled remote repositories
With this change, we come a big step closer to feature parity with Scalar: it allows cloning from Azure Repos (which do not support partial clones at the time of writing). We use the just-implemented JSON parser to parse the response we get from the `gvfs/config` endpoint. Please note that this response might, or might not, contain information about a cache server. The presence or absence of said cache server, however, has nothing to do with the ability to speak the GVFS protocol (but the presence of the `gvfs/config` endpoint does indicate that ability). An alternative considered during the development of this patch was to perform simple string matching instead of parsing the JSON-formatted data; however, this would have been fragile, as the response contains free-form text (e.g. the repository's description) which might contain parts that would confuse a simple string matcher (but not a proper JSON parser). Note: we need to limit the re-try logic in `git clone` to handle only the non-GVFS case: the call to `set_config()` to un-set the partial clone settings would otherwise fail because those settings would not exist in the GVFS protocol case. This will at least give us a clearer reason why such a fetch fails. Co-authored-by: Derrick Stolee <[email protected]> Signed-off-by: Johannes Schindelin <[email protected]> Signed-off-by: Derrick Stolee <[email protected]>
1 parent 5a5e189 commit 2e80737

File tree

2 files changed

+135
-3
lines changed

2 files changed

+135
-3
lines changed

diagnose.c

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
#include "packfile.h"
1414
#include "parse-options.h"
1515
#include "write-or-die.h"
16+
#include "config.h"
1617

1718
struct archive_dir {
1819
const char *path;
@@ -184,6 +185,7 @@ int create_diagnostics_archive(struct strbuf *zip_path, enum diagnose_mode mode)
184185
struct strvec archiver_args = STRVEC_INIT;
185186
char **argv_copy = NULL;
186187
int stdout_fd = -1, archiver_fd = -1;
188+
char *cache_server_url = NULL;
187189
struct strbuf buf = STRBUF_INIT;
188190
int res;
189191
struct archive_dir archive_dirs[] = {
@@ -219,6 +221,11 @@ int create_diagnostics_archive(struct strbuf *zip_path, enum diagnose_mode mode)
219221
get_version_info(&buf, 1);
220222

221223
strbuf_addf(&buf, "Repository root: %s\n", the_repository->worktree);
224+
225+
git_config_get_string("gvfs.cache-server", &cache_server_url);
226+
strbuf_addf(&buf, "Cache Server: %s\n\n",
227+
cache_server_url ? cache_server_url : "None");
228+
222229
get_disk_info(&buf);
223230
write_or_die(stdout_fd, buf.buf, buf.len);
224231
strvec_pushf(&archiver_args,
@@ -276,6 +283,7 @@ int create_diagnostics_archive(struct strbuf *zip_path, enum diagnose_mode mode)
276283
free(argv_copy);
277284
strvec_clear(&archiver_args);
278285
strbuf_release(&buf);
286+
free(cache_server_url);
279287

280288
return res;
281289
}

scalar.c

Lines changed: 127 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
#include "help.h"
2020
#include "setup.h"
2121
#include "trace2.h"
22+
#include "json-parser.h"
2223

2324
static void setup_enlistment_directory(int argc, const char **argv,
2425
const char * const *usagestr,
@@ -339,6 +340,84 @@ static int set_config(const char *fmt, ...)
339340
return res;
340341
}
341342

343+
/* Find N for which .CacheServers[N].GlobalDefault == true */
344+
static int get_cache_server_index(struct json_iterator *it)
345+
{
346+
const char *p;
347+
char *q;
348+
long l;
349+
350+
if (it->type == JSON_TRUE &&
351+
skip_iprefix(it->key.buf, ".CacheServers[", &p) &&
352+
(l = strtol(p, &q, 10)) >= 0 && p != q &&
353+
!strcasecmp(q, "].GlobalDefault")) {
354+
*(long *)it->fn_data = l;
355+
return 1;
356+
}
357+
358+
return 0;
359+
}
360+
361+
struct cache_server_url_data {
362+
char *key, *url;
363+
};
364+
365+
/* Get .CacheServers[N].Url */
366+
static int get_cache_server_url(struct json_iterator *it)
367+
{
368+
struct cache_server_url_data *data = it->fn_data;
369+
370+
if (it->type == JSON_STRING &&
371+
!strcasecmp(data->key, it->key.buf)) {
372+
data->url = strbuf_detach(&it->string_value, NULL);
373+
return 1;
374+
}
375+
376+
return 0;
377+
}
378+
379+
/*
380+
* If `cache_server_url` is `NULL`, print the list to `stdout`.
381+
*
382+
* Since `gvfs-helper` requires a Git directory, this _must_ be run in
383+
* a worktree.
384+
*/
385+
static int supports_gvfs_protocol(const char *url, char **cache_server_url)
386+
{
387+
struct child_process cp = CHILD_PROCESS_INIT;
388+
struct strbuf out = STRBUF_INIT;
389+
390+
cp.git_cmd = 1;
391+
strvec_pushl(&cp.args, "gvfs-helper", "--remote", url, "config", NULL);
392+
if (!pipe_command(&cp, NULL, 0, &out, 512, NULL, 0)) {
393+
long l = 0;
394+
struct json_iterator it =
395+
JSON_ITERATOR_INIT(out.buf, get_cache_server_index, &l);
396+
struct cache_server_url_data data = { .url = NULL };
397+
398+
if (iterate_json(&it) < 0) {
399+
reset_iterator(&it);
400+
strbuf_release(&out);
401+
return error("JSON parse error");
402+
}
403+
data.key = xstrfmt(".CacheServers[%ld].Url", l);
404+
it.fn = get_cache_server_url;
405+
it.fn_data = &data;
406+
if (iterate_json(&it) < 0) {
407+
reset_iterator(&it);
408+
strbuf_release(&out);
409+
return error("JSON parse error");
410+
}
411+
*cache_server_url = data.url;
412+
free(data.key);
413+
reset_iterator(&it);
414+
strbuf_release(&out);
415+
return 1;
416+
}
417+
strbuf_release(&out);
418+
return 0; /* error out quietly */
419+
}
420+
342421
static char *remote_default_branch(const char *url)
343422
{
344423
struct child_process cp = CHILD_PROCESS_INIT;
@@ -438,6 +517,8 @@ static int cmd_clone(int argc, const char **argv)
438517
const char *branch = NULL;
439518
int full_clone = 0, single_branch = 0, show_progress = isatty(2);
440519
int src = 1, tags = 1;
520+
const char *cache_server_url = NULL;
521+
char *default_cache_server_url = NULL;
441522
struct option clone_options[] = {
442523
OPT_STRING('b', "branch", &branch, N_("<branch>"),
443524
N_("branch to checkout after clone")),
@@ -450,6 +531,9 @@ static int cmd_clone(int argc, const char **argv)
450531
N_("create repository within 'src' directory")),
451532
OPT_BOOL(0, "tags", &tags,
452533
N_("specify if tags should be fetched during clone")),
534+
OPT_STRING(0, "cache-server-url", &cache_server_url,
535+
N_("<url>"),
536+
N_("the url or friendly name of the cache server")),
453537
OPT_END(),
454538
};
455539
const char * const clone_usage[] = {
@@ -461,6 +545,7 @@ static int cmd_clone(int argc, const char **argv)
461545
char *enlistment = NULL, *dir = NULL;
462546
struct strbuf buf = STRBUF_INIT;
463547
int res;
548+
int gvfs_protocol;
464549

465550
argc = parse_options(argc, argv, NULL, clone_options, clone_usage, 0);
466551

@@ -526,9 +611,7 @@ static int cmd_clone(int argc, const char **argv)
526611
set_config("remote.origin.fetch="
527612
"+refs/heads/%s:refs/remotes/origin/%s",
528613
single_branch ? branch : "*",
529-
single_branch ? branch : "*") ||
530-
set_config("remote.origin.promisor=true") ||
531-
set_config("remote.origin.partialCloneFilter=blob:none")) {
614+
single_branch ? branch : "*")) {
532615
res = error(_("could not configure remote in '%s'"), dir);
533616
goto cleanup;
534617
}
@@ -538,6 +621,41 @@ static int cmd_clone(int argc, const char **argv)
538621
goto cleanup;
539622
}
540623

624+
if (set_config("credential.https://dev.azure.com.useHttpPath=true")) {
625+
res = error(_("could not configure credential.useHttpPath"));
626+
goto cleanup;
627+
}
628+
629+
gvfs_protocol = cache_server_url ||
630+
supports_gvfs_protocol(url, &default_cache_server_url);
631+
632+
if (gvfs_protocol) {
633+
if (!cache_server_url)
634+
cache_server_url = default_cache_server_url;
635+
if (set_config("core.useGVFSHelper=true") ||
636+
set_config("core.gvfs=150") ||
637+
set_config("http.version=HTTP/1.1")) {
638+
res = error(_("could not turn on GVFS helper"));
639+
goto cleanup;
640+
}
641+
if (cache_server_url &&
642+
set_config("gvfs.cache-server=%s", cache_server_url)) {
643+
res = error(_("could not configure cache server"));
644+
goto cleanup;
645+
}
646+
if (cache_server_url)
647+
fprintf(stderr, "Cache server URL: %s\n",
648+
cache_server_url);
649+
} else {
650+
if (set_config("core.useGVFSHelper=false") ||
651+
set_config("remote.origin.promisor=true") ||
652+
set_config("remote.origin.partialCloneFilter=blob:none")) {
653+
res = error(_("could not configure partial clone in "
654+
"'%s'"), dir);
655+
goto cleanup;
656+
}
657+
}
658+
541659
if (!full_clone &&
542660
(res = run_git("sparse-checkout", "init", "--cone", NULL)))
543661
goto cleanup;
@@ -550,6 +668,11 @@ static int cmd_clone(int argc, const char **argv)
550668
"origin",
551669
(tags ? NULL : "--no-tags"),
552670
NULL))) {
671+
if (gvfs_protocol) {
672+
res = error(_("failed to prefetch commits and trees"));
673+
goto cleanup;
674+
}
675+
553676
warning(_("partial clone failed; attempting full clone"));
554677

555678
if (set_config("remote.origin.promisor") ||
@@ -582,6 +705,7 @@ static int cmd_clone(int argc, const char **argv)
582705
free(enlistment);
583706
free(dir);
584707
strbuf_release(&buf);
708+
free(default_cache_server_url);
585709
return res;
586710
}
587711

0 commit comments

Comments
 (0)