Skip to content

Commit 87381ec

Browse files
dscho and derrickstolee
authored and committed
scalar clone: support GVFS-enabled remote repositories
With this change, we come a big step closer to feature parity with Scalar: this allows cloning from Azure Repos (which do not support partial clones at the time of writing). We use the just-implemented JSON parser to parse the response we got from the `gvfs/config` endpoint. Please note that this response might, or might not, contain information about a cache server. The presence or absence of said cache server, however, has nothing to do with the ability to speak the GVFS protocol (but the presence of the `gvfs/config` endpoint does). An alternative considered during the development of this patch was to perform simple string matching instead of parsing the JSON-formatted data; however, this would have been fragile, as the response contains free-form text (e.g. the repository's description) which might contain parts that would confuse a simple string matcher (but not a proper JSON parser). Note: we need to limit the retry logic in `git clone` to handle only the non-GVFS case: the call to `set_config()` to un-set the partial clone settings would otherwise fail, because those settings would not exist in the GVFS protocol case. This will at least give us a clearer reason why such a fetch fails. Co-authored-by: Derrick Stolee <[email protected]> Signed-off-by: Johannes Schindelin <[email protected]> Signed-off-by: Derrick Stolee <[email protected]>
1 parent a3d686c commit 87381ec

File tree

2 files changed

+135
-3
lines changed

2 files changed

+135
-3
lines changed

diagnose.c

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212
#include "parse-options.h"
1313
#include "repository.h"
1414
#include "write-or-die.h"
15+
#include "config.h"
1516

1617
struct archive_dir {
1718
const char *path;
@@ -185,6 +186,7 @@ int create_diagnostics_archive(struct repository *r,
185186
struct strvec archiver_args = STRVEC_INIT;
186187
char **argv_copy = NULL;
187188
int stdout_fd = -1, archiver_fd = -1;
189+
char *cache_server_url = NULL;
188190
struct strbuf buf = STRBUF_INIT;
189191
int res;
190192
struct archive_dir archive_dirs[] = {
@@ -220,6 +222,11 @@ int create_diagnostics_archive(struct repository *r,
220222
get_version_info(&buf, 1);
221223

222224
strbuf_addf(&buf, "Repository root: %s\n", r->worktree);
225+
226+
repo_config_get_string(r, "gvfs.cache-server", &cache_server_url);
227+
strbuf_addf(&buf, "Cache Server: %s\n\n",
228+
cache_server_url ? cache_server_url : "None");
229+
223230
get_disk_info(&buf);
224231
write_or_die(stdout_fd, buf.buf, buf.len);
225232
strvec_pushf(&archiver_args,
@@ -277,6 +284,7 @@ int create_diagnostics_archive(struct repository *r,
277284
free(argv_copy);
278285
strvec_clear(&archiver_args);
279286
strbuf_release(&buf);
287+
free(cache_server_url);
280288

281289
return res;
282290
}

scalar.c

Lines changed: 127 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
#include "help.h"
2020
#include "setup.h"
2121
#include "trace2.h"
22+
#include "json-parser.h"
2223

2324
static void setup_enlistment_directory(int argc, const char **argv,
2425
const char * const *usagestr,
@@ -339,6 +340,84 @@ static int set_config(const char *fmt, ...)
339340
return res;
340341
}
341342

343+
/* Find N for which .CacheServers[N].GlobalDefault == true */
344+
static int get_cache_server_index(struct json_iterator *it)
345+
{
346+
const char *p;
347+
char *q;
348+
long l;
349+
350+
if (it->type == JSON_TRUE &&
351+
skip_iprefix(it->key.buf, ".CacheServers[", &p) &&
352+
(l = strtol(p, &q, 10)) >= 0 && p != q &&
353+
!strcasecmp(q, "].GlobalDefault")) {
354+
*(long *)it->fn_data = l;
355+
return 1;
356+
}
357+
358+
return 0;
359+
}
360+
361+
struct cache_server_url_data {
362+
char *key, *url;
363+
};
364+
365+
/* Get .CacheServers[N].Url */
366+
static int get_cache_server_url(struct json_iterator *it)
367+
{
368+
struct cache_server_url_data *data = it->fn_data;
369+
370+
if (it->type == JSON_STRING &&
371+
!strcasecmp(data->key, it->key.buf)) {
372+
data->url = strbuf_detach(&it->string_value, NULL);
373+
return 1;
374+
}
375+
376+
return 0;
377+
}
378+
379+
/*
380+
* If `cache_server_url` is `NULL`, print the list to `stdout`.
381+
*
382+
* Since `gvfs-helper` requires a Git directory, this _must_ be run in
383+
* a worktree.
384+
*/
385+
static int supports_gvfs_protocol(const char *url, char **cache_server_url)
386+
{
387+
struct child_process cp = CHILD_PROCESS_INIT;
388+
struct strbuf out = STRBUF_INIT;
389+
390+
cp.git_cmd = 1;
391+
strvec_pushl(&cp.args, "gvfs-helper", "--remote", url, "config", NULL);
392+
if (!pipe_command(&cp, NULL, 0, &out, 512, NULL, 0)) {
393+
long l = 0;
394+
struct json_iterator it =
395+
JSON_ITERATOR_INIT(out.buf, get_cache_server_index, &l);
396+
struct cache_server_url_data data = { .url = NULL };
397+
398+
if (iterate_json(&it) < 0) {
399+
reset_iterator(&it);
400+
strbuf_release(&out);
401+
return error("JSON parse error");
402+
}
403+
data.key = xstrfmt(".CacheServers[%ld].Url", l);
404+
it.fn = get_cache_server_url;
405+
it.fn_data = &data;
406+
if (iterate_json(&it) < 0) {
407+
reset_iterator(&it);
408+
strbuf_release(&out);
409+
return error("JSON parse error");
410+
}
411+
*cache_server_url = data.url;
412+
free(data.key);
413+
reset_iterator(&it);
414+
strbuf_release(&out);
415+
return 1;
416+
}
417+
strbuf_release(&out);
418+
return 0; /* error out quietly */
419+
}
420+
342421
static char *remote_default_branch(const char *url)
343422
{
344423
struct child_process cp = CHILD_PROCESS_INIT;
@@ -439,6 +518,8 @@ static int cmd_clone(int argc, const char **argv)
439518
char *branch_to_free = NULL;
440519
int full_clone = 0, single_branch = 0, show_progress = isatty(2);
441520
int src = 1, tags = 1;
521+
const char *cache_server_url = NULL;
522+
char *default_cache_server_url = NULL;
442523
struct option clone_options[] = {
443524
OPT_STRING('b', "branch", &branch, N_("<branch>"),
444525
N_("branch to checkout after clone")),
@@ -451,6 +532,9 @@ static int cmd_clone(int argc, const char **argv)
451532
N_("create repository within 'src' directory")),
452533
OPT_BOOL(0, "tags", &tags,
453534
N_("specify if tags should be fetched during clone")),
535+
OPT_STRING(0, "cache-server-url", &cache_server_url,
536+
N_("<url>"),
537+
N_("the url or friendly name of the cache server")),
454538
OPT_END(),
455539
};
456540
const char * const clone_usage[] = {
@@ -462,6 +546,7 @@ static int cmd_clone(int argc, const char **argv)
462546
char *enlistment = NULL, *dir = NULL;
463547
struct strbuf buf = STRBUF_INIT;
464548
int res;
549+
int gvfs_protocol;
465550

466551
argc = parse_options(argc, argv, NULL, clone_options, clone_usage, 0);
467552

@@ -527,9 +612,7 @@ static int cmd_clone(int argc, const char **argv)
527612
set_config("remote.origin.fetch="
528613
"+refs/heads/%s:refs/remotes/origin/%s",
529614
single_branch ? branch : "*",
530-
single_branch ? branch : "*") ||
531-
set_config("remote.origin.promisor=true") ||
532-
set_config("remote.origin.partialCloneFilter=blob:none")) {
615+
single_branch ? branch : "*")) {
533616
res = error(_("could not configure remote in '%s'"), dir);
534617
goto cleanup;
535618
}
@@ -539,6 +622,41 @@ static int cmd_clone(int argc, const char **argv)
539622
goto cleanup;
540623
}
541624

625+
if (set_config("credential.https://dev.azure.com.useHttpPath=true")) {
626+
res = error(_("could not configure credential.useHttpPath"));
627+
goto cleanup;
628+
}
629+
630+
gvfs_protocol = cache_server_url ||
631+
supports_gvfs_protocol(url, &default_cache_server_url);
632+
633+
if (gvfs_protocol) {
634+
if (!cache_server_url)
635+
cache_server_url = default_cache_server_url;
636+
if (set_config("core.useGVFSHelper=true") ||
637+
set_config("core.gvfs=150") ||
638+
set_config("http.version=HTTP/1.1")) {
639+
res = error(_("could not turn on GVFS helper"));
640+
goto cleanup;
641+
}
642+
if (cache_server_url &&
643+
set_config("gvfs.cache-server=%s", cache_server_url)) {
644+
res = error(_("could not configure cache server"));
645+
goto cleanup;
646+
}
647+
if (cache_server_url)
648+
fprintf(stderr, "Cache server URL: %s\n",
649+
cache_server_url);
650+
} else {
651+
if (set_config("core.useGVFSHelper=false") ||
652+
set_config("remote.origin.promisor=true") ||
653+
set_config("remote.origin.partialCloneFilter=blob:none")) {
654+
res = error(_("could not configure partial clone in "
655+
"'%s'"), dir);
656+
goto cleanup;
657+
}
658+
}
659+
542660
if (!full_clone &&
543661
(res = run_git("sparse-checkout", "init", "--cone", NULL)))
544662
goto cleanup;
@@ -551,6 +669,11 @@ static int cmd_clone(int argc, const char **argv)
551669
"origin",
552670
(tags ? NULL : "--no-tags"),
553671
NULL))) {
672+
if (gvfs_protocol) {
673+
res = error(_("failed to prefetch commits and trees"));
674+
goto cleanup;
675+
}
676+
554677
warning(_("partial clone failed; attempting full clone"));
555678

556679
if (set_config("remote.origin.promisor") ||
@@ -584,6 +707,7 @@ static int cmd_clone(int argc, const char **argv)
584707
free(enlistment);
585708
free(dir);
586709
strbuf_release(&buf);
710+
free(default_cache_server_url);
587711
return res;
588712
}
589713

0 commit comments

Comments
 (0)