
Commit c85252b

dscho and derrickstolee committed
scalar clone: support GVFS-enabled remote repositories
With this change, we come a big step closer to feature parity with Scalar: it allows cloning from Azure Repos (which do not support partial clones at the time of writing).

We use the just-implemented JSON parser to parse the response received from the `gvfs/config` endpoint. Note that this response may or may not contain information about a cache server. The presence or absence of a cache server, however, has nothing to do with the ability to speak the GVFS protocol; the presence of the `gvfs/config` endpoint itself is what indicates that.

An alternative considered during the development of this patch was to perform simple string matching instead of parsing the JSON-formatted data. However, that would have been fragile: the response contains free-form text (e.g. the repository's description) which might contain parts that would confuse a simple string matcher (but not a proper JSON parser).

Note: we need to limit the retry logic in `git clone` to the non-GVFS case only; otherwise the call to `set_config()` that unsets the partial clone settings would fail, because those settings do not exist in the GVFS protocol case. This will at least give us a clearer reason why such a fetch fails.

The way the `gvfs-helper` command operates is apparently incompatible with HTTP/2, which is why we need to enforce HTTP/1.1 in Scalar clones accessing Azure Repos.

Co-authored-by: Derrick Stolee <[email protected]>
Signed-off-by: Johannes Schindelin <[email protected]>
Signed-off-by: Derrick Stolee <[email protected]>
1 parent bff228b commit c85252b
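
Illustrative sketch only, not part of this commit: the standalone C program below mimics the two-pass lookup described above over flattened JSON key paths (as a JSON iterator would report them), first locating the index N whose `.CacheServers[N].GlobalDefault` is true and then looking up `.CacheServers[N].Url`. It deliberately avoids Git's internal `json-parser.h` API; the sample keys, values, and URLs are invented for the example.

/*
 * Illustrative sketch -- not part of the commit.  The key names mirror
 * the shape matched by get_cache_server_index()/get_cache_server_url();
 * everything else is made up for demonstration purposes.
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <strings.h>

struct kv { const char *key; const char *value; };

/* A hypothetical, already-flattened gvfs/config response. */
static const struct kv example[] = {
	{ ".CacheServers[0].Url", "https://cache0.example.com/" },
	{ ".CacheServers[0].GlobalDefault", "false" },
	{ ".CacheServers[1].Url", "https://cache1.example.com/" },
	{ ".CacheServers[1].GlobalDefault", "true" },
	{ ".Repository.Description", "free-form text that could fool a string matcher" },
};

int main(void)
{
	size_t i, n = sizeof(example) / sizeof(*example);
	long def = -1;
	char key[64];

	/* Pass 1: find N for which .CacheServers[N].GlobalDefault == true */
	for (i = 0; i < n; i++) {
		const char *p = example[i].key;
		char *q;
		long l;

		if (strncasecmp(p, ".CacheServers[", 14))
			continue;
		l = strtol(p + 14, &q, 10);
		if (l >= 0 && q != p + 14 &&
		    !strcasecmp(q, "].GlobalDefault") &&
		    !strcmp(example[i].value, "true")) {
			def = l;
			break;
		}
	}
	if (def < 0)
		return 1; /* no default cache server advertised */

	/* Pass 2: look up .CacheServers[N].Url for that index. */
	snprintf(key, sizeof(key), ".CacheServers[%ld].Url", def);
	for (i = 0; i < n; i++)
		if (!strcasecmp(example[i].key, key))
			printf("default cache server: %s\n", example[i].value);
	return 0;
}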

File tree
diagnose.c
scalar.c

2 files changed: +136 additions, -3 deletions

diagnose.c

Lines changed: 8 additions & 0 deletions

@@ -12,6 +12,7 @@
 #include "parse-options.h"
 #include "repository.h"
 #include "write-or-die.h"
+#include "config.h"
 
 struct archive_dir {
 	const char *path;
@@ -185,6 +186,7 @@ int create_diagnostics_archive(struct repository *r,
 	struct strvec archiver_args = STRVEC_INIT;
 	char **argv_copy = NULL;
 	int stdout_fd = -1, archiver_fd = -1;
+	char *cache_server_url = NULL;
 	struct strbuf buf = STRBUF_INIT;
 	int res;
 	struct archive_dir archive_dirs[] = {
@@ -220,6 +222,11 @@ int create_diagnostics_archive(struct repository *r,
 	get_version_info(&buf, 1);
 
 	strbuf_addf(&buf, "Repository root: %s\n", r->worktree);
+
+	repo_config_get_string(r, "gvfs.cache-server", &cache_server_url);
+	strbuf_addf(&buf, "Cache Server: %s\n\n",
+		    cache_server_url ? cache_server_url : "None");
+
 	get_disk_info(&buf);
 	write_or_die(stdout_fd, buf.buf, buf.len);
 	strvec_pushf(&archiver_args,
@@ -277,6 +284,7 @@ int create_diagnostics_archive(struct repository *r,
 	free(argv_copy);
 	strvec_clear(&archiver_args);
 	strbuf_release(&buf);
+	free(cache_server_url);
 
 	return res;
 }

scalar.c

Lines changed: 128 additions & 3 deletions

@@ -19,6 +19,7 @@
 #include "help.h"
 #include "setup.h"
 #include "trace2.h"
+#include "json-parser.h"
 
 static void setup_enlistment_directory(int argc, const char **argv,
 				       const char * const *usagestr,
@@ -339,6 +340,85 @@ static int set_config(const char *fmt, ...)
 	return res;
 }
 
+/* Find N for which .CacheServers[N].GlobalDefault == true */
+static int get_cache_server_index(struct json_iterator *it)
+{
+	const char *p;
+	char *q;
+	long l;
+
+	if (it->type == JSON_TRUE &&
+	    skip_iprefix(it->key.buf, ".CacheServers[", &p) &&
+	    (l = strtol(p, &q, 10)) >= 0 && p != q &&
+	    !strcasecmp(q, "].GlobalDefault")) {
+		*(long *)it->fn_data = l;
+		return 1;
+	}
+
+	return 0;
+}
+
+struct cache_server_url_data {
+	char *key, *url;
+};
+
+/* Get .CacheServers[N].Url */
+static int get_cache_server_url(struct json_iterator *it)
+{
+	struct cache_server_url_data *data = it->fn_data;
+
+	if (it->type == JSON_STRING &&
+	    !strcasecmp(data->key, it->key.buf)) {
+		data->url = strbuf_detach(&it->string_value, NULL);
+		return 1;
+	}
+
+	return 0;
+}
+
+/*
+ * If `cache_server_url` is `NULL`, print the list to `stdout`.
+ *
+ * Since `gvfs-helper` requires a Git directory, this _must_ be run in
+ * a worktree.
+ */
+static int supports_gvfs_protocol(const char *url, char **cache_server_url)
+{
+	struct child_process cp = CHILD_PROCESS_INIT;
+	struct strbuf out = STRBUF_INIT;
+
+	cp.git_cmd = 1;
+	strvec_pushl(&cp.args, "-c", "http.version=HTTP/1.1",
+		     "gvfs-helper", "--remote", url, "config", NULL);
+	if (!pipe_command(&cp, NULL, 0, &out, 512, NULL, 0)) {
+		long l = 0;
+		struct json_iterator it =
+			JSON_ITERATOR_INIT(out.buf, get_cache_server_index, &l);
+		struct cache_server_url_data data = { .url = NULL };
+
+		if (iterate_json(&it) < 0) {
+			reset_iterator(&it);
+			strbuf_release(&out);
+			return error("JSON parse error");
+		}
+		data.key = xstrfmt(".CacheServers[%ld].Url", l);
+		it.fn = get_cache_server_url;
+		it.fn_data = &data;
+		if (iterate_json(&it) < 0) {
+			reset_iterator(&it);
+			strbuf_release(&out);
+			return error("JSON parse error");
+		}
+		*cache_server_url = data.url;
+		free(data.key);
+		reset_iterator(&it);
+		strbuf_release(&out);
+		return 1;
+	}
+	strbuf_release(&out);
+	return 0; /* error out quietly */
+}
+
 static char *remote_default_branch(const char *url)
 {
 	struct child_process cp = CHILD_PROCESS_INIT;
@@ -439,6 +519,8 @@ static int cmd_clone(int argc, const char **argv)
 	char *branch_to_free = NULL;
 	int full_clone = 0, single_branch = 0, show_progress = isatty(2);
 	int src = 1, tags = 1;
+	const char *cache_server_url = NULL;
+	char *default_cache_server_url = NULL;
 	struct option clone_options[] = {
 		OPT_STRING('b', "branch", &branch, N_("<branch>"),
 			   N_("branch to checkout after clone")),
@@ -451,6 +533,9 @@ static int cmd_clone(int argc, const char **argv)
 			 N_("create repository within 'src' directory")),
 		OPT_BOOL(0, "tags", &tags,
 			 N_("specify if tags should be fetched during clone")),
+		OPT_STRING(0, "cache-server-url", &cache_server_url,
+			   N_("<url>"),
+			   N_("the url or friendly name of the cache server")),
		OPT_END(),
 	};
 	const char * const clone_usage[] = {
@@ -462,6 +547,7 @@ static int cmd_clone(int argc, const char **argv)
 	char *enlistment = NULL, *dir = NULL;
 	struct strbuf buf = STRBUF_INIT;
 	int res;
+	int gvfs_protocol;
 
 	argc = parse_options(argc, argv, NULL, clone_options, clone_usage, 0);
 
@@ -527,9 +613,7 @@ static int cmd_clone(int argc, const char **argv)
 	    set_config("remote.origin.fetch="
		       "+refs/heads/%s:refs/remotes/origin/%s",
		       single_branch ? branch : "*",
-		       single_branch ? branch : "*") ||
-	    set_config("remote.origin.promisor=true") ||
-	    set_config("remote.origin.partialCloneFilter=blob:none")) {
+		       single_branch ? branch : "*")) {
		res = error(_("could not configure remote in '%s'"), dir);
		goto cleanup;
	}
@@ -539,6 +623,41 @@ static int cmd_clone(int argc, const char **argv)
		goto cleanup;
	}
 
+	if (set_config("credential.https://dev.azure.com.useHttpPath=true")) {
+		res = error(_("could not configure credential.useHttpPath"));
+		goto cleanup;
+	}
+
+	gvfs_protocol = cache_server_url ||
+		supports_gvfs_protocol(url, &default_cache_server_url);
+
+	if (gvfs_protocol) {
+		if (!cache_server_url)
+			cache_server_url = default_cache_server_url;
+		if (set_config("core.useGVFSHelper=true") ||
+		    set_config("core.gvfs=150") ||
+		    set_config("http.version=HTTP/1.1")) {
+			res = error(_("could not turn on GVFS helper"));
+			goto cleanup;
+		}
+		if (cache_server_url &&
+		    set_config("gvfs.cache-server=%s", cache_server_url)) {
+			res = error(_("could not configure cache server"));
+			goto cleanup;
+		}
+		if (cache_server_url)
+			fprintf(stderr, "Cache server URL: %s\n",
+				cache_server_url);
+	} else {
+		if (set_config("core.useGVFSHelper=false") ||
+		    set_config("remote.origin.promisor=true") ||
+		    set_config("remote.origin.partialCloneFilter=blob:none")) {
+			res = error(_("could not configure partial clone in "
+				      "'%s'"), dir);
+			goto cleanup;
+		}
+	}
+
	if (!full_clone &&
	    (res = run_git("sparse-checkout", "init", "--cone", NULL)))
		goto cleanup;
@@ -551,6 +670,11 @@ static int cmd_clone(int argc, const char **argv)
			    "origin",
			    (tags ? NULL : "--no-tags"),
			    NULL))) {
+		if (gvfs_protocol) {
+			res = error(_("failed to prefetch commits and trees"));
+			goto cleanup;
+		}
+
		warning(_("partial clone failed; attempting full clone"));
 
		if (set_config("remote.origin.promisor") ||
@@ -584,6 +708,7 @@ static int cmd_clone(int argc, const char **argv)
	free(enlistment);
	free(dir);
	strbuf_release(&buf);
+	free(default_cache_server_url);
	return res;
 }
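Usage note (illustrative, not part of the commit itself): with this change, `scalar clone <url>` probes the remote's `gvfs/config` endpoint and enables the GVFS helper automatically when the endpoint exists, while the new `--cache-server-url=<url>` option lets the user pin a specific cache server (by URL or friendly name) instead of the advertised default, e.g. `scalar clone --cache-server-url=<url> <azure-repos-url>`, where both URLs are placeholders.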
