Commit f18482d

Add Repo.get_cache(url) routine, which checks whether a cache directory exists for the given URL. The return value is passed to scm.clone(), which for Git translates to --reference <cache_path> --dissociate.
Add Repo.set_cache(url) routine, which populates the local cache from the pre-existing repository in Repo.path.
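
For illustration only (not part of the commit): a minimal Python 2 sketch of how the cache lookup works, mirroring the path derivation used by get_cache()/set_cache() in the diff below and the flags Git.clone() adds on a cache hit. The cache root matches the default Repo.cache introduced here; the example URL and the helper name cache_path_for are hypothetical.

    # Python 2, matching mbed.py's imports (urlparse module).
    import os
    import re
    from urlparse import urlparse

    CACHE_ROOT = '/tmp/repo_cache'  # default Repo.cache added by this commit

    def cache_path_for(url):
        # <cache root>/<host>/<repo path>, e.g. /tmp/repo_cache/github.com/ARMmbed/mbed-os
        up = urlparse(url)
        return os.path.join(CACHE_ROOT, up.netloc, re.sub(r'^/', '', up.path))

    url = 'https://github.com/ARMmbed/mbed-os'  # hypothetical example URL
    cpath = cache_path_for(url)

    # get_cache() returns the path only if the directory already exists; in that
    # case Git.clone() appends '--reference <cpath> --dissociate', so objects are
    # borrowed from the cache during the clone and then copied in, leaving the
    # new clone independent of the cache directory.
    extra_flags = ['--reference', cpath, '--dissociate'] if os.path.isdir(cpath) else []
    print(extra_flags)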
1 parent 5a26359 commit f18482d

1 file changed
mbed/mbed.py

Lines changed: 53 additions & 17 deletions
@@ -26,6 +26,7 @@
 import stat
 import errno
 from itertools import chain, izip, repeat
+from urlparse import urlparse
 import urllib
 import zipfile
 
@@ -250,7 +251,7 @@ def init(path, url):
         with cd(path):
             Bld.seturl(url)
 
-    def clone(url, path=None, depth=None, protocol=None):
+    def clone(url, path=None, depth=None, protocol=None, cache=None):
         m = Bld.isurl(url)
         if not m:
             raise ProcessException(1, "Not an mbed library build URL")
@@ -355,16 +356,15 @@ class Hg(object):
 
     def isurl(url):
         m_url = re.match(regex_url_ref, url.strip().replace('\\', '/'))
-        if m_url:
-            return ((re.match(regex_hg_url, m_url.group(1)) or re.match(regex_mbed_url, m_url.group(1)))
-                and not re.match(regex_build_url, m_url.group(1)))
+        if m_url and not re.match(regex_build_url, m_url.group(1)):
+            return re.match(regex_hg_url, m_url.group(1)) or re.match(regex_mbed_url, m_url.group(1))
         else:
             return False
 
     def init(path=None):
         popen([hg_cmd, 'init'] + ([path] if path else []) + (['-v'] if verbose else ['-q']))
 
-    def clone(url, name=None, depth=None, protocol=None):
+    def clone(url, name=None, depth=None, protocol=None, cache=None):
         popen([hg_cmd, 'clone', formaturl(url, protocol), name] + (['-v'] if verbose else ['-q']))
 
     def add(dest):
@@ -523,18 +523,17 @@ class Git(object):
 
     def isurl(url):
         m_url = re.match(regex_url_ref, url.strip().replace('\\', '/'))
-        if m_url:
-            return (re.match(regex_git_url, m_url.group(1))
-                and not re.match(regex_mbed_url, m_url.group(1))
-                and not re.match(regex_build_url, m_url.group(1)))
+        if m_url and not re.match(regex_build_url, m_url.group(1)) and not re.match(regex_mbed_url, m_url.group(1)):
+            return re.match(regex_git_url, m_url.group(1))
         else:
             return False
 
     def init(path=None):
         popen([git_cmd, 'init'] + ([path] if path else []) + ([] if verbose else ['-q']))
 
-    def clone(url, name=None, depth=None, protocol=None):
-        popen([git_cmd, 'clone', formaturl(url, protocol), name] + (['--depth', depth] if depth else []) + (['-v'] if verbose else ['-q']))
+    def clone(url, name=None, depth=None, protocol=None, cache=None):
+        popen([git_cmd, 'clone', formaturl(url, protocol), name] + (['--depth', depth] if depth else []) +
+              (['--reference', cache, '--dissociate'] if cache else []) + (['-v'] if verbose else ['-q']))
 
     def add(dest):
         log("Adding reference "+dest)
@@ -591,7 +590,7 @@ def checkout(rev):
         refs = Git.getrefs(rev)
         for ref in refs: # re-associate with a local or remote branch (rev is the same)
             branch = re.sub(r'^(.*?)\/(.*?)$', r'\2', ref)
-            log("Revision \"%s\" matches a branch \"%s\"reference. Re-associating with branch" % (rev, branch))
+            log("Revision \"%s\" matches a branch \"%s\" reference. Re-associating with branch" % (rev, branch))
             popen([git_cmd, 'checkout', branch] + ([] if verbose else ['-q']))
             break
 
@@ -755,6 +754,7 @@ class Repo(object):
     rev = None
     scm = None
     libs = []
+    cache = '/tmp/repo_cache'
 
     @classmethod
     def fromurl(cls, url, path=None):
@@ -945,16 +945,34 @@ def clone(self, url, path, depth=None, protocol=None, **kwargs):
         sorted_scms = sorted(sorted_scms, key=lambda (m, _): not m)
 
         for _, scm in sorted_scms:
-            try:
-                scm.clone(url, path, depth=depth, protocol=protocol, **kwargs)
+            if scm.isurl(formaturl(url, 'https')):
+                main = True
+                cache = self.get_cache(url)
+
+                # Try to clone with cache ref first
+                if cache:
+                    try:
+                        scm.clone(url, path, depth=depth, protocol=protocol, cache=cache, **kwargs)
+                        main = False
+                    except ProcessException, e:
+                        if os.path.isdir(path):
+                            rmtree_readonly(path)
+
+                # Main clone routine if the clone with cache ref failed (might occur if cache ref is dirty)
+                if main:
+                    try:
+                        scm.clone(url, path, depth=depth, protocol=protocol, **kwargs)
+                    except ProcessException:
+                        if os.path.isdir(path):
+                            rmtree_readonly(path)
+                        continue
+
                 self.scm = scm
                 self.url = url
                 self.path = os.path.abspath(path)
                 self.ignores()
+                self.set_cache(url)
                 return True
-            except ProcessException:
-                if os.path.isdir(path):
-                    rmtree_readonly(path)
         else:
             return False
 
@@ -994,6 +1012,24 @@ def rm_untracked(self):
                 action("Remove untracked library reference \"%s\"" % f)
                 os.remove(f)
 
+    def get_cache(self, url):
+        up = urlparse(formaturl(url, 'https'))
+        if self.cache and up and os.path.isdir(os.path.join(self.cache, up.netloc, re.sub(r'^/', '', up.path))):
+            return os.path.join(self.cache, up.netloc, re.sub(r'^/', '', up.path))
+
+    def set_cache(self, url):
+        up = urlparse(formaturl(url, 'https'))
+        if self.cache and up and os.path.isdir(self.path):
+            cpath = os.path.join(self.cache, up.netloc, re.sub(r'^/', '', up.path))
+            if not os.path.isdir(cpath):
+                os.makedirs(cpath)
+
+            scm_dir = '.'+self.scm.name
+            if os.path.isdir(os.path.join(cpath, scm_dir)):
+                rmtree_readonly(os.path.join(cpath, scm_dir))
+            shutil.copytree(os.path.join(self.path, scm_dir), os.path.join(cpath, scm_dir))
+        return False
+
     def can_update(self, clean, force):
         err = None
         if (self.is_local or self.url is None) and not force:

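As a usage-level footnote (a sketch under assumptions, not the mbed-cli implementation): the clone command that Git.clone() ends up running, with and without a cache hit, can be reproduced with subprocess as shown below. The function name, URL, and cache path are hypothetical.

    import subprocess

    def git_clone(url, name, cache=None, depth=None, verbose=False):
        # Mirrors the argument list Git.clone() builds in this commit.
        cmd = ['git', 'clone', url, name]
        if depth:
            cmd += ['--depth', str(depth)]
        if cache:
            # Borrow objects from the cached repository, then copy them into the
            # new clone so it no longer depends on the cache afterwards.
            cmd += ['--reference', cache, '--dissociate']
        cmd += ['-v'] if verbose else ['-q']
        subprocess.check_call(cmd)

    # Hypothetical example:
    # git_clone('https://github.com/ARMmbed/mbed-os', 'mbed-os',
    #           cache='/tmp/repo_cache/github.com/ARMmbed/mbed-os')
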
0 commit comments