Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 30 additions & 9 deletions src/west/app/project.py
Original file line number Diff line number Diff line change
Expand Up @@ -1177,9 +1177,9 @@ def do_add_parser(self, parser_adder):
'--auto-cache',
help='''automatically setup local cache repositories
in a flat folder hierarchy, but with an additional
subfolder (hashed name) for different remote URLs.
Each local cache repository is automatically cloned
on first usage and synced on subsequent clones.
subfolder (hashed name) for different remote URLs. Each
local cache repository is automatically cloned on first
usage and synced on subsequent fetches (if necessary).
This cache has the lowest priority (Prio 2).''',
)

Expand All @@ -1203,7 +1203,8 @@ def do_add_parser(self, parser_adder):
workspace setup.
Only in case of auto-cache the 'west update' process updates the local
caches first, which then serve as the source for pulling changes into
the workspace.
the workspace. The auto-cache fetches updates from the remote only if
the specified revision is not already present in the local cache.

Example: Assume your manifest describes this workspace structure:
(workspace)
Expand Down Expand Up @@ -1233,7 +1234,8 @@ def do_add_parser(self, parser_adder):
folder hierarchy is setup automatically. Each repository is stored under a
directory named after the basename of its remote URL. To prevent conflicts
between repos with same name, a hash of the remote URL is used as subfolder.
Note: Each local cache repo is automatically synced on subsequent updates.
Note: Each local cache repo is automatically synced on demand during subsequent
updates (only if the requested revision is not already contained in the cache).
(auto cache directory)
├── bar.git
│ ├── <hash>
Expand Down Expand Up @@ -1757,13 +1759,29 @@ def handle_auto_cache(self, project):
# Then clone the repository into the local cache.
cache_dir_parent = Path(cache_dir).parent
cache_dir_parent.mkdir(parents=True, exist_ok=True)
self.dbg(f'{project.name}: create auto-cache for {project.url} in {cache_dir}')
project.git(
['clone', '--mirror', '--', project.url, os.fspath(cache_dir)], cwd=cache_dir_parent
)
self.create_auto_cache_info(project, cache_dir)
else:
# The local cache already exists. Sync it with remote.
project.git(['remote', 'update', '--prune'], cwd=cache_dir)
# check if the remote update can be skipped
if self.fs != 'always':
# Determine the type of the project revision by checking if it is
# already contained in the auto-cache.
# If it is an already available tag or a commit, the remote
# update can be skipped. Otherwise the auto-cache must be updated.
rev_type = _rev_type(project, cwd=cache_dir)
if rev_type in ('tag', 'commit'):
self.dbg(
f'{project.name}: auto-cache remote update is skipped '
f'as it already contains {rev_type} {project.revision}'
)
return

# The auto-cache needs to be updated. Sync with remote.
self.dbg(f'{project.name}: update auto-cache ({cache_dir}) with remote')
project.git(['remote', 'update', '--prune'], cwd=cache_dir, check=False)

def init_project(self, project):
# update() helper. Initialize an uncloned project repository.
Expand Down Expand Up @@ -2472,7 +2490,7 @@ def _maybe_sha(rev):
return len(rev) <= 40


def _rev_type(project, rev=None):
def _rev_type(project, rev=None, cwd=None):
# Returns a "refined" revision type of rev (default:
# project.revision) as one of the following strings: 'tag', 'tree',
# 'blob', 'commit', 'branch', 'other'.
Expand All @@ -2490,7 +2508,9 @@ def _rev_type(project, rev=None):
# update" specific logic.
if not rev:
rev = project.revision
cp = project.git(['cat-file', '-t', rev], check=False, capture_stdout=True, capture_stderr=True)
cp = project.git(
['cat-file', '-t', rev], cwd=cwd, check=False, capture_stdout=True, capture_stderr=True
)
stdout = cp.stdout.decode('utf-8').strip()
if cp.returncode:
return 'other'
Expand All @@ -2505,6 +2525,7 @@ def _rev_type(project, rev=None):
check=False,
capture_stdout=True,
capture_stderr=True,
cwd=cwd,
)
if cp.returncode:
# This can happen if the ref name is ambiguous, e.g.:
Expand Down
95 changes: 95 additions & 0 deletions tests/test_project_caching.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from conftest import (
GIT,
add_commit,
chdir,
cmd,
create_branch,
create_repo,
Expand Down Expand Up @@ -314,6 +315,100 @@ def test_update_auto_cache(tmpdir):
assert bar_head_newer in rev_list(auto_cache_dir / "bar" / bar_hash)


def test_update_auto_cache_skipped_remote_update(tmpdir):
    # 'west update --auto-cache' must skip the remote update of a cache
    # repository whenever the requested revision (a commit or tag) is already
    # contained in the cache, and must still sync with the remote when a new
    # commit or a branch name is requested.
    foo_remote = Path(tmpdir / 'remotes' / 'foo')
    bar_remote = Path(tmpdir / 'remotes' / 'bar')
    auto_cache_dir = Path(tmpdir / 'auto_cache_dir')

    def create_foo_bar_commits():
        # Add one commit to each remote and return the new HEAD SHAs.
        add_commit(foo_remote, 'new commit')
        add_commit(bar_remote, 'new commit')
        foo_head = rev_parse(foo_remote, 'HEAD')
        bar_head = rev_parse(bar_remote, 'HEAD')
        return foo_head, bar_head

    def setup_workspace_and_west_update(workspace, foo_head, bar_head):
        # Create a fresh workspace pinned to the given revisions and run a
        # verbose 'west update' against the shared auto-cache directory.
        # Returns the captured stdout so callers can assert on log messages.
        setup_cache_workspace(
            workspace,
            foo_remote=foo_remote,
            foo_head=foo_head,
            bar_remote=bar_remote,
            bar_head=bar_head,
        )
        with chdir(workspace):
            stdout = cmd(['-v', 'update', '--auto-cache', auto_cache_dir])
        return stdout

    create_repo(foo_remote)
    create_repo(bar_remote)
    foo_commit1, bar_commit1 = create_foo_bar_commits()
    foo_commit2, bar_commit2 = create_foo_bar_commits()

    # run initial west update to setup auto-cache and get cache directories
    setup_workspace_and_west_update(
        tmpdir / 'workspace1',
        foo_head=foo_commit1,
        bar_head=bar_commit1,
    )

    # Read the auto-cache hash subfolder names for foo and bar. iterdir()
    # yields full paths, so keep only the basename (the URL hash) before
    # re-joining it below the auto-cache directory.
    (bar_hash,) = [p.name for p in (auto_cache_dir / 'bar').iterdir() if p.is_dir()]
    auto_cache_dir_bar = auto_cache_dir / 'bar' / bar_hash
    (foo_hash,) = [p.name for p in (auto_cache_dir / 'foo').iterdir() if p.is_dir()]
    auto_cache_dir_foo = auto_cache_dir / 'foo' / foo_hash

    # Imitate that foo remote is temporarily offline by moving it temporarily.
    # Since foo and bar revisions are used which are already contained in the auto-cache,
    # west update should work with according messages as there is no need to update remotes.
    foo_moved = Path(tmpdir / 'remotes' / 'foo.moved')
    shutil.move(foo_remote, foo_moved)
    stdout = setup_workspace_and_west_update(
        tmpdir / 'workspace2',
        foo_head=foo_commit2,
        bar_head=bar_commit2,
    )
    shutil.move(foo_moved, foo_remote)
    msgs = [
        f"foo: auto-cache remote update is skipped as it already contains commit {foo_commit2}",
        f"foo: cloning from {auto_cache_dir_foo}",
        f"bar: auto-cache remote update is skipped as it already contains commit {bar_commit2}",
        f"bar: cloning from {auto_cache_dir_bar}",
    ]
    for msg in msgs:
        assert msg in stdout

    # If a new commit is used, the auto-cache should be updated with remote
    foo_commit3, bar_commit3 = create_foo_bar_commits()
    stdout = setup_workspace_and_west_update(
        tmpdir / 'workspace3',
        foo_head=foo_commit3,
        bar_head=bar_commit3,
    )
    msgs = [
        f"foo: update auto-cache ({auto_cache_dir_foo}) with remote",
        f"foo: cloning from {auto_cache_dir_foo}",
        f"bar: update auto-cache ({auto_cache_dir_bar}) with remote",
        f"bar: cloning from {auto_cache_dir_bar}",
    ]
    for msg in msgs:
        assert msg in stdout

    # If a branch is used as revision, the auto-cache must be updated.
    stdout = setup_workspace_and_west_update(
        tmpdir / 'workspace4',
        foo_head='master',
        bar_head='master',
    )
    msgs = [
        f"foo: update auto-cache ({auto_cache_dir_foo}) with remote",
        f"foo: cloning from {auto_cache_dir_foo}",
        f"bar: update auto-cache ({auto_cache_dir_bar}) with remote",
        f"bar: cloning from {auto_cache_dir_bar}",
    ]
    for msg in msgs:
        assert msg in stdout


def test_update_caches_priorities(tmpdir):
# Test that the correct cache is used if multiple caches are specified
# e.g. if 'west update --name-cache X --path-cache Y --auto-cache Z'
Expand Down