diff --git a/.flake8 b/.flake8 index 7a4edca..aeb029b 100644 --- a/.flake8 +++ b/.flake8 @@ -3,5 +3,5 @@ # https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length [flake8] # black wraps at 88 -max-line-length = 90 +max-line-length = 120 extend-ignore = E203 diff --git a/examples/buildkite-trigger.py b/examples/buildkite-trigger.py index 6cb3bc9..208d513 100644 --- a/examples/buildkite-trigger.py +++ b/examples/buildkite-trigger.py @@ -3,6 +3,7 @@ # my-pipeline change-commit //depot/... "python %//depot/scripts/buildkite-trigger.py% %changelist% %user%" import sys import subprocess + try: from urllib.request import urlopen, Request except ImportError: @@ -10,27 +11,30 @@ import json __BUILDKITE_TOKEN__ = "" - + __ORG_SLUG__ = "" pipeline_slug = sys.argv[1] changelist = sys.argv[2] user = sys.argv[3] -description = subprocess.check_output(["p4", "-Ztag", "-F", "%desc%", "describe", changelist]) +description = subprocess.check_output( + ["p4", "-Ztag", "-F", "%desc%", "describe", changelist] +) headers = { 'Content-Type': 'application/json', - 'Authorization': 'Bearer %s' % __BUILDKITE_TOKEN__ + 'Authorization': 'Bearer %s' % __BUILDKITE_TOKEN__, } payload = { 'commit': '@' + changelist, 'branch': 'master', 'message': description, - 'author': { - 'name': user - } + 'author': {'name': user}, } -url = "https://api.buildkite.com/v2/organizations/%s/pipelines/%s/builds" % (__ORG_SLUG__, pipeline_slug) +url = "https://api.buildkite.com/v2/organizations/%s/pipelines/%s/builds" % ( + __ORG_SLUG__, + pipeline_slug, +) params = json.dumps(payload).encode('utf8') req = Request(url, data=params, headers=headers) diff --git a/examples/cleanup-unused-workspaces.py b/examples/cleanup-unused-workspaces.py index be172de..9c6bb5c 100644 --- a/examples/cleanup-unused-workspaces.py +++ b/examples/cleanup-unused-workspaces.py @@ -27,16 +27,24 @@ # Filter by basic prefix matching. # May want to include filtering by user and other fields to avoid false positives. 
-bk_clients = [client for client in clients - if client.get('client', '').startswith('bk-p4-')] +bk_clients = [ + client for client in clients if client.get('client', '').startswith('bk-p4-') +] now = datetime.now() n_days_ago = (now - timedelta(days=__days_unused__)).timestamp() -unused_clients = [client for client in bk_clients - if int(client.get('Access')) < n_days_ago] +unused_clients = [ + client for client in bk_clients if int(client.get('Access')) < n_days_ago +] pprint(unused_clients) -proceed = input("Will delete %d/%d Buildkite clients. Continue? (y/n) " % (len(unused_clients),len(bk_clients))).lower() == 'y' +proceed = ( + input( + "Will delete %d/%d Buildkite clients. Continue? (y/n) " + % (len(unused_clients), len(bk_clients)) + ).lower() + == 'y' +) if proceed: for client in unused_clients: diff --git a/python/buildkite.py b/python/buildkite.py index bbed677..5804feb 100644 --- a/python/buildkite.py +++ b/python/buildkite.py @@ -12,24 +12,26 @@ __LOCAL_RUN__ = os.environ['BUILDKITE_AGENT_NAME'] == 'local' __REVISION_METADATA__ = 'buildkite-perforce-revision' -__REVISION_METADATA_DEPRECATED__ = 'buildkite:perforce:revision' # old metadata key, incompatible with `bk local run` +__REVISION_METADATA_DEPRECATED__ = ( + 'buildkite:perforce:revision' # old metadata key, incompatible with `bk local run` +) + def get_env(): """Get env vars passed in via plugin config""" - env = { - 'P4PORT': os.environ.get('P4PORT') or os.environ.get('BUILDKITE_REPO') - } + env = {'P4PORT': os.environ.get('P4PORT') or os.environ.get('BUILDKITE_REPO')} for p4var in ['P4PORT', 'P4USER', 'P4TICKETS', 'P4TRUST']: plugin_value = os.environ.get('BUILDKITE_PLUGIN_PERFORCE_%s' % p4var) if plugin_value: env[p4var] = plugin_value return env + def list_from_env_array(var): """Read list of values from either VAR or VAR_0, VAR_1 etc""" result = os.environ.get(var, []) if result: - return [result] # convert single value to list + return [result] # convert single value to list i = 0 while 
True: @@ -41,6 +43,7 @@ def list_from_env_array(var): return result + def get_config(): """Get configuration which will be passed directly to perforce.P4Repo as kwargs""" conf = {} @@ -54,7 +57,9 @@ def get_config(): if 'BUILDKITE_PLUGIN_PERFORCE_ROOT' in os.environ and not __LOCAL_RUN__: raise Exception("Custom P4 root is for use in unit tests only") - conf['root'] = os.environ.get('BUILDKITE_PLUGIN_PERFORCE_ROOT') or os.environ.get('BUILDKITE_BUILD_CHECKOUT_PATH') + conf['root'] = os.environ.get('BUILDKITE_PLUGIN_PERFORCE_ROOT') or os.environ.get( + 'BUILDKITE_BUILD_CHECKOUT_PATH' + ) # Coerce view into pairs of [depot client] paths view_parts = conf['view'].split(' ') @@ -63,6 +68,7 @@ def get_config(): conf['view'] = ['%s %s' % (v, next(view_iter)) for v in view_iter] return conf + def get_metadata(key): """If it exists, retrieve metadata from buildkite for a given key""" if not __ACCESS_TOKEN__: @@ -70,20 +76,27 @@ def get_metadata(key): return None if subprocess.call(['buildkite-agent', 'meta-data', 'exists', key]) == 0: - return subprocess.check_output(['buildkite-agent', 'meta-data', 'get', key]).decode(sys.stdout.encoding) + return subprocess.check_output( + ['buildkite-agent', 'meta-data', 'get', key] + ).decode(sys.stdout.encoding) + def set_metadata(key, value, overwrite=False): - """ Set metadata in buildkite for a given key. Optionally overwrite existing data. - Returns true if data was written + """Set metadata in buildkite for a given key. Optionally overwrite existing data. 
+ Returns true if data was written """ if not __ACCESS_TOKEN__ or __LOCAL_RUN__: # Cannot set metadata outside of buildkite context, including `bk local run` return False - if overwrite or subprocess.call(['buildkite-agent', 'meta-data', 'exists', key]) == 100: - subprocess.call(['buildkite-agent', 'meta-data', 'set', key, value]) + if ( + overwrite + or subprocess.call(['buildkite-agent', 'meta-data', 'exists', key]) == 100 + ): + subprocess.call(['buildkite-agent', 'meta-data', 'set', key, value]) return True + def get_users_changelist(): """Get the shelved changelist supplied by the user, if applicable""" # Overrides the CL to unshelve via plugin config @@ -96,11 +109,14 @@ def get_users_changelist(): if branch.isdigit(): return branch + def get_build_revision(): """Get a p4 revision for the build from buildkite context""" - revision = get_metadata(__REVISION_METADATA__) or \ - get_metadata(__REVISION_METADATA_DEPRECATED__) or \ - os.environ['BUILDKITE_COMMIT'] # HEAD, user-defined revision or git-sha + revision = ( + get_metadata(__REVISION_METADATA__) + or get_metadata(__REVISION_METADATA_DEPRECATED__) + or os.environ['BUILDKITE_COMMIT'] + ) # HEAD, user-defined revision or git-sha # Convert bare changelist number to revision specifier # Note: Theoretically, its possible for all 40 characters of a git sha to be digits. 
@@ -113,12 +129,16 @@ def get_build_revision(): # Unable to establish a concrete revision for the build return None + def set_build_revision(revision): """Set the p4 revision for following jobs in this build""" set_metadata(__REVISION_METADATA__, revision) set_metadata(__REVISION_METADATA_DEPRECATED__, revision) + def set_build_info(revision, description): """Set the description and commit number in the UI for this build by mimicking a git repo""" - revision = revision.lstrip('@#') # revision must look like a git sha for buildkite to accept it + revision = revision.lstrip( + '@#' + ) # revision must look like a git sha for buildkite to accept it set_metadata('buildkite:git:commit', 'commit %s\n\n\t%s' % (revision, description)) diff --git a/python/checkout.py b/python/checkout.py index d898dfc..142e45d 100644 --- a/python/checkout.py +++ b/python/checkout.py @@ -6,8 +6,15 @@ import subprocess from perforce import P4Repo -from buildkite import (get_env, get_config, get_build_revision, set_build_revision, - get_users_changelist, set_build_info) +from buildkite import ( + get_env, + get_config, + get_build_revision, + set_build_revision, + get_users_changelist, + set_build_info, +) + def main(): """Main""" @@ -29,7 +36,8 @@ def main(): description = repo.description( # Prefer users change description over latest submitted change - user_changelist or repo.head_at_revision(revision) + user_changelist + or repo.head_at_revision(revision) ) set_build_info(revision, description) diff --git a/python/perforce.py b/python/perforce.py index 45b22f0..33b75e4 100644 --- a/python/perforce.py +++ b/python/perforce.py @@ -11,12 +11,23 @@ # Recommended reference: https://www.perforce.com/manuals/p4python/p4python.pdf -from P4 import P4, P4Exception, OutputHandler # pylint: disable=import-error +from P4 import P4, P4Exception, OutputHandler # pylint: disable=import-error + class P4Repo: """A class for manipulating perforce workspaces""" - def __init__(self, root=None, view=None, 
stream=None, sync=None, - client_options=None, client_type=None, parallel=0, fingerprint=None): + + def __init__( + self, + root=None, + view=None, + stream=None, + sync=None, + client_options=None, + client_type=None, + parallel=0, + fingerprint=None, + ): """ root: Directory in which to create the client workspace view: Client workspace mapping @@ -42,7 +53,7 @@ def __init__(self, root=None, view=None, stream=None, sync=None, self.p4config = os.path.join(self.root, 'p4config') self.perforce = P4() - self.perforce.disable_tmp_cleanup() # Required to use multiple P4 connections in parallel safely + self.perforce.disable_tmp_cleanup() # Required to use multiple P4 connections in parallel safely self.perforce.exception_level = 1 # Only errors are raised as exceptions logger = logging.getLogger("p4python") logger.setLevel(logging.INFO) @@ -59,8 +70,8 @@ def __init__(self, root=None, view=None, stream=None, sync=None, if self.perforce.port.startswith('ssl'): if self.fingerprint: self.perforce.run_trust( - '-r', # Install a replacement fingerprint - will replace primary if this matches the server - '-i', # Install the specified fingerprint + '-r', # Install a replacement fingerprint - will replace primary if this matches the server + '-i', # Install the specified fingerprint self.fingerprint, ) else: @@ -73,7 +84,10 @@ def __del__(self): def _get_clientname(self): """Get unique clientname for this host and location on disk""" - clientname = 'bk-p4-%s-%s' % (os.environ.get('BUILDKITE_AGENT_NAME', socket.gethostname()), os.path.basename(self.root)) + clientname = 'bk-p4-%s-%s' % ( + os.environ.get('BUILDKITE_AGENT_NAME', socket.gethostname()), + os.path.basename(self.root), + ) return re.sub(r'\W', '-', clientname) def _localize_view(self, view): @@ -86,6 +100,7 @@ def insert_clientname(mapping): """Insert client name into path mapping""" depot, local = mapping.split(' ') return '%s //%s/%s' % (depot, clientname, local) + return [insert_clientname(mapping) for mapping in 
view] def _flush_to_previous_client(self, current_client, prev_clientname): @@ -93,7 +108,10 @@ def _flush_to_previous_client(self, current_client, prev_clientname): prev_client = self.perforce.fetch_client(prev_clientname) stream_switch = self.stream and prev_client._stream != self.stream if stream_switch: - self.perforce.logger.info("previous client stream %s does not match %s, switching stream temporarily to flush" % (prev_client._stream, self.stream)) + self.perforce.logger.info( + "previous client stream %s does not match %s, switching stream temporarily to flush" + % (prev_client._stream, self.stream) + ) current_client._stream = prev_client._stream self.perforce.save_client(current_client) @@ -129,15 +147,22 @@ def _setup_client(self): if os.path.isfile(self.p4config): with open(self.p4config) as infile: - prev_clientname = next(line.split('=', 1)[-1] - for line in infile.read().splitlines() # removes \n - if line.startswith('P4CLIENT=')) + prev_clientname = next( + line.split('=', 1)[-1] + for line in infile.read().splitlines() # removes \n + if line.startswith('P4CLIENT=') + ) if prev_clientname != clientname: - self.perforce.logger.warning("p4config last client was %s, flushing workspace to match" % prev_clientname) + self.perforce.logger.warning( + "p4config last client was %s, flushing workspace to match" + % prev_clientname + ) self._flush_to_previous_client(client, prev_clientname) - elif 'Update' in client: # client was accessed previously - self.perforce.logger.warning("p4config missing for previously accessed client workspace. flushing to revision zero") + elif 'Update' in client: # client was accessed previously + self.perforce.logger.warning( + "p4config missing for previously accessed client workspace. 
flushing to revision zero" + ) self.perforce.run_flush(['//...@0']) self._write_p4config() @@ -148,7 +173,7 @@ def _write_p4config(self): config = { 'P4CLIENT': self.perforce.client, 'P4USER': self.perforce.user, - 'P4PORT': self.perforce.port + 'P4PORT': self.perforce.port, } if not os.path.exists(self.root): os.makedirs(self.root) @@ -164,15 +189,15 @@ def _read_patched(self): def _write_patched(self, files): """Write a marker to track which files have been modified in the workspace""" - content = list(set(files + self._read_patched())) # Combine and deduplicate + content = list(set(files + self._read_patched())) # Combine and deduplicate with open(self.patchfile, 'w') as outfile: json.dump(content, outfile) def clean(self): - """ Perform a p4clean on the workspace to - remove added and restore deleted files + """Perform a p4clean on the workspace to + remove added and restore deleted files - Does not detect modified files + Does not detect modified files """ self._setup_client() # TODO: Add a fast implementation of p4 clean here @@ -202,7 +227,7 @@ def head_at_revision(self, revision): # Resolve revision directly for automatic labels # Improves performance when label is significantly behind HEAD labelinfo = self.perforce.fetch_label(stripped_revision) - # Revision field is optional + # Revision field is optional revision = labelinfo.get('Revision') or revision except P4Exception: # revision may be clientname, datespec or something else @@ -210,11 +235,9 @@ def head_at_revision(self, revision): pass # Get last submitted change at revision spec - changeinfo = self.perforce.run_changes([ - '-m', '1', '-s', 'submitted', revision - ]) + changeinfo = self.perforce.run_changes(['-m', '1', '-s', 'submitted', revision]) if not changeinfo: - return None # Revision spec had no submitted changes + return None # Revision spec had no submitted changes return changeinfo[0]['change'] def description(self, changelist): @@ -232,8 +255,13 @@ def sync(self, revision=None): 
handler=SyncOutput(self.perforce.logger), ) if result: - self.perforce.logger.info("Synced %s files (%s)" % ( - result[0]['totalFileCount'], sizeof_fmt(int(result[0]['totalFileSize'])))) + self.perforce.logger.info( + "Synced %s files (%s)" + % ( + result[0]['totalFileCount'], + sizeof_fmt(int(result[0]['totalFileSize'])), + ) + ) return result def revert(self): @@ -255,6 +283,7 @@ def run(*args): perforce.run(*args) from concurrent.futures import ThreadPoolExecutor + with ThreadPoolExecutor(max_workers=max_parallel) as executor: executor.map(run, cmds) @@ -264,7 +293,9 @@ def p4print_unshelve(self, changelist): changeinfo = self.perforce.run_describe('-S', changelist) if not changeinfo: - raise Exception('Changelist %s does not contain any shelved files.' % changelist) + raise Exception( + 'Changelist %s does not contain any shelved files.' % changelist + ) changeinfo = changeinfo[0] depotfiles = changeinfo['depotFile'] @@ -285,13 +316,16 @@ def p4print_unshelve(self, changelist): os.chmod(localfile, stat.S_IWRITE) os.unlink(localfile) if any(depotfile.startswith(prefix) for prefix in sync_prefixes): - cmds.append(('print', '-o', localfile, '%s@=%s' % (depotfile, changelist))) + cmds.append( + ('print', '-o', localfile, '%s@=%s' % (depotfile, changelist)) + ) self.run_parallel_cmds(cmds) class SyncOutput(OutputHandler): """Log each synced file""" + def __init__(self, logger): OutputHandler.__init__(self) self.logger = logger @@ -299,7 +333,7 @@ def __init__(self, logger): def outputStat(self, stat): if 'depotFile' in stat: - self.sync_count += 1 + self.sync_count += 1 if self.sync_count < 1000: # Normal, verbose logging of synced file self.logger.info("%(depotFile)s#%(rev)s %(action)s" % stat) diff --git a/python/test_perforce.py b/python/test_perforce.py index 32472ce..e8d53f5 100644 --- a/python/test_perforce.py +++ b/python/test_perforce.py @@ -15,6 +15,11 @@ from perforce import P4Repo +# Time after which the p4 server will automatically 
be shut-down. +__P4D_TIMEOUT__ = 30 +# __P4D_TIMEOUT__ = None + + def find_free_port(): """Find an open port that we could run a perforce server on""" # pylint: disable=no-member @@ -26,7 +34,7 @@ def find_free_port(): @contextmanager def run_p4d(p4port, from_zip=None): """Start a perforce server with the given hostname:port. - Optionally unzip server state from a file + Optionally unzip server state from a file """ prefix = 'bk-p4d-test-' parent = tempfile.gettempdir() @@ -34,7 +42,7 @@ if item.startswith(prefix): try: shutil.rmtree(os.path.join(parent, item)) - except Exception: # pylint: disable=broad-except + except Exception: # pylint: disable=broad-except print("Failed to remove", item) tmpdir = tempfile.mkdtemp(prefix=prefix) @@ -64,6 +72,7 @@ def server(): time.sleep(1) yield p4port + def store_server(repo, to_zip): """Zip up a server to use as a unit test fixture""" serverRoot = repo.info()['serverRoot'] @@ -75,6 +84,7 @@ abs_path = os.path.join(root, filename) archive.write(abs_path, os.path.relpath(abs_path, serverRoot)) + def test_server_fixture(capsys, server): """Check that tests can start and connect to a local perforce server""" with capsys.disabled(): @@ -93,7 +103,9 @@ # Validate contents of server fixture @HEAD depotfiles = [info['depotFile'] for info in repo.perforce.run_files('//...')] - depotfile_to_content = {depotfile: repo.perforce.run_print(depotfile)[1] for depotfile in depotfiles} + depotfile_to_content = { + depotfile: repo.perforce.run_print(depotfile)[1] for depotfile in depotfiles + } assert depotfile_to_content == { "//depot/file.txt": "Hello World\n", "//stream-depot/main/file.txt": "Hello Stream World\n", @@ -103,68 +115,72 @@ # Check submitted changes submitted_changes = repo.perforce.run_changes('-s', 'submitted') - 
submitted_changeinfo = {change["change"]: repo.perforce.run_describe(change["change"])[0] for change in submitted_changes} + submitted_changeinfo = { + change["change"]: repo.perforce.run_describe(change["change"])[0] + for change in submitted_changes + } # Filter info to only contain relevant keys for submitted changes submitted_changeinfo = { - change: {key: info.get(key) - for key in ['depotFile', 'desc', 'action']} - for change, info in submitted_changeinfo.items() + change: {key: info.get(key) for key in ['depotFile', 'desc', 'action']} + for change, info in submitted_changeinfo.items() } assert submitted_changeinfo == { - '1' :{ + '1': { 'action': ['add'], 'depotFile': ['//depot/file.txt'], - 'desc': 'Initial Commit' + 'desc': 'Initial Commit', }, - '2' :{ + '2': { 'action': ['add'], 'depotFile': ['//stream-depot/main/file.txt'], - 'desc': 'Initial Commit to Stream\n' + 'desc': 'Initial Commit to Stream\n', }, - '6' :{ + '6': { 'action': ['edit'], 'depotFile': ['//depot/file.txt'], - 'desc': 'modify //depot/file.txt\n' + 'desc': 'modify //depot/file.txt\n', }, '7': { 'action': ['branch'], 'depotFile': ['//stream-depot/dev/file.txt'], - 'desc': 'Copy files from //stream-depot/main to //stream-depot/dev\n' + 'desc': 'Copy files from //stream-depot/main to //stream-depot/dev\n', }, '8': { 'action': ['edit'], 'depotFile': ['//stream-depot/dev/file.txt'], - 'desc': 'Update contents of //stream-depot/dev/file.txt\n' + 'desc': 'Update contents of //stream-depot/dev/file.txt\n', }, '9': { 'action': ['add'], 'depotFile': ['//stream-depot/main/file_2.txt'], - 'desc': 'file_2.txt - exists in main but not dev\n' - } + 'desc': 'file_2.txt - exists in main but not dev\n', + }, } # Check shelved changes shelved_changes = repo.perforce.run_changes('-s', 'pending') - shelved_changeinfo = {change["change"]: repo.perforce.run_describe('-S', change["change"])[0] for change in shelved_changes} + shelved_changeinfo = { + change["change"]: repo.perforce.run_describe('-S', 
change["change"])[0] + for change in shelved_changes + } # Filter info to only contain relevant keys for submitted changes shelved_changeinfo = { - change: {key: info.get(key) - for key in ['depotFile', 'desc', 'action']} - for change, info in shelved_changeinfo.items() + change: {key: info.get(key) for key in ['depotFile', 'desc', 'action']} + for change, info in shelved_changeinfo.items() } assert shelved_changeinfo == { - '3' :{ + '3': { 'action': ['edit'], 'depotFile': ['//depot/file.txt'], 'desc': 'Modify file in shelved change\n', # Change content from 'Hello World\n' to 'Goodbye World\n' }, - '4' :{ + '4': { 'action': ['delete'], 'depotFile': ['//depot/file.txt'], 'desc': 'Delete file in shelved change\n', }, - '5' :{ + '5': { 'action': ['add'], 'depotFile': ['//depot/newfile.txt'], 'desc': 'Add file in shelved change\n', @@ -174,14 +190,11 @@ def test_server_fixture(capsys, server): labels = repo.perforce.run_labels() # Filter info to only contain relevant keys labelinfo = { - label.get('label'): {key: label.get(key) - for key in ['Revision'] - } + label.get('label'): {key: label.get(key) for key in ['Revision']} for label in labels } - assert labelinfo == { - 'my-label': {'Revision': '@2'} - } + assert labelinfo == {'my-label': {'Revision': '@2'}} + def test_head(server, tmpdir): """Test resolve of HEAD changelist""" @@ -193,10 +206,15 @@ def test_head(server, tmpdir): assert repo.head() == "@8", "Unexpected HEAD revision for stream" repo = P4Repo(root=tmpdir, stream='//stream-depot/idontexist') - with pytest.raises(Exception, match=r"Stream '//stream-depot/idontexist' doesn't exist."): + with pytest.raises( + Exception, match=r"Stream '//stream-depot/idontexist' doesn't exist." 
+ ): repo.head() - assert repo.head_at_revision("@my-label") == "2", "Unexpected HEAD revision for label" + assert ( + repo.head_at_revision("@my-label") == "2" + ), "Unexpected HEAD revision for label" + def test_checkout(server, tmpdir): """Test normal flow of checking out files""" @@ -204,8 +222,9 @@ def test_checkout(server, tmpdir): assert os.listdir(tmpdir) == [], "Workspace should be empty" repo.sync() - assert sorted(os.listdir(tmpdir)) == sorted([ - "file.txt", "p4config"]), "Workspace sync not as expected" + assert sorted(os.listdir(tmpdir)) == sorted( + ["file.txt", "p4config"] + ), "Workspace sync not as expected" with open(os.path.join(tmpdir, "file.txt")) as content: assert content.read() == "Hello World\n", "Unexpected content in workspace file" @@ -214,7 +233,10 @@ def test_checkout(server, tmpdir): # Validate p4config with open(os.path.join(tmpdir, "p4config")) as content: - assert "P4PORT=%s\n" % repo.perforce.port in content.readlines(), "Unexpected p4config content" + assert ( + "P4PORT=%s\n" % repo.perforce.port in content.readlines() + ), "Unexpected p4config content" + def test_checkout_partial_path(server, tmpdir): """Test checking out a subset of view with one path""" @@ -222,18 +244,21 @@ def test_checkout_partial_path(server, tmpdir): repo.sync() assert 'file.txt' in os.listdir(tmpdir) + def test_checkout_partial_dir(server, tmpdir): """Test checking out a subset of view with one directory""" repo = P4Repo(root=tmpdir, sync=['//depot/...']) repo.sync() assert 'file.txt' in os.listdir(tmpdir) + def test_checkout_partial_multiple(server, tmpdir): """Test checking out a subset of view with multiple paths""" repo = P4Repo(root=tmpdir, sync=['//depot/fake-dir/...', '//depot/file.txt']) repo.sync() assert 'file.txt' in os.listdir(tmpdir) + def test_checkout_stream(server, tmpdir): """Test checking out a stream depot""" repo = P4Repo(root=tmpdir, stream='//stream-depot/main') @@ -241,7 +266,10 @@ def test_checkout_stream(server, tmpdir): assert 
os.listdir(tmpdir) == [], "Workspace should be empty" repo.sync() with open(os.path.join(tmpdir, "file.txt")) as content: - assert content.read() == "Hello Stream World\n", "Unexpected content in workspace file" + assert ( + content.read() == "Hello Stream World\n" + ), "Unexpected content in workspace file" + def test_checkout_label(server, tmpdir): """Test checking out at a specific label""" @@ -254,27 +282,34 @@ def test_checkout_label(server, tmpdir): with open(os.path.join(tmpdir, "file.txt")) as content: assert content.read() == "Hello World\n", "Unexpected content in workspace file" + def test_readonly_client(server, tmpdir): """Test creation of a readonly client""" repo = P4Repo(root=tmpdir, client_type='readonly') repo.sync() assert "file.txt" in os.listdir(tmpdir), "Workspace file was not synced" + def test_partitioned_client(server, tmpdir): """Test creation of a partitioned client""" repo = P4Repo(root=tmpdir, client_type='partitioned') repo.sync() assert "file.txt" in os.listdir(tmpdir), "Workspace file was not synced" + def test_modify_client_type(server, tmpdir): """Test modifying a clients type""" repo = P4Repo(root=tmpdir, client_type='writeable') repo.sync() - with pytest.raises(Exception, match=r'Client storage type cannot be changed after client is created'): + with pytest.raises( + Exception, + match=r'Client storage type cannot be changed after client is created', + ): repo = P4Repo(root=tmpdir, client_type='readonly') repo.sync() + def test_workspace_recovery(server, tmpdir): """Test that we can detect and recover from various workspace snafus""" repo = P4Repo(root=tmpdir) @@ -283,7 +318,7 @@ def test_workspace_recovery(server, tmpdir): # partially synced writeable files may be left in the workspace if a machine was shutdown mid-sync with open(os.path.join(tmpdir, "file.txt"), 'w') as depotfile: depotfile.write("Overwrite this file") - repo.sync() # By default, would raise 'cannot clobber writable file' + repo.sync() # By default, would raise 
'cannot clobber writable file' with open(os.path.join(tmpdir, "file.txt")) as content: assert content.read() == "Hello World\n", "Unexpected content in workspace file" @@ -291,15 +326,18 @@ def test_workspace_recovery(server, tmpdir): os.remove(os.path.join(tmpdir, "file.txt")) open(os.path.join(tmpdir, "added.txt"), 'a').close() repo.clean() - assert sorted(os.listdir(tmpdir)) == sorted([ - "file.txt", "p4config"]), "Failed to restore workspace file with repo.clean()" + assert sorted(os.listdir(tmpdir)) == sorted( + ["file.txt", "p4config"] + ), "Failed to restore workspace file with repo.clean()" os.remove(os.path.join(tmpdir, "file.txt")) os.remove(os.path.join(tmpdir, "p4config")) - repo = P4Repo(root=tmpdir) # Open a fresh tmpdir, as if this was a different job - repo.sync() # Normally: "You already have file.txt", but since p4config is missing it will restore the workspace - assert sorted(os.listdir(tmpdir)) == sorted([ - "file.txt", "p4config"]), "Failed to restore corrupt workspace due to missing p4config" + repo = P4Repo(root=tmpdir) # Open a fresh tmpdir, as if this was a different job + repo.sync() # Normally: "You already have file.txt", but since p4config is missing it will restore the workspace + assert sorted(os.listdir(tmpdir)) == sorted( + ["file.txt", "p4config"] + ), "Failed to restore corrupt workspace due to missing p4config" + def test_p4print_unshelve(server, tmpdir): """Test unshelving a pending changelist by p4printing content into a file""" @@ -308,36 +346,42 @@ def test_p4print_unshelve(server, tmpdir): with open(os.path.join(tmpdir, "file.txt")) as content: assert content.read() == "Hello World\n", "Unexpected content in workspace file" - repo.p4print_unshelve('3') # Modify a file + repo.p4print_unshelve('3') # Modify a file with open(os.path.join(tmpdir, "file.txt")) as content: - assert content.read() == "Goodbye World\n", "Unexpected content in workspace file" + assert ( + content.read() == "Goodbye World\n" + ), "Unexpected content 
in workspace file" - repo.p4print_unshelve('4') # Delete a file + repo.p4print_unshelve('4') # Delete a file assert not os.path.exists(os.path.join(tmpdir, "file.txt")) - repo.p4print_unshelve('5') # Add a file + repo.p4print_unshelve('5') # Add a file assert os.path.exists(os.path.join(tmpdir, "newfile.txt")) - with pytest.raises(Exception, match=r'Changelist 999 does not contain any shelved files.'): + with pytest.raises( + Exception, match=r'Changelist 999 does not contain any shelved files.' + ): repo.p4print_unshelve('999') - assert len(repo._read_patched()) == 2 # changes to file.txt and newfile.txt + assert len(repo._read_patched()) == 2 # changes to file.txt and newfile.txt # Unshelved changes are removed in following syncs repo.sync() with open(os.path.join(tmpdir, "file.txt")) as content: assert content.read() == "Hello World\n", "Unexpected content in workspace file" - assert not os.path.exists(os.path.join(tmpdir, "newfile.txt")), "File unshelved for add was not deleted" + assert not os.path.exists( + os.path.join(tmpdir, "newfile.txt") + ), "File unshelved for add was not deleted" # Shelved changes containing files not selected for sync are skipped repo = P4Repo(root=tmpdir, sync=['//depot/fake-dir/...']) repo.sync() - repo.p4print_unshelve('3') # Modify file.txt + repo.p4print_unshelve('3') # Modify file.txt assert not os.path.exists(os.path.join(tmpdir, "file.txt")) # Shelved changes containing files not mapped into this workspace do not throw an exception repo = P4Repo(root=tmpdir, stream='//stream-depot/main') - repo.p4print_unshelve('3') # Modify a file + repo.p4print_unshelve('3') # Modify a file def copytree(src, dst): @@ -350,6 +394,7 @@ def copytree(src, dst): else: shutil.copy2(s, d) + def test_client_migration(server, tmpdir): """Test re-use of workspace data when moved to another host""" repo = P4Repo(root=tmpdir) @@ -362,37 +407,46 @@ def test_client_migration(server, tmpdir): copytree(tmpdir, second_client) # Client names include path on 
disk, so this creates a new unique client repo = P4Repo(root=second_client) - synced = repo.sync() # Flushes to match previous client, since p4config is there on disk + synced = ( + repo.sync() + ) # Flushes to match previous client, since p4config is there on disk assert synced == [], "Should not have synced any files in second client" + def test_stream_switching(server, tmpdir): """Test stream-switching within the same depot""" repo = P4Repo(root=tmpdir, stream='//stream-depot/main') synced = repo.sync() assert len(synced) > 0, "Didn't sync any files" - assert set(os.listdir(tmpdir)) == set([ - "file.txt", "file_2.txt", "p4config"]) + assert set(os.listdir(tmpdir)) == set(["file.txt", "file_2.txt", "p4config"]) with open(os.path.join(tmpdir, "file.txt")) as content: - assert content.read() == "Hello Stream World\n", "Unexpected content in workspace file" + assert ( + content.read() == "Hello Stream World\n" + ), "Unexpected content in workspace file" # Re-use the same checkout directory, but switch streams repo = P4Repo(root=tmpdir, stream='//stream-depot/dev') repo.sync() assert len(synced) > 0, "Didn't sync any files" - assert set(os.listdir(tmpdir)) == set([ - "file.txt", "p4config"]) # file_2.txt was de-synced + assert set(os.listdir(tmpdir)) == set( + ["file.txt", "p4config"] + ) # file_2.txt was de-synced with open(os.path.join(tmpdir, "file.txt")) as content: - assert content.read() == "Hello Stream World (dev)\n", "Unexpected content in workspace file" + assert ( + content.read() == "Hello Stream World (dev)\n" + ), "Unexpected content in workspace file" + def test_stream_switching_migration(server, tmpdir): """Test stream-switching and client migration simultaneously""" repo = P4Repo(root=tmpdir, stream='//stream-depot/main') synced = repo.sync() assert len(synced) > 0, "Didn't sync any files" - assert set(os.listdir(tmpdir)) == set([ - "file.txt", "file_2.txt", "p4config"]) + assert set(os.listdir(tmpdir)) == set(["file.txt", "file_2.txt", "p4config"]) 
with open(os.path.join(tmpdir, "file.txt")) as content: - assert content.read() == "Hello Stream World\n", "Unexpected content in workspace file" + assert ( + content.read() == "Hello Stream World\n" + ), "Unexpected content in workspace file" with tempfile.TemporaryDirectory(prefix="bk-p4-test-") as second_client: copytree(tmpdir, second_client) @@ -401,15 +455,20 @@ def test_stream_switching_migration(server, tmpdir): repo = P4Repo(root=second_client, stream='//stream-depot/dev') repo.sync() assert len(synced) > 0, "Didn't sync any files" - assert set(os.listdir(second_client)) == set([ - "file.txt", "p4config"]) # file_2.txt was de-synced + assert set(os.listdir(second_client)) == set( + ["file.txt", "p4config"] + ) # file_2.txt was de-synced with open(os.path.join(second_client, "file.txt")) as content: - assert content.read() == "Hello Stream World (dev)\n", "Unexpected content in workspace file" + assert ( + content.read() == "Hello Stream World (dev)\n" + ), "Unexpected content in workspace file" + # fingerprint here matches to the cert in the test fixture directory, and you can check that with # P4SSLDIR=$(pwd)/python/fixture/insecure-ssl p4d -Gf __LEGIT_P4_FINGERPRINT__ = '7A:10:F6:00:95:87:5B:2E:D4:33:AB:44:42:05:85:94:1C:93:2E:A2' + def test_fingerprint_good(server, tmpdir): """Test supplying the correct fingerprint""" os.environ['P4TRUST'] = os.path.join(tmpdir, 'trust.txt') @@ -418,6 +477,7 @@ def test_fingerprint_good(server, tmpdir): synced = repo.sync() assert len(synced) > 0, "Didn't sync any files" + def test_fingerprint_bad(server, tmpdir): """Test supplying an incorrect fingerprint""" os.environ['P4TRUST'] = os.path.join(tmpdir, 'trust.txt') @@ -426,13 +486,14 @@ def test_fingerprint_bad(server, tmpdir): with pytest.raises(Exception, match=r"The authenticity of '.+' can't be established"): repo.sync() + def test_fingerprint_changed(server, tmpdir): """Test updating a fingerprint""" os.environ['P4TRUST'] = os.path.join(tmpdir, 'trust.txt') repo = 
P4Repo(root=tmpdir, fingerprint='FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF:FF') with pytest.raises(Exception, match=r"The authenticity of '.*' can't be established"): - repo.sync() + repo.sync() repo = P4Repo(root=tmpdir, fingerprint=__LEGIT_P4_FINGERPRINT__) synced = repo.sync()