diff --git a/.github/workflows/check.yml b/.github/workflows/check.yml index 80e5239..4df9c12 100644 --- a/.github/workflows/check.yml +++ b/.github/workflows/check.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13'] + python-version: ['3.10', '3.11', '3.12', '3.13', '3.14'] steps: - uses: actions/checkout@v4 diff --git a/config.py b/config.py index 4f348e5..0a9832b 100644 --- a/config.py +++ b/config.py @@ -1,26 +1,26 @@ # Path of the Zope instance configuration to use to instantiate the application # object -conf_path = '/var/lib/zope4/ema/etc/zope.conf' +conf_path = "/var/lib/zope4/ema/etc/zope.conf" # Path to Data.fs which is needed for lookup of object IDs from transaction IDs # with zodbsync watch -datafs_path = '/var/lib/zope4/zeo/var/Data.fs' +datafs_path = "/var/lib/zope4/zeo/var/Data.fs" # user that is used to create commits -manager_user = 'perfact' +manager_user = "perfact" # create the manager user with a default password if not present create_manager_user = True # sets the default owner for objects that have no owner in the file system # representation -default_owner = 'perfact' +default_owner = "perfact" # use default owner even if we're told otherwise by meta file force_default_owner = False # Base directory of the repository -base_dir = '/opt/perfact/dbutils-zoperepo' +base_dir = "/opt/perfact/dbutils-zoperepo" # default settings for git repos commit_name = "Zope Developer" @@ -28,8 +28,8 @@ commit_message = "Generic commit message." # email address to send commit summaries of default commits to -#codechange_mail = "zope-devel@example.de" -#codechange_sender = "no-reply-zodbsync-changes@example.de" +# codechange_mail = "zope-devel@example.de" +# codechange_sender = "no-reply-zodbsync-changes@example.de" # Path to script which is called to define the phases of playback to be # executed. 
diff --git a/perfact/__init__.py b/perfact/__init__.py index 3ad9513..b36383a 100644 --- a/perfact/__init__.py +++ b/perfact/__init__.py @@ -1,2 +1,3 @@ from pkgutil import extend_path + __path__ = extend_path(__path__, __name__) diff --git a/perfact/zodbsync/__init__.py b/perfact/zodbsync/__init__.py index 4e0f3f2..c1fc3bc 100644 --- a/perfact/zodbsync/__init__.py +++ b/perfact/zodbsync/__init__.py @@ -1,11 +1,11 @@ -from .zodbsync import mod_read, mod_write from .extedit import launch as extedit_launch -from .helpers import obj_modtime, db_modtime +from .helpers import db_modtime, obj_modtime +from .zodbsync import mod_read, mod_write __all__ = [ - 'mod_read', - 'mod_write', - 'obj_modtime', - 'db_modtime', - 'extedit_launch', + "mod_read", + "mod_write", + "obj_modtime", + "db_modtime", + "extedit_launch", ] diff --git a/perfact/zodbsync/commands/checkout.py b/perfact/zodbsync/commands/checkout.py index d687053..2c3b808 100644 --- a/perfact/zodbsync/commands/checkout.py +++ b/perfact/zodbsync/commands/checkout.py @@ -4,48 +4,54 @@ class Checkout(SubCommand): - '''Switch to another branch''' + """Switch to another branch""" + @staticmethod def add_args(parser): parser.add_argument( - '--skip-errors', action='store_true', default=False, - help='Skip failed objects and continue', - ) - parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Only check for conflicts and roll back at the end.', + "--skip-errors", + action="store_true", + default=False, + help="Skip failed objects and continue", ) parser.add_argument( - '-b', action='store_true', default=False, - help='Create branch.', + "--dry-run", + action="store_true", + default=False, + help="Only check for conflicts and roll back at the end.", ) parser.add_argument( - '-t', '--track', type=str, help='Set up upstream configuration.' 
+ "-b", + action="store_true", + default=False, + help="Create branch.", ) parser.add_argument( - '--reset', type=str, - help='Reset branch onto given commit.', + "-t", "--track", type=str, help="Set up upstream configuration." ) parser.add_argument( - '--rebase', type=str, - help='Rebase branch onto given commit.', + "--reset", + type=str, + help="Reset branch onto given commit.", ) parser.add_argument( - 'branch', type=str, - help='''Branch name''' + "--rebase", + type=str, + help="Rebase branch onto given commit.", ) + parser.add_argument("branch", type=str, help="""Branch name""") @SubCommand.gitexec def run(self): - self.logger.info('Checking out %s.' % self.args.branch) - cmd = ['checkout'] + self.logger.info("Checking out %s." % self.args.branch) + cmd = ["checkout"] if self.args.b: - cmd.append('-b') + cmd.append("-b") cmd.append(self.args.branch) if self.args.b and self.args.track: - cmd.extend(['--track', self.args.track]) + cmd.extend(["--track", self.args.track]) self.gitcmd_run(*cmd) if self.args.reset: - self.gitcmd_run('reset', '--hard', self.args.reset) + self.gitcmd_run("reset", "--hard", self.args.reset) if self.args.rebase: - self.gitcmd_run('rebase', self.args.rebase) + self.gitcmd_run("rebase", self.args.rebase) diff --git a/perfact/zodbsync/commands/execute.py b/perfact/zodbsync/commands/execute.py index 01abb47..5894dde 100644 --- a/perfact/zodbsync/commands/execute.py +++ b/perfact/zodbsync/commands/execute.py @@ -6,25 +6,30 @@ class Exec(SubCommand): - '''Execute a command and play back any paths changed between old and new - HEAD''' + """Execute a command and play back any paths changed between old and new + HEAD""" + @staticmethod def add_args(parser): parser.add_argument( - '--skip-errors', action='store_true', default=False, - help='Skip failed objects and continue', - ) - parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Only check for conflicts and roll back at the end.', + "--skip-errors", + 
action="store_true", + default=False, + help="Skip failed objects and continue", ) parser.add_argument( - '--nocd', action='store_true', default=False, - help='Do not cd to git repo for command', + "--dry-run", + action="store_true", + default=False, + help="Only check for conflicts and roll back at the end.", ) parser.add_argument( - 'cmd', type=str, help='''command to be executed''' + "--nocd", + action="store_true", + default=False, + help="Do not cd to git repo for command", ) + parser.add_argument("cmd", type=str, help="""command to be executed""") @SubCommand.gitexec def run(self): diff --git a/perfact/zodbsync/commands/fastforward.py b/perfact/zodbsync/commands/fastforward.py index 1e7bf5d..7403d0f 100644 --- a/perfact/zodbsync/commands/fastforward.py +++ b/perfact/zodbsync/commands/fastforward.py @@ -4,26 +4,28 @@ class FF(SubCommand): - ''' + """ Perform a fast-forward merge to the target commit and apply changed paths - ''' + """ + @staticmethod def add_args(parser): parser.add_argument( - '--skip-errors', action='store_true', default=False, - help='Skip failed objects and continue', - ) - parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Only check for conflicts and roll back at the end.', + "--skip-errors", + action="store_true", + default=False, + help="Skip failed objects and continue", ) parser.add_argument( - 'commit', type=str, - help='''Target commit''' + "--dry-run", + action="store_true", + default=False, + help="Only check for conflicts and roll back at the end.", ) + parser.add_argument("commit", type=str, help="""Target commit""") @SubCommand.gitexec def run(self): target = self.args.commit - self.logger.info('Attempting fast-forward merge to %s.' % target) - self.gitcmd_run('merge', '--ff-only', target) + self.logger.info("Attempting fast-forward merge to %s." 
% target) + self.gitcmd_run("merge", "--ff-only", target) diff --git a/perfact/zodbsync/commands/freeze.py b/perfact/zodbsync/commands/freeze.py index 560166d..5973fe9 100644 --- a/perfact/zodbsync/commands/freeze.py +++ b/perfact/zodbsync/commands/freeze.py @@ -5,12 +5,15 @@ class Freeze(SubCommand): - '''Mark paths as frozen and record them''' + """Mark paths as frozen and record them""" + @staticmethod def add_args(parser): parser.add_argument( - 'path', type=str, nargs='*', - help='Sub-Path in Data.fs to be frozen', + "path", + type=str, + nargs="*", + help="Sub-Path in Data.fs to be frozen", ) @SubCommand.with_lock @@ -18,6 +21,6 @@ def run(self): for path in self.args.path: fullpath = self.sync.fs_path(path) os.makedirs(fullpath, exist_ok=True) - with open('{}/__frozen__'.format(fullpath), 'w'): + with open("{}/__frozen__".format(fullpath), "w"): pass self.sync.record(paths=self.args.path, recurse=True) diff --git a/perfact/zodbsync/commands/layer_init.py b/perfact/zodbsync/commands/layer_init.py index 1758495..cece8d4 100644 --- a/perfact/zodbsync/commands/layer_init.py +++ b/perfact/zodbsync/commands/layer_init.py @@ -8,29 +8,30 @@ class LayerInit(SubCommand): """Register layers from source to work_dir, assuming objects are already in the Data.FS, but are now to be provided by a new layer.""" - subcommand = 'layer-init' + + subcommand = "layer-init" @staticmethod def add_args(parser): parser.add_argument( - 'ident', type=str, nargs='*', - help='Layer identifier(s). May be * for all', + "ident", + type=str, + nargs="*", + help="Layer identifier(s). 
May be * for all", ) @SubCommand.with_lock def run(self): - layers = {layer['ident']: layer - for layer in self.sync.layers - if layer['ident']} + layers = {layer["ident"]: layer for layer in self.sync.layers if layer["ident"]} idents = self.args.ident - if idents == ['*']: + if idents == ["*"]: idents = layers.keys() for ident in idents: assert ident in layers, "Invalid ident" for ident in idents: layer = layers[ident] - source = layer['source'] - target = layer['workdir'] + source = layer["source"] + target = layer["workdir"] self.unpack_source(source, target) - sp.run(['git', 'add', '.'], cwd=target) - sp.run(['git', 'commit', '-m', 'zodbsync layer-init'], cwd=target) + sp.run(["git", "add", "."], cwd=target) + sp.run(["git", "commit", "-m", "zodbsync layer-init"], cwd=target) diff --git a/perfact/zodbsync/commands/layer_update.py b/perfact/zodbsync/commands/layer_update.py index d20b432..f5c51fc 100644 --- a/perfact/zodbsync/commands/layer_update.py +++ b/perfact/zodbsync/commands/layer_update.py @@ -8,91 +8,94 @@ class LayerUpdate(SubCommand): """Update layers.""" - subcommand = 'layer-update' + + subcommand = "layer-update" @staticmethod def add_args(parser): parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Only check for conflicts and roll back at the end.', + "--dry-run", + action="store_true", + default=False, + help="Only check for conflicts and roll back at the end.", ) parser.add_argument( - '--skip-errors', action='store_true', + "--skip-errors", + action="store_true", help="Skip failed objects and continue", - default=False + default=False, ) parser.add_argument( - '--message', '-m', type=str, default='zodbsync layer-update', + "--message", + "-m", + type=str, + default="zodbsync layer-update", help="Commit message base", ) parser.add_argument( - 'ident', type=str, nargs='*', - help='Layer identifier(s). May be * for all', + "ident", + type=str, + nargs="*", + help="Layer identifier(s). 
May be * for all", ) def commit_all(self, target, msg): """Commit all unstaged changes in target, returning the commit ID or None if there is no change.""" - sp.run(['git', 'add', '.'], cwd=target) - if sp.run(['git', 'commit', '-m', msg], cwd=target).returncode == 0: - return sp.check_output(['git', 'rev-parse', 'HEAD'], - cwd=target, text=True).strip() + sp.run(["git", "add", "."], cwd=target) + if sp.run(["git", "commit", "-m", msg], cwd=target).returncode == 0: + return sp.check_output( + ["git", "rev-parse", "HEAD"], cwd=target, text=True + ).strip() def run_layer(self, layer): """ For given layer, commit any unstaged changes, update work_dir from source, commit that and play back any changes. """ - source = layer['source'] - target = layer['workdir'] + source = layer["source"] + target = layer["workdir"] msg = self.args.message - precommit = self.commit_all(target, f'{msg} (pre)') + precommit = self.commit_all(target, f"{msg} (pre)") self.unpack_source(source, target) changes = [ - line[3:] for line in sp.check_output( - ['git', 'status', '--porcelain', '-u', '--no-renames'], + line[3:] + for line in sp.check_output( + ["git", "status", "--porcelain", "-u", "--no-renames"], cwd=target, text=True, - ).split('\n') + ).split("\n") if line ] commit = None if changes: commit = self.commit_all(target, msg) - self.restore[layer['ident']] = (precommit, commit) + self.restore[layer["ident"]] = (precommit, commit) return { - os.path.dirname(line[len('__root__'):]) + os.path.dirname(line[len("__root__") :]) for line in changes - if line.startswith('__root__/') + if line.startswith("__root__/") } def restore_layer(self, layer): """ Restore layer for dry-run or in case of failure """ - (precommit, commit) = self.restore[layer['ident']] - target = layer['workdir'] + precommit, commit = self.restore[layer["ident"]] + target = layer["workdir"] if commit: - sp.run( - ['git', 'reset', '--hard', f'{commit}~'], - cwd=target, check=True - ) + sp.run(["git", "reset", "--hard", 
f"{commit}~"], cwd=target, check=True) if precommit: - sp.run( - ['git', '-reset', f'{precommit}~'], - cwd=target, check=True - ) + sp.run(["git", "-reset", f"{precommit}~"], cwd=target, check=True) @SubCommand.with_lock def run(self): "Process given layers" self.restore = {} # Info for restoring for dry-run paths = set() - layers = {layer['ident']: layer - for layer in self.sync.layers - if layer['ident']} + layers = {layer["ident"]: layer for layer in self.sync.layers if layer["ident"]} idents = self.args.ident - if idents == ['*']: + if idents == ["*"]: idents = layers.keys() for ident in idents: assert ident in layers, "Invalid ident" @@ -122,10 +125,9 @@ def run(self): for ident in idents: self.restore_layer(layers[ident]) else: - self.sync.record(paths, recurse=False, skip_errors=True, - ignore_removed=True) + self.sync.record( + paths, recurse=False, skip_errors=True, ignore_removed=True + ) for path in paths: - if self.sync.fs_pathinfo(path)['layeridx'] == 0: - self.logger.warning( - 'Conflict with object in custom layer: ' + path - ) + if self.sync.fs_pathinfo(path)["layeridx"] == 0: + self.logger.warning("Conflict with object in custom layer: " + path) diff --git a/perfact/zodbsync/commands/pick.py b/perfact/zodbsync/commands/pick.py index cfe084b..390b3f8 100644 --- a/perfact/zodbsync/commands/pick.py +++ b/perfact/zodbsync/commands/pick.py @@ -4,66 +4,77 @@ class Pick(SubCommand): - '''Cherry-pick commits, apply them and play back affected objects''' + """Cherry-pick commits, apply them and play back affected objects""" + @staticmethod def add_args(parser): parser.add_argument( - '--skip-errors', action='store_true', default=False, - help='Skip failed objects and continue', + "--skip-errors", + action="store_true", + default=False, + help="Skip failed objects and continue", ) parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Only check for conflicts and roll back at the end.', + "--dry-run", + action="store_true", + 
default=False, + help="Only check for conflicts and roll back at the end.", ) parser.add_argument( - '--grep', type=str, help="""Find commits starting from the given + "--grep", + type=str, + help="""Find commits starting from the given ones, limiting to those with commit messages matching the pattern - like "git log --grep".""", ) parser.add_argument( - '--since', type=str, help="""Find commits since the given timestamp + "--since", + type=str, + help="""Find commits since the given timestamp - like "git log --since".""", ) parser.add_argument( - '--until', type=str, help="""Find commits until the given timestamp + "--until", + type=str, + help="""Find commits until the given timestamp - like "git log --until".""", ) parser.add_argument( - 'commit', type=str, nargs='*', - help='''Commits that are checked for compatibility and applied, - playing back all affected paths at the end.''' + "commit", + type=str, + nargs="*", + help="""Commits that are checked for compatibility and applied, + playing back all affected paths at the end.""", ) @SubCommand.gitexec def run(self): commits = [] if self.args.grep or self.args.since or self.args.until: - cmd = [ - 'log', '--format=%H', '--reverse' - ] + cmd = ["log", "--format=%H", "--reverse"] if self.args.grep: - cmd.extend(['--grep', self.args.grep]) + cmd.extend(["--grep", self.args.grep]) if self.args.since: - cmd.extend(['--since', self.args.since]) + cmd.extend(["--since", self.args.since]) if self.args.until: - cmd.extend(['--until', self.args.until]) - commits = self.gitcmd_output( - *cmd, *self.args.commit - ).split('\n') + cmd.extend(["--until", self.args.until]) + commits = self.gitcmd_output(*cmd, *self.args.commit).split("\n") else: for commit in self.args.commit: - if '..' in commit: + if ".." 
in commit: # commit range - commits.extend(self.gitcmd_output( - 'log', '--format=%H', '--reverse', commit - ).split('\n')) + commits.extend( + self.gitcmd_output( + "log", "--format=%H", "--reverse", commit + ).split("\n") + ) else: commits.append(commit) for commit in commits: if not commit: continue - self.logger.info('Checking and applying %s.' % commit) + self.logger.info("Checking and applying %s." % commit) # capture output and discard so we don't clutter stdout # Python 2 has no subprocess.DEVNULL. - self.gitcmd_output('cherry-pick', '--strategy', 'resolve', commit) + self.gitcmd_output("cherry-pick", "--strategy", "resolve", commit) diff --git a/perfact/zodbsync/commands/playback.py b/perfact/zodbsync/commands/playback.py index 98e7269..c785ba1 100644 --- a/perfact/zodbsync/commands/playback.py +++ b/perfact/zodbsync/commands/playback.py @@ -4,33 +4,42 @@ class Playback(SubCommand): - '''Play back objects from the file system to the Data.FS''' + """Play back objects from the file system to the Data.FS""" + @staticmethod def add_args(parser): parser.add_argument( - '--override', '-o', action='store_true', - help='Override object type changes when uploading', - default=False + "--override", + "-o", + action="store_true", + help="Override object type changes when uploading", + default=False, ) parser.add_argument( - '--no-recurse', action='store_true', - help='''Only upload metadata, do not remove elements or recurse. + "--no-recurse", + action="store_true", + help="""Only upload metadata, do not remove elements or recurse. 
Note: If a path no longer present on the file system is given, it - is still removed.''', - default=False + is still removed.""", + default=False, ) parser.add_argument( - '--skip-errors', action='store_true', + "--skip-errors", + action="store_true", help="Skip failed objects and continue", - default=False + default=False, ) parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Roll back at the end.', + "--dry-run", + action="store_true", + default=False, + help="Roll back at the end.", ) parser.add_argument( - 'path', type=str, nargs='*', - help='Sub-Path in Data.fs to be played back', + "path", + type=str, + nargs="*", + help="Sub-Path in Data.fs to be played back", ) @SubCommand.with_lock diff --git a/perfact/zodbsync/commands/record.py b/perfact/zodbsync/commands/record.py index 76f3ab9..a934c96 100644 --- a/perfact/zodbsync/commands/record.py +++ b/perfact/zodbsync/commands/record.py @@ -1,8 +1,8 @@ #!/usr/bin/env python +import argparse import smtplib import subprocess -import argparse from email.mime.text import MIMEText from ..subcommand import SubCommand @@ -10,33 +10,45 @@ class Record(SubCommand): - '''Record objects from the Data.FS to the file system''' + """Record objects from the Data.FS to the file system""" + @staticmethod def add_args(parser): parser.add_argument( - '--lasttxn', action='store_true', default=False, - help='Add paths mentioned in transactions since the last used', + "--lasttxn", + action="store_true", + default=False, + help="Add paths mentioned in transactions since the last used", ) parser.add_argument( - '--commit', action='store_true', default=False, - help='Commit changes and send summary mail if there are any', + "--commit", + action="store_true", + default=False, + help="Commit changes and send summary mail if there are any", ) parser.add_argument( - '--autoreset', action='store_true', default=False, - help='Automatically reset changes after sending mail for --commit', + "--autoreset", + 
action="store_true", + default=False, + help="Automatically reset changes after sending mail for --commit", ) parser.add_argument( - '--no-recurse', action='store_true', default=False, - help='Record only specified paths without recursing', + "--no-recurse", + action="store_true", + default=False, + help="Record only specified paths without recursing", ) parser.add_argument( - '--skip-errors', action='store_true', + "--skip-errors", + action="store_true", help="Skip failed objects and continue", - default=False + default=False, ) parser.add_argument( - 'path', type=str, nargs='*', - help='Sub-Path in Data.fs to be recorded', + "path", + type=str, + nargs="*", + help="Sub-Path in Data.fs to be recorded", ) def commit(self): @@ -45,44 +57,43 @@ def commit(self): summary. """ commit_message = self.config["commit_message"] - self.gitcmd_run('add', '.') + self.gitcmd_run("add", ".") try: - self.gitcmd_run('commit', '-m', commit_message) + self.gitcmd_run("commit", "-m", commit_message) except subprocess.CalledProcessError: # Nothing to commit return # only send a mail if something has changed - codechg_mail = self.config.get('codechange_mail', False) + codechg_mail = self.config.get("codechange_mail", False) if codechg_mail: # pragma: no cover - self.logger.info('Commit was done! Sending mail...') - pfsystemid = open('/etc/pfsystemid').read().strip() - pfsystemname = open('/etc/pfsystemname').read().strip() + self.logger.info("Commit was done! 
Sending mail...") + pfsystemid = open("/etc/pfsystemid").read().strip() + pfsystemname = open("/etc/pfsystemname").read().strip() - status = self.gitcmd_output('show', '--name-status', 'HEAD') + status = self.gitcmd_output("show", "--name-status", "HEAD") - msg = MIMEText(status, 'plain', 'utf-8') - msg['Subject'] = 'Commit summary on {} ({})'.format(pfsystemname, - pfsystemid) + msg = MIMEText(status, "plain", "utf-8") + msg["Subject"] = "Commit summary on {} ({})".format( + pfsystemname, pfsystemid + ) recipients = codechg_mail.split() for recipient in recipients: - msg['To'] = recipient + msg["To"] = recipient - msg['From'] = self.config.get('codechange_sender', - 'codechanges@perfact.de') + msg["From"] = self.config.get("codechange_sender", "codechanges@perfact.de") - smtp = smtplib.SMTP('localhost') - smtp.sendmail(msg['From'], recipients, msg.as_string()) + smtp = smtplib.SMTP("localhost") + smtp.sendmail(msg["From"], recipients, msg.as_string()) smtp.quit() if self.args.autoreset: - reset = Reset(sync=self.sync, logger=self.logger, - config=self.config) + reset = Reset(sync=self.sync, logger=self.logger, config=self.config) parser = argparse.ArgumentParser() - parser.add_argument('--no-lock', action='store_true') + parser.add_argument("--no-lock", action="store_true") reset.add_args(parser) - reset.args = parser.parse_args(['--no-lock', 'HEAD~']) + reset.args = parser.parse_args(["--no-lock", "HEAD~"]) reset.run() @SubCommand.with_lock @@ -99,22 +110,22 @@ def run(self): txnid=lasttxn, limit=51, ) - newest_txnid = res['newest_txnid'] - if (res['search_limit_reached'] or res['limit_reached'] or - res['no_records']): + newest_txnid = res["newest_txnid"] + if res["search_limit_reached"] or res["limit_reached"] or res["no_records"]: # Limits reached mean we need to perform a full dump to # recover. The same if there is no transaction present, # probably due to a pack of the ZODB. 
- paths.append('/') + paths.append("/") recurse = True else: - paths.extend(res['paths']) + paths.extend(res["paths"]) - self.sync.record(paths=paths, recurse=recurse, - skip_errors=self.args.skip_errors) + self.sync.record( + paths=paths, recurse=recurse, skip_errors=self.args.skip_errors + ) if self.args.commit: self.commit() if self.args.lasttxn and (newest_txnid != lasttxn): - self.sync.txn_write(newest_txnid or '') + self.sync.txn_write(newest_txnid or "") diff --git a/perfact/zodbsync/commands/reformat.py b/perfact/zodbsync/commands/reformat.py index 2e0ca6f..6e2e9db 100644 --- a/perfact/zodbsync/commands/reformat.py +++ b/perfact/zodbsync/commands/reformat.py @@ -2,8 +2,8 @@ import os -from ..subcommand import SubCommand from ..helpers import StrRepr, literal_eval +from ..subcommand import SubCommand from ..zodbsync import mod_format @@ -11,6 +11,7 @@ class Reformat(SubCommand): """ Rewrite commits from given commit to HEAD to post-4.3.2 formatting """ + # Note that in contrast to most subcommands, this should probably be # executed on a local repository not directly in sync with a ZODB. No # rollback in case of error is implemented yet! 
@@ -20,52 +21,54 @@ class Reformat(SubCommand): @staticmethod def add_args(parser): parser.add_argument( - 'commit', type=str, - help="Starting point before first commit to rewrite" + "commit", type=str, help="Starting point before first commit to rewrite" ) def head(self): - return self.gitcmd_output('rev-parse', 'HEAD').strip() + return self.gitcmd_output("rev-parse", "HEAD").strip() @SubCommand.with_lock def run(self): start = self.args.commit commits_raw = self.gitcmd_output( - 'log', '--format=%H', '--reverse', - '{}..HEAD'.format(start) + "log", "--format=%H", "--reverse", "{}..HEAD".format(start) ) - commits = [c for c in commits_raw.strip().split('\n') if c] + commits = [c for c in commits_raw.strip().split("\n") if c] - self.gitcmd_run('reset', '--hard', start) - base = self.config['base_dir'] + self.gitcmd_run("reset", "--hard", start) + base = self.config["base_dir"] paths = [] - for root, dirs, files in os.walk(os.path.join(base, '__root__')): - if '__meta__' in files: - paths.append(os.path.join(root, '__meta__')) + for root, dirs, files in os.walk(os.path.join(base, "__root__")): + if "__meta__" in files: + paths.append(os.path.join(root, "__meta__")) if self.reformat(paths): - self.gitcmd_run('commit', '-a', '-m', 'zodbsync reformat') + self.gitcmd_run("commit", "-a", "-m", "zodbsync reformat") for idx, commit in enumerate(commits): - print("Processing commit {}/{}".format(idx+1, len(commits))) + print("Processing commit {}/{}".format(idx + 1, len(commits))) cur = self.head() - paths = list({ - os.path.join(base, line) - for line in self.gitcmd_output( - 'diff', '--name-only', '--no-renames', commit + '~', commit - ).strip().split('\n') - if line - }) - metas = {path for path in paths if path.endswith('/__meta__')} + paths = list( + { + os.path.join(base, line) + for line in self.gitcmd_output( + "diff", "--name-only", "--no-renames", commit + "~", commit + ) + .strip() + .split("\n") + if line + } + ) + metas = {path for path in paths if 
path.endswith("/__meta__")} if self.reformat(metas, True): - self.gitcmd_run('commit', '-a', '-m', 'reverse') - self.gitcmd_run('checkout', '--no-overlay', commit, '--', *paths) - self.gitcmd_try('commit', '--no-edit', '-c', commit) + self.gitcmd_run("commit", "-a", "-m", "reverse") + self.gitcmd_run("checkout", "--no-overlay", commit, "--", *paths) + self.gitcmd_try("commit", "--no-edit", "-c", commit) self.reformat(metas) # Squash commits together with original message - self.gitcmd_run('reset', cur) + self.gitcmd_run("reset", cur) while paths: - self.gitcmd_run('add', *paths[:100]) + self.gitcmd_run("add", *paths[:100]) del paths[:100] - self.gitcmd_try('commit', '--no-edit', '-c', commit) + self.gitcmd_try("commit", "--no-edit", "-c", commit) def reformat(self, paths, legacy=False): changed = False @@ -93,7 +96,7 @@ def reformat(self, paths, legacy=False): fmt = mod_format(data) if orig == fmt: continue - with open(path, 'w') as f: + with open(path, "w") as f: f.write(fmt) changed = True diff --git a/perfact/zodbsync/commands/reset.py b/perfact/zodbsync/commands/reset.py index f5e20bd..edecc25 100644 --- a/perfact/zodbsync/commands/reset.py +++ b/perfact/zodbsync/commands/reset.py @@ -4,24 +4,26 @@ class Reset(SubCommand): - '''Reset to some other commit and play back any changed paths''' + """Reset to some other commit and play back any changed paths""" + @staticmethod def add_args(parser): parser.add_argument( - '--skip-errors', action='store_true', default=False, - help='Skip failed objects and continue', - ) - parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Only check for conflicts and roll back at the end.', + "--skip-errors", + action="store_true", + default=False, + help="Skip failed objects and continue", ) parser.add_argument( - 'commit', type=str, - help='''Target commit''' + "--dry-run", + action="store_true", + default=False, + help="Only check for conflicts and roll back at the end.", ) + 
parser.add_argument("commit", type=str, help="""Target commit""") @SubCommand.gitexec def run(self): target = self.args.commit - self.logger.info('Checking and resetting to %s.' % target) - self.gitcmd_run('reset', '--hard', target) + self.logger.info("Checking and resetting to %s." % target) + self.gitcmd_run("reset", "--hard", target) diff --git a/perfact/zodbsync/commands/upload.py b/perfact/zodbsync/commands/upload.py index afd4633..87110fb 100644 --- a/perfact/zodbsync/commands/upload.py +++ b/perfact/zodbsync/commands/upload.py @@ -7,98 +7,102 @@ from ..subcommand import SubCommand from ..zodbsync import mod_format - -PY2 = (sys.version_info.major == 2) +PY2 = sys.version_info.major == 2 def create_template(type, content_type=None): - result = {'type': type, 'title': ''} + result = {"type": type, "title": ""} if content_type is not None: - result['props'] = [[ - ('id', 'content_type'), - ('type', 'string'), - ('value', content_type) - ]] + result["props"] = [ + [("id", "content_type"), ("type", "string"), ("value", content_type)] + ] return result META_TEMPLATES = { - 'folder': create_template('Folder'), - 'js': create_template('File', 'application/javascript'), - 'css': create_template('File', 'text/css'), - 'html': create_template('File', 'text/html'), - 'odt': create_template('File', 'application/vnd.oasis.opendocument.text'), - 'ods': create_template( - 'File', 'application/vnd.oasis.opendocument.spreadsheet'), - 'pdf': create_template('File', 'application/pdf'), - 'svg': create_template('File', 'image/svg+xml'), + "folder": create_template("Folder"), + "js": create_template("File", "application/javascript"), + "css": create_template("File", "text/css"), + "html": create_template("File", "text/html"), + "odt": create_template("File", "application/vnd.oasis.opendocument.text"), + "ods": create_template("File", "application/vnd.oasis.opendocument.spreadsheet"), + "pdf": create_template("File", "application/pdf"), + "svg": create_template("File", 
"image/svg+xml"), } -META_DEFAULT = create_template('File', 'application/octet-stream') +META_DEFAULT = create_template("File", "application/octet-stream") class Upload(SubCommand): - '''[DEPRECATED] Upload a folder structure, e.g. a JS library, to zope - Data.FS''' + """[DEPRECATED] Upload a folder structure, e.g. a JS library, to zope + Data.FS""" @staticmethod def add_args(parser): parser.add_argument( - 'source', type=str, - help='Path of source library folder', + "source", + type=str, + help="Path of source library folder", ) parser.add_argument( - 'path', type=str, - help='Sub-Path in Data.fs to put source folder', + "path", + type=str, + help="Sub-Path in Data.fs to put source folder", ) parser.add_argument( - '--override', '-o', action='store_true', - help='Override object type changes when uploading', - default=False + "--override", + "-o", + action="store_true", + help="Override object type changes when uploading", + default=False, ) parser.add_argument( - '--skip-errors', action='store_true', + "--skip-errors", + action="store_true", help="Skip failed objects and continue", - default=False + default=False, ) parser.add_argument( - '--dry-run', action='store_true', default=False, - help='Roll back at the end.', + "--dry-run", + action="store_true", + default=False, + help="Roll back at the end.", ) parser.add_argument( - '--replace-periods', action='store_true', default=False, - help='Replace periods in file names with underscores', + "--replace-periods", + action="store_true", + default=False, + help="Replace periods in file names with underscores", ) parser.add_argument( - '--valid-extensions', type=str, + "--valid-extensions", + type=str, help=( - 'Only upload files with the extensions listed ' - '(comma separated list). Allow all extensions by default.' - ) + "Only upload files with the extensions listed " + "(comma separated list). Allow all extensions by default." 
+ ), ) @SubCommand.with_lock def run(self): - ''' + """ Convert source folder into zodbsync compatible struct in repodir and upload it. - ''' + """ warnings.warn( - 'Static external assets should not be included in the Data.FS', - DeprecationWarning + "Static external assets should not be included in the Data.FS", + DeprecationWarning, ) self.check_repo() # we need both filesystem and Data.fs path representation - data_fs_path, filesystem_path = self.datafs_filesystem_path( - self.args.path - ) + data_fs_path, filesystem_path = self.datafs_filesystem_path(self.args.path) valid_extensions = self.args.valid_extensions if valid_extensions: # Parse comma separated list valid_extensions = [ item.strip() - for item in valid_extensions.split(',') + for item in valid_extensions.split(",") if len(item.strip()) > 0 ] @@ -117,50 +121,43 @@ def run(self): # do not forget meta file for folder self.create_file( - file_path=os.path.join(new_folder, '__meta__'), - content=mod_format(META_TEMPLATES['folder']) + file_path=os.path.join(new_folder, "__meta__"), + content=mod_format(META_TEMPLATES["folder"]), ) # now check files inside of folder for filename in files: - file_ending = filename.split('.')[-1] + file_ending = filename.split(".")[-1] # bail out if not a valid extension - if (valid_extensions is not None and - file_ending not in valid_extensions): + if valid_extensions is not None and file_ending not in valid_extensions: continue # read file content from source file - with open( - os.path.join(cur_dir_path, filename), 'rb' - ) as sourcefile: + with open(os.path.join(cur_dir_path, filename), "rb") as sourcefile: file_content = sourcefile.read() # choose the original filename, or replace periods if self.args.replace_periods: - repo_filename = filename.replace('.', '_') + repo_filename = filename.replace(".", "_") else: repo_filename = filename # in repo each file gets its own folder ... 
- new_file_folder = os.path.join( - new_folder, repo_filename - ) + new_file_folder = os.path.join(new_folder, repo_filename) os.makedirs(new_file_folder) # ... containing __meta__ and __source__ file self.create_file( - file_path=os.path.join(new_file_folder, '__meta__'), - content=mod_format( - META_TEMPLATES.get(file_ending, META_DEFAULT) - ) + file_path=os.path.join(new_file_folder, "__meta__"), + content=mod_format(META_TEMPLATES.get(file_ending, META_DEFAULT)), ) self.create_file( file_path=os.path.join( - new_file_folder, '__source__.' + file_ending + new_file_folder, "__source__." + file_ending ), content=file_content, - binary=True + binary=True, ) # conversion done, start playback @@ -177,6 +174,6 @@ def run(self): self.abort() except Exception: - self.logger.exception('Error uploading files. Resetting.') + self.logger.exception("Error uploading files. Resetting.") self.abort() raise diff --git a/perfact/zodbsync/commands/watch.py b/perfact/zodbsync/commands/watch.py index ce3f22e..9c8713e 100644 --- a/perfact/zodbsync/commands/watch.py +++ b/perfact/zodbsync/commands/watch.py @@ -1,30 +1,32 @@ #!/usr/bin/env python import base64 -import signal -import time -import threading -import sys import pickle +import signal import subprocess +import sys +import threading +import time # For reading the Data.FS in order to obtain affected object IDs from # transaction IDs import ZODB.FileStorage -from ..subcommand import SubCommand from ..helpers import increment_txnid +from ..subcommand import SubCommand from ..zodbsync import mod_read class TreeOutdatedException(Exception): """Exception which is raised if the internal tree structure is not matching the actual Filesystem anymore""" + pass class Watch(SubCommand): """Periodically check for changes and record them""" + # Connects to ZEO, builds a mirror of the tree structure of the objects, # periodically checks for new transactions, looks directly into the Data.FS # to get the object IDs affected by those 
transactions, and updates its @@ -33,10 +35,10 @@ class Watch(SubCommand): @staticmethod def add_args(parser): parser.add_argument( - '--init', - action='store_true', + "--init", + action="store_true", default=False, - help='Internal mode for initialization subprocess', + help="Internal mode for initialization subprocess", ) def __init__(self, **kw): @@ -68,23 +70,23 @@ def __init__(self, **kw): self.additional_oids = {} def _set_last_visible_txn(self): - ''' Set self.last_visible_txn to a transaction ID such that every + """Set self.last_visible_txn to a transaction ID such that every effect up to this ID is visible in the current transaction and every effect for transaction IDs above this are not yet visible. - ''' + """ self.last_visible_txn = self.app._p_jar._db.lastTransaction() def _store_last_visible_txn(self): - ''' + """ Store last visible transaction ID to disk if it changed. - ''' + """ if self.last_visible_txn != self.txnid_on_disk: self.txnid_on_disk = self.last_visible_txn self.sync.txn_write(base64.b64encode(self.last_visible_txn)) - def _init_tree(self, obj, parent_oid=None, path='/'): - ''' Insert obj and everything below into self.object_tree. 
''' - if not hasattr(obj, '_p_oid'): + def _init_tree(self, obj, parent_oid=None, path="/"): + """Insert obj and everything below into self.object_tree.""" + if not hasattr(obj, "_p_oid"): # objects that have no oid are ignored return None # In some Python/Zope versions, _p_oid is a zodbpickle.binary, which is @@ -101,23 +103,21 @@ def _init_tree(self, obj, parent_oid=None, path='/'): self.last_report = now self.object_tree[oid] = { - 'parent': parent_oid, - 'children': children, - 'path': path, + "parent": parent_oid, + "children": children, + "path": path, } # If it turns out there are other objects needing such a hack, this # should probably be moved to object_types - if obj.meta_type == 'User Folder': + if obj.meta_type == "User Folder": self.additional_oids[bytes(obj.data._p_oid)] = oid for user in obj.getUsers(): self.additional_oids[bytes(user._p_oid)] = oid for child_id, child_obj in sorted(obj.objectItems()): child_oid = self._init_tree( - obj=child_obj, - parent_oid=oid, - path=path+child_id+'/' + obj=child_obj, parent_oid=oid, path=path + child_id + "/" ) if child_oid: children[child_oid] = child_id @@ -159,8 +159,7 @@ def _read_changed_oids(self, txn_start, txn_stop): # * plen: the size of the pickle data, which comes after the # header dlen = dhead.recordlen() - oid = self.additional_oids.get(bytes(dhead.oid), - bytes(dhead.oid)) + oid = self.additional_oids.get(bytes(dhead.oid), bytes(dhead.oid)) self.changed_oids.add(oid) pos = pos + dlen @@ -171,40 +170,37 @@ def _remove_subtree(self, oid): todo = [oid] while todo: oid = todo.pop() - todo.extend(self.object_tree[oid]['children']) + todo.extend(self.object_tree[oid]["children"]) del self.object_tree[oid] def _record_object(self, oid): - ''' + """ Store data of an object at the path stored in our object tree. 
- ''' - path = self.object_tree[oid]['path'] - self.logger.info('Recording %s' % path) - self.logger.debug('OID: ' + repr(oid)) + """ + path = self.object_tree[oid]["path"] + self.logger.info("Recording %s" % path) + self.logger.debug("OID: " + repr(oid)) obj = self.app._p_jar[oid] - data = mod_read( - obj=obj, - default_owner=self.sync.default_owner - ) + data = mod_read(obj=obj, default_owner=self.sync.default_owner) pathinfo = self.sync.fs_write(path=path, data=data) - path_layer = pathinfo['layers'][pathinfo['layeridx']]['ident'] - current_layer = getattr(obj, 'zodbsync_layer', None) + path_layer = pathinfo["layers"][pathinfo["layeridx"]]["ident"] + current_layer = getattr(obj, "zodbsync_layer", None) if current_layer != path_layer: with self.sync.tm: obj.zodbsync_layer = path_layer def _update_objects(self): - ''' + """ Run through the changed oids and update the tree and the file system accordingly. - ''' + """ if not len(self.changed_oids): return - self.logger.info('Found %s changed objects' % len(self.changed_oids)) - self.logger.debug('OIDs: ' + str(sorted(self.changed_oids))) + self.logger.info("Found %s changed objects" % len(self.changed_oids)) + self.logger.debug("OIDs: " + str(sorted(self.changed_oids))) while len(self.changed_oids): # not all oids are part of our object tree yet, so we have to @@ -229,51 +225,51 @@ def _update_objects(self): self.sync.fs_prune_empty_dirs() def _update_children(self, oid): - ''' + """ Check the current children of an object and compare with the stored children. Remove any superfluous children (oid not found or wrong id) and record any new children recursively. 
- ''' + """ obj = self.app._p_jar[oid] node = self.object_tree[oid] newchildren = {} for child_id, child_obj in obj.objectItems(): - if not hasattr(child_obj, '_p_oid'): + if not hasattr(child_obj, "_p_oid"): continue newchildren[bytes(child_obj._p_oid)] = child_id # Go through old children and check if some are to be deleted - for child_oid, child_id in list(node['children'].items()): + for child_oid, child_id in list(node["children"].items()): if newchildren.get(child_oid) == child_id: continue self._remove_subtree(child_oid) - del node['children'][child_oid] + del node["children"][child_oid] - pathinfo = self.sync.fs_pathinfo(node['path']) + pathinfo = self.sync.fs_pathinfo(node["path"]) self.sync.fs_prune(pathinfo, newchildren.values()) # Add new children to changed_oids so they will also be recorded for child_oid, child_id in list(newchildren.items()): - if child_oid in node['children']: + if child_oid in node["children"]: continue - newpath = node['path']+child_id+'/' + newpath = node["path"] + child_id + "/" if child_oid in self.object_tree: # The parent changed. Remove from there. Since the old parent # will also be changed, the call to fs_prune there will take # care of removing everything on the FS. 
- old_parent = self.object_tree[child_oid]['parent'] - del self.object_tree[old_parent]['children'][child_oid] + old_parent = self.object_tree[child_oid]["parent"] + del self.object_tree[old_parent]["children"][child_oid] self._remove_subtree(child_oid) self.changed_oids.add(child_oid) self.object_tree[child_oid] = { - 'parent': oid, - 'children': {}, - 'path': newpath, + "parent": oid, + "children": {}, + "path": newpath, } - node['children'][child_oid] = child_id + node["children"][child_oid] = child_id def quit(self, signo, _frame): """ @@ -283,12 +279,12 @@ def quit(self, signo, _frame): self.exit.set() def register_signals(self): - for sig in ('TERM', 'HUP', 'INT'): - signal.signal(getattr(signal, 'SIG'+sig), self.quit) + for sig in ("TERM", "HUP", "INT"): + signal.signal(getattr(signal, "SIG" + sig), self.quit) def unregister_signals(self): - for sig in ('TERM', 'HUP', 'INT'): - signal.signal(getattr(signal, 'SIG'+sig), signal.SIG_DFL) + for sig in ("TERM", "HUP", "INT"): + signal.signal(getattr(signal, "SIG" + sig), signal.SIG_DFL) def setup(self): """ @@ -328,22 +324,17 @@ def setup(self): if self.txnid_on_disk is None: # no txnid found, record everything - paths = ['/'] + paths = ["/"] else: self.txnid_on_disk = base64.b64decode(self.txnid_on_disk) txn_start = increment_txnid(self.txnid_on_disk) # obtain all object ids affected by transactions between (the one # in last_txn + 1) and (the currently visible one) (incl.) - self._read_changed_oids( - txn_start=txn_start, - txn_stop=self.last_visible_txn - ) + self._read_changed_oids(txn_start=txn_start, txn_stop=self.last_visible_txn) paths = [] while len(self.changed_oids): - next_oids = self.changed_oids.intersection( - self.object_tree.keys() - ) + next_oids = self.changed_oids.intersection(self.object_tree.keys()) if not len(next_oids): # The remaining oids are not reachable by any of the # currently existing nodes. 
This can happen during @@ -352,9 +343,7 @@ def setup(self): # affected objects are collected for earlier transactions, # but they might no longer exist break - paths.extend( - [self.object_tree[oid]['path'] for oid in next_oids] - ) + paths.extend([self.object_tree[oid]["path"] for oid in next_oids]) self.changed_oids.difference_update(next_oids) self.sync.record(paths) @@ -376,25 +365,31 @@ def spawned_setup(self): long time and uses much less memory. Therefore, we use a separate process. """ - cmd = [sys.executable, sys.argv[0], '--config', self.args.config, - 'watch', '--init'] + cmd = [ + sys.executable, + sys.argv[0], + "--config", + self.args.config, + "watch", + "--init", + ] data = pickle.loads(subprocess.check_output(cmd)) - self.object_tree = data['tree'] - self.additional_oids = data['add_oids'] - self.last_visible_txn = self.txnid_on_disk = data['txn'] + self.object_tree = data["tree"] + self.additional_oids = data["add_oids"] + self.last_visible_txn = self.txnid_on_disk = data["txn"] def dump_setup_data(self, stream=sys.stdout): """ Print pickled setup data for usage in main process. """ data = { - 'tree': self.object_tree, - 'add_oids': self.additional_oids, - 'txn': self.last_visible_txn, + "tree": self.object_tree, + "add_oids": self.additional_oids, + "txn": self.last_visible_txn, } # write binary to stdout - in Py3, this requires using # sys.stdout.buffer, in Py2 sys.stdout itself is used. - pickle.dump(data, file=getattr(stream, 'buffer', stream)) + pickle.dump(data, file=getattr(stream, "buffer", stream)) def step(self): """Read new transactions, update the object tree and record all @@ -418,15 +413,13 @@ def step(self): self._store_last_visible_txn() except TreeOutdatedException: - self.logger.info( - 'Exiting due to inconsistencies in filesystem' - ) + self.logger.info("Exiting due to inconsistencies in filesystem") self.exit.set() finally: self.release_lock() def run(self, interval=10): - """ Setup and run in a loop. 
""" + """Setup and run in a loop.""" if self.args.init: self.setup() self.dump_setup_data() @@ -438,4 +431,4 @@ def run(self, interval=10): # a wait that is interrupted immediately if exit.set() is called self.exit.wait(interval) - self.logger.info('Exited due to signal') + self.logger.info("Exited due to signal") diff --git a/perfact/zodbsync/commands/with_lock.py b/perfact/zodbsync/commands/with_lock.py index 7acaa90..ca3419c 100644 --- a/perfact/zodbsync/commands/with_lock.py +++ b/perfact/zodbsync/commands/with_lock.py @@ -7,13 +7,16 @@ class WithLock(SubCommand): """Execute a shell command by first grabbing the lock""" - subcommand = 'with-lock' + + subcommand = "with-lock" connect = False @staticmethod def add_args(parser): parser.add_argument( - 'cmd', type=str, help="Shell-command to be executed", + "cmd", + type=str, + help="Shell-command to be executed", ) @SubCommand.with_lock diff --git a/perfact/zodbsync/extedit.py b/perfact/zodbsync/extedit.py index ded5073..93aeeb6 100644 --- a/perfact/zodbsync/extedit.py +++ b/perfact/zodbsync/extedit.py @@ -1,14 +1,13 @@ #!/usr/bin/env python import json -from base64 import b64encode, b64decode +from base64 import b64decode, b64encode from .zodbsync import mod_read, mod_write -def launch(context, script, path, source=None, orig_source=None, - encoding=None): - ''' +def launch(context, script, path, source=None, orig_source=None, encoding=None): + """ Launcher for external edit. If called without a source, it is used to create a control file that @@ -24,19 +23,19 @@ def launch(context, script, path, source=None, orig_source=None, An encoding of None means the sources are Unicode. Other than that, only 'base64' is supported, which means the sources are interpreted as base64 encoded binary data. 
- ''' + """ resp = context.REQUEST.RESPONSE if source is None: - content_type = 'application/x-perfact-zopeedit' + content_type = "application/x-perfact-zopeedit" result = controlfile( context=context, path=path, url=script.absolute_url(), ) else: - content_type = 'application/json' + content_type = "application/json" result = update( context=context, path=path, @@ -46,12 +45,12 @@ def launch(context, script, path, source=None, orig_source=None, ) result = json.dumps(result) - resp.setHeader('Content-Type', content_type) + resp.setHeader("Content-Type", content_type) return result def read_obj(context, path, force_encoding=None): - ''' + """ Locate object at given path and return dictionary containing everything of interest. @@ -59,38 +58,38 @@ def read_obj(context, path, force_encoding=None): to 'base64' or the source can not be interpreted as UTF-8, it is a base64 representation of the actual source and the field 'encoding' is also set appropriately. - ''' + """ obj = context - for part in path.split('/'): + for part in path.split("/"): if not part: continue obj = getattr(obj, part) result = mod_read(obj) - result['path'] = '/' + '/'.join(obj.getPhysicalPath()) - result['parent'] = obj.aq_parent + result["path"] = "/" + "/".join(obj.getPhysicalPath()) + result["parent"] = obj.aq_parent encoding = force_encoding - if force_encoding and isinstance(result['source'], str): + if force_encoding and isinstance(result["source"], str): # We need bytes to encode with Base64 - result['source'] = result['source'].encode('utf-8') + result["source"] = result["source"].encode("utf-8") - if not force_encoding and isinstance(result['source'], bytes): + if not force_encoding and isinstance(result["source"], bytes): # Try to represent as UTF-8 for better readability. # If that does not work, switch to Base64. 
try: - result['source'] = result['source'].decode('utf-8') + result["source"] = result["source"].decode("utf-8") except UnicodeDecodeError: - encoding = 'base64' + encoding = "base64" - if encoding == 'base64': - result['source'] = b64encode(result['source']).decode('ascii') - result['encoding'] = encoding + if encoding == "base64": + result["source"] = b64encode(result["source"]).decode("ascii") + result["encoding"] = encoding return result def controlfile(context, path, url): - ''' + """ Creates a control file that can be used by an external editor to update the contents of an object. The control file contains * the entrypoint url (which should be a script in Zope wrapping the @@ -99,59 +98,60 @@ def controlfile(context, path, url): * the path to the object in question * the meta_type of the object * the source of the object - ''' + """ data = read_obj(context, path) headers = [ - ('url', url), - ('path', data['path']), - ('auth', context.REQUEST._auth), - ('meta-type', data['type']), + ("url", url), + ("path", data["path"]), + ("auth", context.REQUEST._auth), + ("meta-type", data["type"]), ] - encoding = data.get('encoding', None) + encoding = data.get("encoding", None) if encoding: - headers.append(('encoding', encoding)) + headers.append(("encoding", encoding)) - props = data.get('props', []) + props = data.get("props", []) for prop in props: - if ('id', 'content_type') in prop: - value = [pair for pair in prop if pair[0] == 'value'] + if ("id", "content_type") in prop: + value = [pair for pair in prop if pair[0] == "value"] assert len(value), "Invalid property" - headers.append(('content-type', value[0][1])) + headers.append(("content-type", value[0][1])) break - result = ''.join([ - '{}: {}\n'.format(*header) - for header in headers - ]) + '\n' + data['source'] + result = ( + "".join(["{}: {}\n".format(*header) for header in headers]) + + "\n" + + data["source"] + ) return result def update(context, path, source, orig_source, encoding): - ''' + """ Update 
the object with the given source, but only if its current source matches the expected orig_source. If encoding is set to base64, the sources are considered to be base64 encoded. - ''' - assert encoding in (None, 'base64'), "Invalid encoding" + """ + assert encoding in (None, "base64"), "Invalid encoding" try: data = read_obj(context, path, force_encoding=encoding) except AttributeError: - return {'error': path + ' not found'} + return {"error": path + " not found"} - if data['source'] != orig_source: - return {'error': 'Object was changed in the meantime. Please reload.'} + if data["source"] != orig_source: + return {"error": "Object was changed in the meantime. Please reload."} - if encoding == 'base64': - data['source'] = b64decode(source) + if encoding == "base64": + data["source"] = b64decode(source) elif encoding is None: - data['source'] = source - obj_id = path.rstrip('/').rsplit('/', 1)[-1] - mod_write(data, parent=data['parent'], obj_id=obj_id) + data["source"] = source + obj_id = path.rstrip("/").rsplit("/", 1)[-1] + mod_write(data, parent=data["parent"], obj_id=obj_id) - return {'success': True} + return {"success": True} diff --git a/perfact/zodbsync/helpers.py b/perfact/zodbsync/helpers.py index be3ef83..68c6535 100644 --- a/perfact/zodbsync/helpers.py +++ b/perfact/zodbsync/helpers.py @@ -1,20 +1,21 @@ # -*- coding: utf-8 -*- import ast -import operator import importlib +import operator class Namespace(object): """ Convert a dict to a namespace, allowing access via a.b instead of a['b'] """ + def __init__(self, data=None, **kw): if data: self.__dict__.update(data) self.__dict__.update(kw) -def to_string(value, enc='utf-8'): +def to_string(value, enc="utf-8"): """Convert input into a string""" if isinstance(value, str): return value @@ -23,7 +24,7 @@ def to_string(value, enc='utf-8'): return str(value) -def to_bytes(value, enc='utf-8'): +def to_bytes(value, enc="utf-8"): """Convert input to bytes (encoded strings)""" if isinstance(value, memoryview): 
return value.tobytes() @@ -35,16 +36,16 @@ def to_bytes(value, enc='utf-8'): def remove_redundant_paths(paths): - ''' + """ Sort list of paths and remove items that are redundant if remaining paths are processed recursively, i.e., if /a/b/ as well as /a/ are included, remove /a/b/. Works in-place and also returns the list. - ''' + """ paths.sort() i = 0 last = None while i < len(paths): - current = paths[i].rstrip('/') + '/' + current = paths[i].rstrip("/") + "/" if last is not None and current.startswith(last): del paths[i] continue @@ -54,7 +55,7 @@ def remove_redundant_paths(paths): class StrRepr: - '''Create a printable output of the given object data. + """Create a printable output of the given object data. Dicts are converted to sorted lists of tuples, tuples and lists recurse into their elements. The top-level element should be a dict. `seprules` is a dictionary mapping from keys of the top-level dict to a @@ -64,8 +65,9 @@ class StrRepr: `legacy` mode turns off line splitting for iterables with less than two items and puts the closing bracket on the same indentation level as the items except for the top level. 
- ''' - def _collect(self, data, level=0, nl='\n'): + """ + + def _collect(self, data, level=0, nl="\n"): "Internal recursion worker" if not isinstance(data, (list, tuple)): @@ -73,9 +75,12 @@ def _collect(self, data, level=0, nl='\n'): return # start new line for each element - linesep = (level == 0 - or level == 2 and isinstance(data, list) - or level in self.seprules.get(self.section, [])) + linesep = ( + level == 0 + or level == 2 + and isinstance(data, list) + or level in self.seprules.get(self.section, []) + ) if self.legacy and len(data) < 2: linesep = False # add separator after last element - usually only for lists that are @@ -83,30 +88,30 @@ def _collect(self, data, level=0, nl='\n'): lastsep = linesep if isinstance(data, list): - opn, cls = '[', ']' + opn, cls = "[", "]" if isinstance(data, tuple): - opn, cls = '(', ')' + opn, cls = "(", ")" if len(data) == 1: lastsep = True self.output.append(opn) - incnl = nl + ' ' + incnl = nl + " " for idx, item in enumerate(data): if level == 0: self.section = item[0] if linesep: self.output.append(incnl) - self._collect(item, level+1, incnl) - self.output.append(',') + self._collect(item, level + 1, incnl) + self.output.append(",") else: - self._collect(item, level+1, nl) + self._collect(item, level + 1, nl) if idx < len(data) - 1 or lastsep: - self.output.append(', ') + self.output.append(", ") if self.legacy and linesep and level > 0: - self.output.append(incnl+cls) + self.output.append(incnl + cls) else: - self.output.append(nl+cls if linesep else cls) + self.output.append(nl + cls if linesep else cls) def __call__(self, data, seprules=None, legacy=False): "Collect output parts recursively and return their concatenation" @@ -118,19 +123,19 @@ def __call__(self, data, seprules=None, legacy=False): if isinstance(data, dict): data = sorted(data.items()) self._collect(data) - return ''.join(self.output) + '\n' + return "".join(self.output) + "\n" def read_pdata(obj): - '''Avoid authentication problems when reading 
linked pdata.''' + """Avoid authentication problems when reading linked pdata.""" if isinstance(obj.data, (bytes, str)): source = obj.data else: data = obj.data if isinstance(data.data, bytes): - source = b'' + source = b"" elif isinstance(data.data, str): - source = '' + source = "" while data is not None: source += data.data data = data.next @@ -138,13 +143,13 @@ def read_pdata(obj): def literal_eval(value): - '''Literal evaluator (with a bit more power than PT). + """Literal evaluator (with a bit more power than PT). This evaluator is capable of parsing large data sets, and it has basic arithmetic operators included. - ''' + """ if isinstance(value, (bytes, str)): - value = ast.parse(value, mode='eval') + value = ast.parse(value, mode="eval") bin_ops = { ast.Add: operator.add, @@ -168,16 +173,15 @@ def _convert(node): elif isinstance(node, ast.List): return list(map(_convert, node.elts)) elif isinstance(node, ast.Dict): - return dict((_convert(k), _convert(v)) for k, v - in zip(node.keys, node.values)) - elif isinstance(node, ast.BinOp): - return bin_ops[type(node.op)]( - _convert(node.left), - _convert(node.right) + return dict( + (_convert(k), _convert(v)) for k, v in zip(node.keys, node.values) ) + elif isinstance(node, ast.BinOp): + return bin_ops[type(node.op)](_convert(node.left), _convert(node.right)) elif isinstance(node, ast.UnaryOp): return unary_ops[type(node.op)](_convert(node.operand)) - raise Exception('Unsupported type {}'.format(repr(node))) + raise Exception("Unsupported type {}".format(repr(node))) + return _convert(value) @@ -185,7 +189,7 @@ def prop_dict(data): props = {} # Get the properties from object data - p = dict(data).get('props', None) + p = dict(data).get("props", None) if not p: return props @@ -193,32 +197,28 @@ def prop_dict(data): for item in p: pd = dict(item) # Extract only the value - props[pd['id']] = pd['value'] + props[pd["id"]] = pd["value"] return props def load_config(filename): - '''Load the module at "filename" as 
module "name". Return the contents + """Load the module at "filename" as module "name". Return the contents as a dictionary. Skips contents starting with '_'. - ''' - loader = importlib.machinery.SourceFileLoader('config', filename) + """ + loader = importlib.machinery.SourceFileLoader("config", filename) spec = importlib.util.spec_from_loader(loader.name, loader) mod = importlib.util.module_from_spec(spec) loader.exec_module(mod) - return { - name: getattr(mod, name) - for name in dir(mod) - if not name.startswith('_') - } + return {name: getattr(mod, name) for name in dir(mod) if not name.startswith("_")} # Helper for handling transaction IDs (which are byte strings of length 8) def increment_txnid(s): - ''' add 1 to s, but for s being a string of bytes''' + """add 1 to s, but for s being a string of bytes""" arr = bytearray(s) - pos = len(arr)-1 + pos = len(arr) - 1 while pos >= 0: if arr[pos] == 255: arr[pos] = 0 @@ -230,9 +230,9 @@ def increment_txnid(s): def obj_modtime(obj): # pragma: no cover - ''' + """ Allow access to private method of an object to read out the modtime. 
- ''' + """ return obj._p_mtime diff --git a/perfact/zodbsync/main.py b/perfact/zodbsync/main.py index 845ccc2..e9b5dd4 100644 --- a/perfact/zodbsync/main.py +++ b/perfact/zodbsync/main.py @@ -1,9 +1,9 @@ #!/usr/bin/env python -import os -import sys import argparse import logging +import os +import sys import filelock @@ -12,59 +12,78 @@ except ImportError: pass -from .helpers import load_config -from .zodbsync import ZODBSync - -from .commands.record import Record -from .commands.playback import Playback -from .commands.watch import Watch -from .commands.pick import Pick -from .commands.upload import Upload -from .commands.with_lock import WithLock -from .commands.reset import Reset -from .commands.execute import Exec -from .commands.reformat import Reformat from .commands.checkout import Checkout +from .commands.execute import Exec +from .commands.fastforward import FF from .commands.freeze import Freeze from .commands.layer_init import LayerInit from .commands.layer_update import LayerUpdate -from .commands.fastforward import FF +from .commands.pick import Pick +from .commands.playback import Playback +from .commands.record import Record +from .commands.reformat import Reformat +from .commands.reset import Reset +from .commands.upload import Upload +from .commands.watch import Watch +from .commands.with_lock import WithLock +from .helpers import load_config +from .zodbsync import ZODBSync class Runner(object): """ Parses arguments to select the correct SubCommand subclass. 
""" - commands = [Record, Playback, Watch, Pick, Upload, WithLock, Reset, Exec, - Reformat, Checkout, Freeze, LayerInit, LayerUpdate, FF] + + commands = [ + Record, + Playback, + Watch, + Pick, + Upload, + WithLock, + Reset, + Exec, + Reformat, + Checkout, + Freeze, + LayerInit, + LayerUpdate, + FF, + ] def __init__(self): """ Set up the argument parser with the possible subcommands """ - parser = argparse.ArgumentParser(description=''' + parser = argparse.ArgumentParser( + description=""" Tool to sync objects between a ZODB and a git-controlled folder on the file system. - ''') - default_configfile = '/etc/perfact/modsync/zodb.py' + """ + ) + default_configfile = "/etc/perfact/modsync/zodb.py" parser.add_argument( - '--config', '-c', type=str, - help='Path to config (default: %s)' % default_configfile, - default=default_configfile + "--config", + "-c", + type=str, + help="Path to config (default: %s)" % default_configfile, + default=default_configfile, ) parser.add_argument( - '--no-lock', action='store_true', - help='Do not acquire lock. Only use inside a with-lock wrapper.', + "--no-lock", + action="store_true", + help="Do not acquire lock. Only use inside a with-lock wrapper.", ) - if 'perfact.loggingtools' in sys.modules: - perfact.loggingtools.addArgs(parser, name='ZODBSync') + if "perfact.loggingtools" in sys.modules: + perfact.loggingtools.addArgs(parser, name="ZODBSync") # Add all available SubCommand classes as sub-command runners, using # either the property "subcommand" or the name of the class. 
# The chosen subcommand class will be available as args.command subs = parser.add_subparsers() for cls in self.commands: - name = getattr(cls, 'subcommand', cls.__name__.lower()) + name = getattr(cls, "subcommand", cls.__name__.lower()) subparser = subs.add_parser( name, help=cls.__doc__, @@ -92,18 +111,16 @@ def parse(self, *argv): args = self.parser.parse_args(argv if argv else None) self.args = args - if 'perfact.loggingtools' in sys.modules: - logger = perfact.loggingtools.createLogger( - args=args, name='ZODBSync' - ) + if "perfact.loggingtools" in sys.modules: + logger = perfact.loggingtools.createLogger(args=args, name="ZODBSync") else: - logger = logging.getLogger('ZODBSync') + logger = logging.getLogger("ZODBSync") logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler()) logger.propagate = True self.logger = logger - if getattr(args.command, 'use_config', True): + if getattr(args.command, "use_config", True): config = load_config(args.config) if self.config is not None and config != self.config: self.logger.warning("Reusing runner with different config") @@ -112,12 +129,12 @@ def parse(self, *argv): # Usually, each command needs a connection to the ZODB, but it might # explicitly disable it. 
- if self.sync is None and getattr(args.command, 'connect', True): + if self.sync is None and getattr(args.command, "connect", True): self.sync = ZODBSync(config=self.config, logger=logger) if self.config and not args.no_lock: self.lock = filelock.FileLock( - os.path.join(self.config['base_dir'], '.zodbsync.lock') + os.path.join(self.config["base_dir"], ".zodbsync.lock") ) self.command = args.command( diff --git a/perfact/zodbsync/object_mixins.py b/perfact/zodbsync/object_mixins.py index 0801776..5e3ab4d 100644 --- a/perfact/zodbsync/object_mixins.py +++ b/perfact/zodbsync/object_mixins.py @@ -40,10 +40,9 @@ def roles(obj): def local_roles(obj): """Read currently set local roles""" # Ignore local owner role if it is trivial - return list(sorted([ - role for role in obj.get_local_roles() - if role[1] != ('Owner',)] - )) + return list( + sorted([role for role in obj.get_local_roles() if role[1] != ("Owner",)]) + ) @staticmethod def implements(obj): @@ -55,18 +54,18 @@ def read(obj): is_root = obj.isTopLevelPrincipiaApplicationObject if is_root: - result['is_root'] = True + result["is_root"] = True roles = AccessControlObj.roles(obj) if roles: - result['roles'] = roles + result["roles"] = roles local_roles = AccessControlObj.local_roles(obj) if local_roles: - result['local_roles'] = local_roles + result["local_roles"] = local_roles try: - result['owner'] = obj._owner + result["owner"] = obj._owner except AttributeError: pass @@ -79,8 +78,7 @@ def read(obj): # Permission.getRoles() perm_set = obj.ac_inherited_permissions(1) perm_set = [ - AccessControl.Permission.Permission(p[0], p[1], obj) - for p in perm_set + AccessControl.Permission.Permission(p[0], p[1], obj) for p in perm_set ] except AttributeError: perm_set = [] @@ -93,14 +91,14 @@ def read(obj): acquire = isinstance(roles, list) roles = list(roles) roles.sort() - if acquire and not len(roles) or is_root and roles == ['Manager']: + if acquire and not len(roles) or is_root and roles == ["Manager"]: # Does not 
deviate from default continue perms.append((perm.name, acquire, roles)) if perms: perms.sort() - result['perms'] = perms + result["perms"] = perms return result @@ -109,7 +107,7 @@ def write(obj, data): # Set userdef roles cur = AccessControlObj.roles(obj) - tgt = data.get('roles', []) + tgt = data.get("roles", []) for role in tgt: if role not in cur: obj._addRole(role) @@ -119,7 +117,7 @@ def write(obj, data): # Set local roles cur = dict(AccessControlObj.local_roles(obj)) - tgt = dict(data.get('local_roles', tuple())) + tgt = dict(data.get("local_roles", tuple())) users = set(cur.keys()) | set(tgt.keys()) for user in users: if user not in tgt: @@ -132,8 +130,7 @@ def write(obj, data): # no additional roles being granted this permission # An exception is the root application object, which can not acquire stored_perms = { - name: (acquire, roles) - for name, acquire, roles in data.get('perms', []) + name: (acquire, roles) for name, acquire, roles in data.get("perms", []) } for role in obj.ac_inherited_permissions(1): name = role[0] @@ -147,45 +144,45 @@ def write(obj, data): # for the top-level object, where it is not to acquire and # allow Manager if obj.isTopLevelPrincipiaApplicationObject: - roles = ('Manager',) + roles = ("Manager",) else: roles = [] AccessControl.Permission.Permission(name, [], obj).setRoles(roles) # set ownership - if 'owner' in data: - owner = data['owner'] + if "owner" in data: + owner = data["owner"] if isinstance(owner, str): # backward compatibility for older behavior, where the # corresponding UserFolder was not included - owner = (['acl_users'], owner) + owner = (["acl_users"], owner) - obj._owner = data['owner'] + obj._owner = data["owner"] class PropertiesObj(MixinModObj): @staticmethod def implements(obj): - if hasattr(obj, 'aq_explicit'): + if hasattr(obj, "aq_explicit"): me = obj.aq_explicit else: me = obj - return hasattr(me, 'propertyMap') + return hasattr(me, "propertyMap") @staticmethod def read(obj): props = obj.propertyMap() 
# Optional: Ignore the "title" property if it exists - props = list([a for a in props if a['id'] != 'title']) + props = list([a for a in props if a["id"] != "title"]) for prop in props: - prop['value'] = obj.getProperty(prop['id']) + prop["value"] = obj.getProperty(prop["id"]) # Handle inherited properties correctly - if 'mode' in prop: - val = getattr(obj, prop['id']) - del prop['mode'] - prop['value'] = val + if "mode" in prop: + val = getattr(obj, prop["id"]) + del prop["mode"] + prop["value"] = val props = [sorted(a.items()) for a in props] @@ -193,29 +190,29 @@ def read(obj): props.sort() if props: - return {'props': props} + return {"props": props} return {} @staticmethod def write(obj, data): - props = [dict(prop) for prop in data.get('props', [])] - ids = {prop['id'] for prop in props} - vals = {prop['id']: prop['value'] for prop in props} - types = {prop['id']: prop['type'] for prop in props} + props = [dict(prop) for prop in data.get("props", [])] + ids = {prop["id"] for prop in props} + vals = {prop["id"]: prop["value"] for prop in props} + types = {prop["id"]: prop["type"] for prop in props} cur = obj.propertyIds() # Delete any property that is superfluous or has the wrong type del_ids = [ - p for p in cur - if p != 'title' - and (p not in ids or types[p] != obj.getPropertyType(p)) + p + for p in cur + if p != "title" and (p not in ids or types[p] != obj.getPropertyType(p)) ] for p in del_ids: try: obj.manage_delProperties(ids=[p]) except zExceptions.BadRequest as e: - if str(e) == 'Cannot delete output_encoding': + if str(e) == "Cannot delete output_encoding": print("Ignoring failed attempt to delete output_encoding") else: raise @@ -238,18 +235,18 @@ def write(obj, data): class ZCacheableObj(MixinModObj): @staticmethod def implements(obj): - return hasattr(obj, 'ZCacheable_getManagerId') + return hasattr(obj, "ZCacheable_getManagerId") @staticmethod def read(obj): meta = {} zcachemanager = obj.ZCacheable_getManagerId() if zcachemanager: - 
meta['zcachemanager'] = zcachemanager + meta["zcachemanager"] = zcachemanager return meta @staticmethod def write(obj, data): - zcachemanager = data.get('zcachemanager', '') + zcachemanager = data.get("zcachemanager", "") obj.ZCacheable_setManagerId(zcachemanager) return diff --git a/perfact/zodbsync/object_types.py b/perfact/zodbsync/object_types.py index fdf2e11..3da6df0 100644 --- a/perfact/zodbsync/object_types.py +++ b/perfact/zodbsync/object_types.py @@ -24,46 +24,47 @@ def collect_handlers(cls): Create a dictionary mapping the supported meta_types of each class to the handler class by recursing into subclasses. """ - result = { - meta_type: cls - for meta_type in cls.meta_types - } + result = {meta_type: cls for meta_type in cls.meta_types} for sub in cls.__subclasses__(): result.update(sub.collect_handlers()) return result class UserFolderObj(ModObj): - meta_types = ['User Folder', ] + meta_types = [ + "User Folder", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['OFSP'].manage_addUserFolder() + obj.manage_addProduct["OFSP"].manage_addUserFolder() @staticmethod def read(obj): users = [] for user in obj.getUsers(): - users.append(( - user.getUserName(), - user._getPassword(), - user.roles, - user.getDomains(), - )) - return {'users': users} + users.append( + ( + user.getUserName(), + user._getPassword(), + user.roles, + user.getDomains(), + ) + ) + return {"users": users} @staticmethod def write(obj, data): users = obj.getUsers() current_users = [user.getUserName() for user in users] - target_users = [user[0] for user in data['users']] + target_users = [user[0] for user in data["users"]] obj._doDelUsers([u for u in current_users if u not in target_users]) - for user in data['users']: + for user in data["users"]: # according to AccessControl/userfolder.py, an existing user of the # same name is simply overwritten by _doAddUser obj._doAddUser( user[0], # username - '', # password is set separately + "", # password is set separately 
user[2], # roles user[3], # domains ) @@ -73,52 +74,58 @@ def write(obj, data): class DTMLDocumentObj(ModObj): - meta_types = ['DTML Document', ] + meta_types = [ + "DTML Document", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['OFSP'].manage_addDTMLDocument(id=obj_id) + obj.manage_addProduct["OFSP"].manage_addDTMLDocument(id=obj_id) @staticmethod def read(obj): - return { - 'source': helpers.to_bytes(obj.raw) - } + return {"source": helpers.to_bytes(obj.raw)} @staticmethod def write(obj, data): obj.manage_edit( - data=helpers.to_string(data['source']), - title=data['title'], + data=helpers.to_string(data["source"]), + title=data["title"], ) class DTMLMethodObj(DTMLDocumentObj): - meta_types = ['DTML Method', ] + meta_types = [ + "DTML Method", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['OFSP'].manage_addDTMLMethod(id=obj_id) + obj.manage_addProduct["OFSP"].manage_addDTMLMethod(id=obj_id) class DTMLTeXObj(DTMLDocumentObj): - meta_types = ['DTML TeX', ] + meta_types = [ + "DTML TeX", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['DTMLTeX'].manage_addDTMLTeX(id=obj_id) + obj.manage_addProduct["DTMLTeX"].manage_addDTMLTeX(id=obj_id) class ZForceObj(ModObj): - meta_types = ['ZForce', ] + meta_types = [ + "ZForce", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['ZForce'].manage_addZForce( + obj.manage_addProduct["ZForce"].manage_addZForce( id=obj_id, - title='', - query_id='', - fields_id='', + title="", + query_id="", + fields_id="", ) @staticmethod @@ -127,92 +134,100 @@ def read(obj): @staticmethod def write(obj, data): - obj.manage_changeProperties(title=data['title']) + obj.manage_changeProperties(title=data["title"]) class ZSQLMethodObj(ModObj): - meta_types = ['Z SQL Method', ] + meta_types = [ + "Z SQL Method", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['ZSQLMethods'].manage_addZSQLMethod( + 
obj.manage_addProduct["ZSQLMethods"].manage_addZSQLMethod( id=obj_id, - title=data['title'], - connection_id=data['connection_id'], - arguments=data['args'], - template=helpers.to_string(data['source']), + title=data["title"], + connection_id=data["connection_id"], + arguments=data["args"], + template=helpers.to_string(data["source"]), ) @staticmethod def read(obj): return { - 'args': obj.arguments_src, - 'connection_id': obj.connection_id, - 'source': helpers.to_bytes(obj.src), - 'advanced': sorted([ - ('connection_hook', obj.connection_hook), - ('max_rows', obj.max_rows_), - ('max_cache', obj.max_cache_), - ('cache_time', obj.cache_time_), - ('class_name', obj.class_name_), - ('class_file', obj.class_file_), - ]), + "args": obj.arguments_src, + "connection_id": obj.connection_id, + "source": helpers.to_bytes(obj.src), + "advanced": sorted( + [ + ("connection_hook", obj.connection_hook), + ("max_rows", obj.max_rows_), + ("max_cache", obj.max_cache_), + ("cache_time", obj.cache_time_), + ("class_name", obj.class_name_), + ("class_file", obj.class_file_), + ] + ), } @staticmethod def write(obj, data): obj.manage_edit( - title=data['title'], - connection_id=data['connection_id'], - arguments=data['args'], - template=helpers.to_string(data['source']), + title=data["title"], + connection_id=data["connection_id"], + arguments=data["args"], + template=helpers.to_string(data["source"]), ) # Advanced settings - adv = dict(data['advanced']) + adv = dict(data["advanced"]) obj.manage_advanced(**adv) class ExternalMethodObj(ModObj): - meta_types = ['External Method', ] + meta_types = [ + "External Method", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['ExternalMethod'].manage_addExternalMethod( + obj.manage_addProduct["ExternalMethod"].manage_addExternalMethod( id=obj_id, - title=data['title'], - module=data['module'], - function=data['function'], + title=data["title"], + module=data["module"], + function=data["function"], ) @staticmethod def 
read(obj): return { - 'function': obj.function(), - 'module': obj.module(), + "function": obj.function(), + "module": obj.module(), } @staticmethod def write(obj, data): obj.manage_edit( - title=data['title'], - module=data['module'], - function=data['function'], + title=data["title"], + module=data["module"], + function=data["function"], ) class FileObj(ModObj): - meta_types = ['File', ] + meta_types = [ + "File", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['OFSP'].manage_addFile(id=obj_id) + obj.manage_addProduct["OFSP"].manage_addFile(id=obj_id) @staticmethod def read(obj): # XXX Precondition # Read chunked source from File/Image objects. - return {'source': helpers.read_pdata(obj)} + return {"source": helpers.read_pdata(obj)} @staticmethod def write(obj, data): @@ -220,82 +235,88 @@ def write(obj, data): # XXX Precondition? obj.manage_edit( - filedata=data['source'], - content_type=pd['content_type'], - title=data['title'], + filedata=data["source"], + content_type=pd["content_type"], + title=data["title"], ) class ImageObj(FileObj): - meta_types = ['Image', ] + meta_types = [ + "Image", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['OFSP'].manage_addImage(id=obj_id, file='') + obj.manage_addProduct["OFSP"].manage_addImage(id=obj_id, file="") class FolderObj(ModObj): - meta_types = ['Folder', ] + meta_types = [ + "Folder", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['OFSP'].manage_addFolder(id=obj_id) + obj.manage_addProduct["OFSP"].manage_addFolder(id=obj_id) @staticmethod def read(obj): # Site Access try: - get_ar = obj.manage_addProduct['SiteAccess'].manage_getAccessRule + get_ar = obj.manage_addProduct["SiteAccess"].manage_getAccessRule except (AttributeError, KeyError): get_ar = None if get_ar: accessrule = get_ar and get_ar() if accessrule: - return {'accessrule': accessrule} + return {"accessrule": accessrule} return {} @staticmethod def write(obj, data): - 
obj.manage_changeProperties(title=data['title']) + obj.manage_changeProperties(title=data["title"]) # Access Rule - accessrule = data.get('accessrule', None) + accessrule = data.get("accessrule", None) if accessrule: - obj.manage_addProduct['SiteAccess'].manage_addAccessRule( + obj.manage_addProduct["SiteAccess"].manage_addAccessRule( method_id=accessrule ) class FolderOrderedObj(FolderObj): - meta_types = ['Folder (Ordered)', ] + meta_types = [ + "Folder (Ordered)", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['OFSP'].manage_addOrderedFolder(id=obj_id) + obj.manage_addProduct["OFSP"].manage_addOrderedFolder(id=obj_id) @staticmethod def read(obj): result = FolderObj.read(obj) # ordered folders store their contents to represent the ordering - result['contents'] = [a[0] for a in obj.objectItems()] + result["contents"] = [a[0] for a in obj.objectItems()] return result @staticmethod def write(obj, data): - obj.manage_changeProperties(title=data['title']) + obj.manage_changeProperties(title=data["title"]) # Access Rule - accessrule = data.get('accessrule', None) + accessrule = data.get("accessrule", None) if accessrule: - obj.manage_addProduct['SiteAccess'].manage_addAccessRule( + obj.manage_addProduct["SiteAccess"].manage_addAccessRule( method_id=accessrule ) @staticmethod def fix_order(obj, data): # sort children for ordered folders - contents = data.get('contents', []) + contents = data.get("contents", []) srv_contents = [a[0] for a in obj.objectItems()] # only use contents that are present in the object @@ -304,270 +325,284 @@ def fix_order(obj, data): class PageTemplateObj(ModObj): - meta_types = ['Page Template', ] + meta_types = [ + "Page Template", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['PageTemplates'].manage_addPageTemplate( - id=obj_id, - text='' + obj.manage_addProduct["PageTemplates"].manage_addPageTemplate( + id=obj_id, text="" ) @staticmethod def read(obj): - return {'source': obj._text} + 
return {"source": obj._text} @staticmethod def write(obj, data): - obj.pt_setTitle(data['title'], 'utf-8') - obj.write(data['source']) + obj.pt_setTitle(data["title"], "utf-8") + obj.write(data["source"]) class RAMCacheManagerObj(ModObj): - meta_types = ['RAM Cache Manager', ] + meta_types = [ + "RAM Cache Manager", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct[ - 'StandardCacheManagers' - ].manage_addRAMCacheManager(id=obj_id) + obj.manage_addProduct["StandardCacheManagers"].manage_addRAMCacheManager( + id=obj_id + ) @staticmethod def read(obj): - return { - 'settings': sorted(obj.getSettings().items()) - } + return {"settings": sorted(obj.getSettings().items())} @staticmethod def write(obj, data): obj.manage_editProps( - title=data['title'], - settings=dict(data['settings']), + title=data["title"], + settings=dict(data["settings"]), ) class AcceleratedHTTPCacheManagerObj(RAMCacheManagerObj): - meta_types = ['Accelerated HTTP Cache Manager', ] + meta_types = [ + "Accelerated HTTP Cache Manager", + ] @staticmethod def create(obj, data, obj_id): obj.manage_addProduct[ - 'StandardCacheManagers' + "StandardCacheManagers" ].manage_addAcceleratedHTTPCacheManager(id=obj_id) class ScriptPythonObj(ModObj): - meta_types = ['Script (Python)', ] + meta_types = [ + "Script (Python)", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['PythonScripts'].manage_addPythonScript( - id=obj_id - ) + obj.manage_addProduct["PythonScripts"].manage_addPythonScript(id=obj_id) @staticmethod def read(obj): return { - 'bindings': sorted( - obj.getBindingAssignments().getAssignedNames().items() - ), - 'args': obj.params(), - 'source': helpers.to_bytes(obj.body()), - 'proxy_roles': sorted(list(obj._proxy_roles)), + "bindings": sorted(obj.getBindingAssignments().getAssignedNames().items()), + "args": obj.params(), + "source": helpers.to_bytes(obj.body()), + "proxy_roles": sorted(list(obj._proxy_roles)), } @staticmethod def write(obj, data): - 
obj.ZPythonScript_setTitle(title=data['title']) - obj.ZPythonScript_edit(params=data['args'], - body=helpers.to_string(data['source'])) - obj.ZBindings_edit(mapping=dict(data['bindings'])) - obj.manage_proxy(roles=data['proxy_roles']) + obj.ZPythonScript_setTitle(title=data["title"]) + obj.ZPythonScript_edit( + params=data["args"], body=helpers.to_string(data["source"]) + ) + obj.ZBindings_edit(mapping=dict(data["bindings"])) + obj.manage_proxy(roles=data["proxy_roles"]) class ZPsycopgDAObj(ModObj): - meta_types = ['Z Psycopg 2 Database Connection', - 'Z Psycopg Database Connection', ] + meta_types = [ + "Z Psycopg 2 Database Connection", + "Z Psycopg Database Connection", + ] @staticmethod def create(obj, data, obj_id): # id, title, connection_string, check, zdatetime, tilevel, autocommit, # encoding - obj.manage_addProduct['ZPsycopgDA'].manage_addZPsycopgConnection( + obj.manage_addProduct["ZPsycopgDA"].manage_addZPsycopgConnection( id=obj_id, - title=data['title'], - connection_string=data['connection_string'], + title=data["title"], + connection_string=data["connection_string"], ) @staticmethod def read(obj): # late additions may not yet be everywhere in the Data.fs obj_dict = { - 'autocommit': getattr(obj, 'autocommit', False), - 'readonlymode': getattr(obj, 'readonlymode', False), - 'connection_string': obj.connection_string, - 'encoding': obj.encoding, - 'tilevel': obj.tilevel, - 'zdatetime': obj.zdatetime, + "autocommit": getattr(obj, "autocommit", False), + "readonlymode": getattr(obj, "readonlymode", False), + "connection_string": obj.connection_string, + "encoding": obj.encoding, + "tilevel": obj.tilevel, + "zdatetime": obj.zdatetime, } # Place additional parameters into the object dict only if # they're set to non-default values - if hasattr(obj, 'use_tpc') and obj.use_tpc: - obj_dict['use_tpc'] = obj.use_tpc - if hasattr(obj, 'datetime_str') and obj.datetime_str: - obj_dict['datetime_str'] = obj.datetime_str + if hasattr(obj, "use_tpc") and 
obj.use_tpc: + obj_dict["use_tpc"] = obj.use_tpc + if hasattr(obj, "datetime_str") and obj.datetime_str: + obj_dict["datetime_str"] = obj.datetime_str return obj_dict @staticmethod def write(obj, data): parameters = { - 'title': data['title'], - 'connection_string': data['connection_string'], - 'zdatetime': data['zdatetime'], - 'tilevel': data['tilevel'], - 'autocommit': data.get('autocommit', False), - 'readonlymode': data.get('readonlymode', False), - 'encoding': data['encoding'], + "title": data["title"], + "connection_string": data["connection_string"], + "zdatetime": data["zdatetime"], + "tilevel": data["tilevel"], + "autocommit": data.get("autocommit", False), + "readonlymode": data.get("readonlymode", False), + "encoding": data["encoding"], } - if hasattr(obj, 'use_tpc'): - parameters['use_tpc'] = data.get('use_tpc', False) - if hasattr(obj, 'datetime_str'): - parameters['datetime_str'] = data.get('datetime_str', False) + if hasattr(obj, "use_tpc"): + parameters["use_tpc"] = data.get("use_tpc", False) + if hasattr(obj, "datetime_str"): + parameters["datetime_str"] = data.get("datetime_str", False) obj.manage_edit(**parameters) class ZPyODBCDAObj(ModObj): - meta_types = ['Z PyODBC Database Connection', ] + meta_types = [ + "Z PyODBC Database Connection", + ] @staticmethod def create(obj, data, obj_id): # id, title, connection_string, check, zdatetime, tilevel, autocommit, # encoding - obj.manage_addProduct['ZPyODBCDA'].addpyodbcConnectionBrowser( + obj.manage_addProduct["ZPyODBCDA"].addpyodbcConnectionBrowser( id=obj_id, - title=data['title'], - connection_string=data['connection_string'], - auto_commit=data['autocommit'], - MaxRows=data['maxrows'], + title=data["title"], + connection_string=data["connection_string"], + auto_commit=data["autocommit"], + MaxRows=data["maxrows"], ) @staticmethod def read(obj): return { - 'autocommit': obj.auto_commit, - 'connection_string': obj.connx_string, - 'maxrows': obj.MaxRows, + "autocommit": obj.auto_commit, + 
"connection_string": obj.connx_string, + "maxrows": obj.MaxRows, } @staticmethod def write(obj, data): obj.manage_edit( - title=data['title'], - connection_string=data['connection_string'], - auto_commit=data['autocommit'], - MaxRows=data['maxrows'], + title=data["title"], + connection_string=data["connection_string"], + auto_commit=data["autocommit"], + MaxRows=data["maxrows"], ) class ZcxOracleDAObj(ModObj): - meta_types = ['Z cxOracle Database Connection', ] + meta_types = [ + "Z cxOracle Database Connection", + ] @staticmethod def create(obj, data, obj_id): # id, title, connection_string, check, zdatetime, tilevel, autocommit, # encoding - obj.manage_addProduct['ZcxOracleDA'].manage_addZcxOracleConnection( + obj.manage_addProduct["ZcxOracleDA"].manage_addZcxOracleConnection( id=obj_id, - title=data['title'], - connection_string=data['connection_string'], + title=data["title"], + connection_string=data["connection_string"], ) @staticmethod def read(obj): return { - 'connection_string': obj.connection_string, + "connection_string": obj.connection_string, } @staticmethod def write(obj, data): obj.manage_edit( - title=data['title'], - connection_string=data['connection_string'], + title=data["title"], + connection_string=data["connection_string"], ) class ZsapdbDAObj(ModObj): - meta_types = ['Z sap Database Connection', ] + meta_types = [ + "Z sap Database Connection", + ] @staticmethod def create(obj, data, obj_id): # id, title, connection_string, check, zdatetime, tilevel, autocommit, # encoding - obj.manage_addProduct['ZsapdbDA'].manage_addZsapdbConnection( + obj.manage_addProduct["ZsapdbDA"].manage_addZsapdbConnection( id=obj_id, - title=data['title'], - connection_string=data['connection_string'], + title=data["title"], + connection_string=data["connection_string"], ) @staticmethod def read(obj): return { - 'connection_string': obj.connection_string, + "connection_string": obj.connection_string, } @staticmethod def write(obj, data): obj.manage_edit( - 
title=data['title'], - connection_string=data['connection_string'], + title=data["title"], + connection_string=data["connection_string"], ) class SimpleUserFolderObj(FolderObj): - meta_types = ['Simple User Folder', ] + meta_types = [ + "Simple User Folder", + ] @staticmethod def create(obj, data, obj_id): - obj.manage_addProduct['SimpleUserFolder'].addSimpleUserFolder() + obj.manage_addProduct["SimpleUserFolder"].addSimpleUserFolder() class MailHostObj(ModObj): - meta_types = ['Mail Host', ] + meta_types = [ + "Mail Host", + ] @staticmethod def create(obj, data, obj_id): # id, title, connection_string, check, zdatetime, tilevel, autocommit, # encoding - obj.manage_addProduct['MailHost'].manage_addMailHost( + obj.manage_addProduct["MailHost"].manage_addMailHost( id=obj_id, - title=data['title'], - smtp_host=data['smtp_host'], - smtp_port=data['smtp_port'], + title=data["title"], + smtp_host=data["smtp_host"], + smtp_port=data["smtp_port"], ) @staticmethod def read(obj): return { - 'smtp_host': obj.smtp_host, - 'smtp_port': obj.smtp_port, - 'smtp_uid': obj.smtp_uid, - 'smtp_pwd': obj.smtp_pwd, - 'force_tls': obj.force_tls, - 'smtp_queue': obj.smtp_queue, - 'smtp_queue_directory': obj.smtp_queue_directory, + "smtp_host": obj.smtp_host, + "smtp_port": obj.smtp_port, + "smtp_uid": obj.smtp_uid, + "smtp_pwd": obj.smtp_pwd, + "force_tls": obj.force_tls, + "smtp_queue": obj.smtp_queue, + "smtp_queue_directory": obj.smtp_queue_directory, } @staticmethod def write(obj, data): obj.manage_makeChanges( - title=data['title'], - smtp_host=data['smtp_host'], - smtp_port=data['smtp_port'], - smtp_uid=data.get('smtp_uid', ''), - smtp_pwd=data.get('smtp_pwd', ''), - force_tls=data.get('force_tls', False), - smtp_queue=data.get('smtp_queue', ''), - smtp_queue_directory=data.get('smtp_queue_directory', ''), + title=data["title"], + smtp_host=data["smtp_host"], + smtp_port=data["smtp_port"], + smtp_uid=data.get("smtp_uid", ""), + smtp_pwd=data.get("smtp_pwd", ""), + 
force_tls=data.get("force_tls", False), + smtp_queue=data.get("smtp_queue", ""), + smtp_queue_directory=data.get("smtp_queue_directory", ""), ) @@ -581,6 +616,5 @@ def mod_implemented_handlers(obj, meta_type): the mixins. """ return [object_handlers[meta_type]] + [ - cls for cls in MixinModObj.__subclasses__() - if cls.implements(obj) + cls for cls in MixinModObj.__subclasses__() if cls.implements(obj) ] diff --git a/perfact/zodbsync/scripts.py b/perfact/zodbsync/scripts.py index 15a5a13..dd312c0 100644 --- a/perfact/zodbsync/scripts.py +++ b/perfact/zodbsync/scripts.py @@ -1,6 +1,7 @@ import argparse from perfact.zodbsync.main import Runner + try: # psql dump for backwards compatibility import perfact.dbbackup @@ -14,20 +15,24 @@ def zodbsync(): def zoperecord(): parser = argparse.ArgumentParser( - description='Record the Data.fs', - epilog='''This script is deprecated in favor of zodbsync. Only bare + description="Record the Data.fs", + epilog="""This script is deprecated in favor of zodbsync. Only bare functionality is provided for backwards compatibility with existing cron entries. - ''' + """, + ) + parser.add_argument( + "--lasttxn", + action="store_true", + default=False, + help="Record only transactions since the last used.", ) - parser.add_argument('--lasttxn', action='store_true', default=False, - help='Record only transactions since the last used.') args = parser.parse_args() if args.lasttxn: - cmd = 'record --lasttxn' + cmd = "record --lasttxn" else: - cmd = 'record --commit /' + cmd = "record --commit /" runner = Runner().parse(*cmd.split()) @@ -35,14 +40,14 @@ def zoperecord(): # variables are found in the config - this is only for backwards # compatibility, this should be done by perfact-dbrecord instead. 
config = runner.config - databases = getattr(config, 'databases', None) + databases = getattr(config, "databases", None) if not args.lasttxn and databases is not None: runner.logger.warn( - 'Deprecation warning: dumping PostgreSQL schema and tables, which' - ' should be done by perfact-dbrecord instead.' + "Deprecation warning: dumping PostgreSQL schema and tables, which" + " should be done by perfact-dbrecord instead." ) msgbak = config.commit_message - config.commit_message += ' (Database)' + config.commit_message += " (Database)" perfact.dbbackup.git_snapshot(config) config.commit_message = msgbak diff --git a/perfact/zodbsync/subcommand.py b/perfact/zodbsync/subcommand.py index c495770..5758527 100644 --- a/perfact/zodbsync/subcommand.py +++ b/perfact/zodbsync/subcommand.py @@ -1,30 +1,35 @@ #!/usr/bin/env python -import sys -import subprocess +import json import os import shutil +import subprocess +import sys import filelock -import json from .helpers import Namespace class SubCommand(Namespace): - ''' + """ Base class for different sub-commands to be used by zodbsync. - ''' + """ # The presence of one of these in the .git folder indicates that some # process was not finished correctly, which is used to trigger a rollback # in some operations. Are these all? - git_state_indicators = ['rebase-merge', 'rebase-apply', 'CHERRY_PICK_HEAD', - 'MERGE_HEAD', 'REVERT_HEAD'] + git_state_indicators = [ + "rebase-merge", + "rebase-apply", + "CHERRY_PICK_HEAD", + "MERGE_HEAD", + "REVERT_HEAD", + ] @staticmethod def add_args(parser): - ''' Overwrite to add arguments specific to sub-command. 
''' + """Overwrite to add arguments specific to sub-command.""" pass def acquire_lock(self, timeout=10): @@ -35,7 +40,7 @@ def acquire_lock(self, timeout=10): except filelock.Timeout: self.logger.debug("Acquiring exclusive lock...") try: - self.lock.acquire(timeout=timeout-1) + self.lock.acquire(timeout=timeout - 1) except filelock.Timeout: self.logger.error("Unable to acquire lock.") sys.exit(1) @@ -49,6 +54,7 @@ def with_lock(func): """ Decorator for instance methods that are enveloped by a lock """ + def wrapper(self, *args, **kwargs): self.acquire_lock() try: @@ -61,44 +67,35 @@ def wrapper(self, *args, **kwargs): def gitcmd(self, *args): # use "--no-pager" instead of "-P" for compatibility / readability - return ['git', '--no-pager', '-C', self.config['base_dir'] - ] + list(args) + return ["git", "--no-pager", "-C", self.config["base_dir"]] + list(args) def gitcmd_run(self, *args): - '''Wrapper to run a git command.''' + """Wrapper to run a git command.""" subprocess.check_call(self.gitcmd(*args)) def gitcmd_try(self, *args): - '''Wrapper to run a git command, returning return code.''' + """Wrapper to run a git command, returning return code.""" return subprocess.call(self.gitcmd(*args)) def gitcmd_output(self, *args): - '''Wrapper to run a git command and return the output.''' - return subprocess.check_output( - self.gitcmd(*args), universal_newlines=True - ) + """Wrapper to run a git command and return the output.""" + return subprocess.check_output(self.gitcmd(*args), universal_newlines=True) def datafs_filesystem_path(self, path): - '''Create absolute filesystem path from Data.fs path - ''' + """Create absolute filesystem path from Data.fs path""" - if path.startswith('./'): + if path.startswith("./"): path = path[2:] - if path.startswith('/'): + if path.startswith("/"): path = path[1:] data_fs_path = path if path.startswith(self.sync.site): - filesystem_path = os.path.join( - self.config["base_dir"], - path - ) + filesystem_path = 
os.path.join(self.config["base_dir"], path) else: filesystem_path = os.path.join( - self.config["base_dir"], - self.sync.site, - path + self.config["base_dir"], self.sync.site, path ) return data_fs_path, filesystem_path @@ -113,67 +110,71 @@ def unpack_source(src, tgt): targetitems = [] srcitems = os.listdir(src) for entry in srcitems: - if entry.startswith('.'): + if entry.startswith("."): continue - path = f'{src}/{entry}' + path = f"{src}/{entry}" if os.path.isdir(path): # p.e. __root__ or __schema__ as folders # Sometimes, there might be some residual folder with .dpkg-new # files or similar, even though this is now supplied as file. - other = [other for other in srcitems - if other.startswith(entry) and other != entry] + other = [ + other + for other in srcitems + if other.startswith(entry) and other != entry + ] if other: continue targetitems.append(entry) - cmd = ['rsync', '-a', '--delete-during', - f'{path}/', f'{tgt}/{entry}/'] + cmd = ["rsync", "-a", "--delete-during", f"{path}/", f"{tgt}/{entry}/"] else: # p.e. 
__root__.tar.gz -> Unpack to __root__/ - basename = entry.split('.')[0] + basename = entry.split(".")[0] targetitems.append(basename) - os.makedirs(f'{tgt}/{basename}', exist_ok=True) - cmd = ['tar', 'xf', path, '-C', f'{tgt}/{basename}/', - '--recursive-unlink'] + os.makedirs(f"{tgt}/{basename}", exist_ok=True) + cmd = [ + "tar", + "xf", + path, + "-C", + f"{tgt}/{basename}/", + "--recursive-unlink", + ] subprocess.run(cmd, check=True) for entry in os.listdir(tgt): - if entry.startswith('.') or entry in targetitems: + if entry.startswith(".") or entry in targetitems: continue shutil.rmtree(f"{tgt}/{entry}") def _branch_info(self): """Returns currently checked out branch as well as where each branch points.""" - current = self.gitcmd_output( - 'rev-parse', '--abbrev-ref', 'HEAD' - ).strip() + current = self.gitcmd_output("rev-parse", "--abbrev-ref", "HEAD").strip() branches = {} # branchname -> commitid - output = self.gitcmd_output('show-ref', '--heads') - for line in output.strip().split('\n'): + output = self.gitcmd_output("show-ref", "--heads") + for line in output.strip().split("\n"): commit, refname = line.split() - refname = refname[len('refs/heads/'):] + refname = refname[len("refs/heads/") :] branches[refname] = commit return (current, branches) def check_repo(self): - '''Check for unstaged changes and memorize current commit. Move - unstaged changes away via git stash''' + """Check for unstaged changes and memorize current commit. Move + unstaged changes away via git stash""" self.unstaged_changes = [ line[3:] - for line in self.gitcmd_output( - 'status', '--untracked-files', '-z' - ).split('\0') + for line in self.gitcmd_output("status", "--untracked-files", "-z").split( + "\0" + ) if line ] self.orig_branch, self.branches = self._branch_info() if self.unstaged_changes: - self.logger.warning( - "Unstaged changes found. Moving them out of the way." - ) - self.gitcmd_run('stash', 'push', '--include-untracked') + self.logger.warning("Unstaged changes found. 
Moving them out of the way.") + self.gitcmd_run("stash", "push", "--include-untracked") # The commit to compare to with regards to changed files self.orig_commit = self.branches[self.orig_branch] @@ -182,33 +183,35 @@ def _playback_paths(self, paths): paths = self.sync.prepare_paths(paths) dryrun = self.args.dry_run - playback_hook = self.config.get('playback_hook', None) + playback_hook = self.config.get("playback_hook", None) if playback_hook and os.path.isfile(playback_hook): proc = subprocess.Popen( - playback_hook, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - universal_newlines=True) - out, _ = proc.communicate(json.dumps({'paths': paths})) + playback_hook, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) + out, _ = proc.communicate(json.dumps({"paths": paths})) returncode = proc.returncode if returncode: raise AssertionError( - "Error calling playback hook, returncode " - "{}, [[{}]] on {}".format( + "Error calling playback hook, returncode {}, [[{}]] on {}".format( returncode, playback_hook, out ) ) phases = json.loads(out) else: - phases = [{'name': 'playback', 'paths': paths}] - if self.config.get('run_after_playback', None): - phases[-1]['cmd'] = self.config['run_after_playback'] + phases = [{"name": "playback", "paths": paths}] + if self.config.get("run_after_playback", None): + phases[-1]["cmd"] = self.config["run_after_playback"] for ix, phase in enumerate(phases): - phase_name = phase.get('name') or str(ix) - phase_cmd = phase.get('cmd') + phase_name = phase.get("name") or str(ix) + phase_cmd = phase.get("cmd") self.sync.playback_paths( - paths=phase['paths'], + paths=phase["paths"], recurse=False, override=True, skip_errors=self.args.skip_errors, @@ -218,32 +221,26 @@ def _playback_paths(self, paths): if dryrun or not (phase_cmd and os.path.isfile(phase_cmd)): continue - self.logger.info( - 'Calling phase %s, command: %s', phase_name, phase_cmd - ) + 
self.logger.info("Calling phase %s, command: %s", phase_name, phase_cmd) proc = subprocess.Popen( - phase_cmd, stdin=subprocess.PIPE, - stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - universal_newlines=True) - out, _ = proc.communicate(json.dumps( - {'paths': phase['paths']} - )) + phase_cmd, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + universal_newlines=True, + ) + out, _ = proc.communicate(json.dumps({"paths": phase["paths"]})) returncode = proc.returncode if returncode: - self.logger.error( - "Error during phase command %s, %s", - returncode, out - ) + self.logger.error("Error during phase command %s, %s", returncode, out) if sys.stdin.isatty(): print("Enter 'y' to continue, other to rollback") res = input() - if res == 'y': + if res == "y": continue - raise AssertionError( - "Unrecoverable error in phase command" - ) + raise AssertionError("Unrecoverable error in phase command") else: self.logger.info(out) @@ -271,54 +268,59 @@ def wrapper(self, *args, **kwargs): # Fail and roll back for any of the markers of an interrupted # git process (merge/rebase/cherry-pick/etc.) 
for fname in self.git_state_indicators: - path = os.path.join(self.sync.base_dir, '.git', fname) + path = os.path.join(self.sync.base_dir, ".git", fname) assert not os.path.exists(path), "Git state not clean" files = { - line for line in self.gitcmd_output( - 'diff', self.orig_commit, '--name-only', '--no-renames' - ).strip().split('\n') + line + for line in self.gitcmd_output( + "diff", self.orig_commit, "--name-only", "--no-renames" + ) + .strip() + .split("\n") if line } conflicts = files & set(self.unstaged_changes) assert not conflicts, "Change in unstaged files, aborting" # Make unique and sort - self.paths = sorted({ - file for file in files if file.startswith(self.sync.site) - }) + self.paths = sorted( + {file for file in files if file.startswith(self.sync.site)} + ) self._playback_paths(self.paths) if self.args.dry_run: self.abort() elif self.unstaged_changes: - self.gitcmd_run('stash', 'pop') + self.gitcmd_run("stash", "pop") except Exception: - self.logger.error('Error during operation. Resetting.') + self.logger.error("Error during operation. 
Resetting.") # Special handling in case of interrupted cherry-pick: show # differences in affected files - cpfname = os.path.join(self.sync.base_dir, - '.git/CHERRY_PICK_HEAD') + cpfname = os.path.join(self.sync.base_dir, ".git/CHERRY_PICK_HEAD") if os.path.exists(cpfname): with open(cpfname) as f: failed_commit = f.read().strip() output = self.gitcmd_output( - 'diff-tree', '--no-commit-id', '--name-only', - '-r', failed_commit, + "diff-tree", + "--no-commit-id", + "--name-only", + "-r", + failed_commit, ) affected_files = [ - line - for line in output.strip().split('\n') - if line + line for line in output.strip().split("\n") if line ] - self.logger.error("The cherry-pick failed due to the" - " following difference:") + self.logger.error( + "The cherry-pick failed due to the following difference:" + ) try: - self.gitcmd_run('diff', failed_commit + '~', 'HEAD', - '--', *affected_files) + self.gitcmd_run( + "diff", failed_commit + "~", "HEAD", "--", *affected_files + ) except subprocess.CalledProcessError: # Make sure the call to abort is still done, even if # for example the list of affected_files is too long @@ -342,36 +344,37 @@ def wrapper(self, *args, **kwargs): is_ancestor = ( self.gitcmd_try( "merge-base", "--is-ancestor", self.orig_commit, "HEAD" - ) == 0 + ) + == 0 ) if is_ancestor: merge_commits = self.gitcmd_output( - "log", "--oneline", "--min-parents=2", - f"{self.orig_commit}..HEAD" + "log", + "--oneline", + "--min-parents=2", + f"{self.orig_commit}..HEAD", ).strip() if not merge_commits: - head_commit = self.gitcmd_output( - "rev-parse", "HEAD" - ).strip() + head_commit = self.gitcmd_output("rev-parse", "HEAD").strip() cmd = ( - f'zodbsync exec "git revert ' - f'{self.orig_commit}..{head_commit}"' + f'zodbsync exec "git revert ' + f'{self.orig_commit}..{head_commit}"' ) self.logger.info( - "Prepared Command for Rollback:\n%s", - cmd, + "Prepared Command for Rollback:\n%s", + cmd, ) return wrapper def create_file(self, file_path, content, 
binary=False): - flags = 'wb' if binary else 'w' + flags = "wb" if binary else "w" with open(file_path, flags) as create_file: create_file.write(content) def abort(self): - '''Abort actions on repo and revert stash. check_repo must be - called before this can be used''' + """Abort actions on repo and revert stash. check_repo must be + called before this can be used""" current, branches = self._branch_info() # reset currently checked out branch target = self.branches.get(current) @@ -379,7 +382,7 @@ def abort(self): # The branch was not originally present - we still need to reset it # to abort any operation target = branches[current] - self.gitcmd_run('reset', '--hard', target) + self.gitcmd_run("reset", "--hard", target) # reset all other branches for branch in self.branches: @@ -387,18 +390,18 @@ def abort(self): continue if branches[branch] == self.branches[branch]: continue - self.gitcmd_run('branch', '-f', branch, self.branches[branch]) + self.gitcmd_run("branch", "-f", branch, self.branches[branch]) # check out original branch if current != self.orig_branch: - self.gitcmd_run('checkout', self.orig_branch) + self.gitcmd_run("checkout", self.orig_branch) if self.unstaged_changes: - self.gitcmd_run('stash', 'pop') + self.gitcmd_run("stash", "pop") def run(self): - ''' + """ Overwrite for the action that is to be performed if this subcommand is chosen. - ''' + """ print(self.args) diff --git a/perfact/zodbsync/tests/environment.py b/perfact/zodbsync/tests/environment.py index fbf50bd..50ffb79 100644 --- a/perfact/zodbsync/tests/environment.py +++ b/perfact/zodbsync/tests/environment.py @@ -1,38 +1,35 @@ import os -import tempfile import shutil import subprocess +import tempfile -''' +""" These classes together define an environment containing of a running ZEO instance and a git repository (both running in temporary directories) as well as a Zope instance configuration connecting to this ZEO and a ZODBSync configuration connecting everything. 
They are used by the fixture defined in conftest.py to provide an environment for the tests. -''' +""" -class ZeoInstance(): +class ZeoInstance: def __init__(self): self.path = tempfile.mkdtemp() - subprocess.check_call(['mkzeoinstance', self.path]) + subprocess.check_call(["mkzeoinstance", self.path]) # replace address line to use a socket - fname = self.path + '/etc/zeo.conf' + fname = self.path + "/etc/zeo.conf" with open(fname) as f: lines = f.readlines() - subst = ' address ' + self.sockpath() + '\n' - lines = [ - subst if ' address' in line else line - for line in lines - ] - with open(fname, 'w') as f: + subst = " address " + self.sockpath() + "\n" + lines = [subst if " address" in line else line for line in lines] + with open(fname, "w") as f: f.writelines(lines) - self.zeo = subprocess.Popen([self.path + '/bin/runzeo']) + self.zeo = subprocess.Popen([self.path + "/bin/runzeo"]) def sockpath(self): - return self.path + '/var/zeo.sock' + return self.path + "/var/zeo.sock" def cleanup(self): self.zeo.terminate() @@ -40,27 +37,27 @@ def cleanup(self): shutil.rmtree(self.path) -class Repository(): +class Repository: def __init__(self): self.path = tempfile.mkdtemp() commands = [ - ['init'], - ['branch', '-m', 'autotest'], - ['config', 'user.email', 'test@zodbsync.org'], - ['config', 'user.name', 'testrepo'], + ["init"], + ["branch", "-m", "autotest"], + ["config", "user.email", "test@zodbsync.org"], + ["config", "user.name", "testrepo"], ] for cmd in commands: - subprocess.check_call(['git', '-C', self.path] + cmd) + subprocess.check_call(["git", "-C", self.path] + cmd) def cleanup(self): shutil.rmtree(self.path) -class ZopeConfig(): +class ZopeConfig: def __init__(self, zeosock): self.path = tempfile.mkdtemp() - self.config = self.path + '/zope.conf' - content = ''' + self.config = self.path + "/zope.conf" + content = """ %define INSTANCE {path} %define ZEO_SERVER {zeosock} @@ -76,22 +73,23 @@ def __init__(self, zeosock): mount-point / - 
'''.format(zeosock=zeosock, path=self.path) + """.format(zeosock=zeosock, path=self.path) - with open(self.config, 'w') as f: + with open(self.config, "w") as f: f.write(content) def cleanup(self): shutil.rmtree(self.path) -class ZODBSyncConfig(): +class ZODBSyncConfig: def __init__(self, env): self.folder = tempfile.mkdtemp() - os.mkdir(self.folder + '/layers') - self.path = self.folder + '/zodb.py' - with open(self.path, 'w') as f: - f.write(''' + os.mkdir(self.folder + "/layers") + self.path = self.folder + "/zodb.py" + with open(self.path, "w") as f: + f.write( + """ conf_path = '{zopeconf}' datafs_path = '{zeopath}/var/Data.fs' manager_user = 'perfact' @@ -102,36 +100,38 @@ def __init__(self, env): commit_email = "zope-devel@example.de" commit_message = "Generic commit message." layers = "{root}/layers" - '''.format( - zopeconf=env['zopeconfig'].config, - zeopath=env['zeo'].path, - repodir=env['repo'].path, - root=self.folder, - )) + """.format( + zopeconf=env["zopeconfig"].config, + zeopath=env["zeo"].path, + repodir=env["repo"].path, + root=self.folder, + ) + ) def cleanup(self): shutil.rmtree(self.folder) -class JSLib(): - ''' +class JSLib: + """ A test JS library containing some JS and CSS files - ''' + """ + def __init__(self): self.path = tempfile.mkdtemp() - self.js_folder = os.path.join(self.path, 'js', 'plugins') + self.js_folder = os.path.join(self.path, "js", "plugins") os.makedirs(self.js_folder) - with open(os.path.join(self.js_folder, 'something.js'), 'w') as jsfile: - jsfile.write('alert(1);\n') + with open(os.path.join(self.js_folder, "something.js"), "w") as jsfile: + jsfile.write("alert(1);\n") - self.css_folder = os.path.join(self.path, 'css', 'skins') + self.css_folder = os.path.join(self.path, "css", "skins") os.makedirs(self.css_folder) - with open(os.path.join(self.css_folder, 'dark.css'), 'w') as cssfile: - cssfile.write('body { background-color: black; }\n') + with open(os.path.join(self.css_folder, "dark.css"), "w") as cssfile: + 
cssfile.write("body { background-color: black; }\n") - with open(os.path.join(self.path, 'ignoreme'), 'w') as ignorefile: - ignorefile.write('something to ignore') + with open(os.path.join(self.path, "ignoreme"), "w") as ignorefile: + ignorefile.write("something to ignore") def cleanup(self): shutil.rmtree(self.path) diff --git a/perfact/zodbsync/tests/test_helpers.py b/perfact/zodbsync/tests/test_helpers.py index 1ca1452..14a17bd 100644 --- a/perfact/zodbsync/tests/test_helpers.py +++ b/perfact/zodbsync/tests/test_helpers.py @@ -9,13 +9,13 @@ def test_remove_redundant_paths(): Check that redundant subpaths are actually removed """ paths = [ - '/test', - '/test/sub', - '/another', + "/test", + "/test/sub", + "/another", ] target = [ - '/another', - '/test', + "/another", + "/test", ] helpers.remove_redundant_paths(paths) assert paths == target @@ -26,7 +26,7 @@ def test_remove_redundant_paths_only_real_subpaths(): Check that paths are only recognized as redundant if they are actually subpaths, not if the last path component starts with the other. """ - paths = ['/test', '/test2'] + paths = ["/test", "/test2"] new_paths = paths[:] helpers.remove_redundant_paths(new_paths) assert paths == new_paths @@ -36,12 +36,12 @@ def test_converters(): """ Several tests for to_* methods """ - for value in ['test', b'test']: - assert helpers.to_bytes(value) == b'test' - assert helpers.to_string(value) == 'test' - assert helpers.to_string([1]) == '[1]' - assert helpers.to_bytes([1]) == b'[1]' - assert helpers.to_bytes(memoryview(b'test')) == b'test' + for value in ["test", b"test"]: + assert helpers.to_bytes(value) == b"test" + assert helpers.to_string(value) == "test" + assert helpers.to_string([1]) == "[1]" + assert helpers.to_bytes([1]) == b"[1]" + assert helpers.to_bytes(memoryview(b"test")) == b"test" def test_StrRepr(): @@ -50,7 +50,8 @@ def test_StrRepr(): is split to occupy one line for each element, reproducing the shown formatting. 
""" - fmt = """ + fmt = ( + """ [ ('content', [ 'a', @@ -77,12 +78,14 @@ def test_StrRepr(): [('id', 'scalar'), ('type', 'string'), ('value', 'test')], ]), ] - """.strip() + '\n' + """.strip() + + "\n" + ) data = dict(helpers.literal_eval(fmt)) rules = { - 'perms': [4], - 'props': [5], + "perms": [4], + "props": [5], } assert fmt == helpers.StrRepr()(data, rules) @@ -91,7 +94,8 @@ def test_StrReprLegacy(): """ Reproduce the shown formatting of StrRepr when using legacy mode """ - fmt = """ + fmt = ( + """ [ ('content', [ 'a', @@ -105,14 +109,16 @@ def test_StrReprLegacy(): [('id', 'scalar'), ('type', 'string'), ('value', 'test')], ]), ] - """.strip() + '\n' + """.strip() + + "\n" + ) data = dict(helpers.literal_eval(fmt)) assert fmt == helpers.StrRepr()(data, legacy=True) def test_literal_eval(): tests = [ - ["b'test'", b'test'], + ["b'test'", b"test"], ["{1: 2}", {1: 2}], ["[1, 2, 3]", [1, 2, 3]], ["None", None], @@ -122,21 +128,21 @@ def test_literal_eval(): assert helpers.literal_eval("1 + 2") == 3 assert helpers.literal_eval("-True") == -1 with pytest.raises(Exception): - helpers.literal_eval('f(1)') + helpers.literal_eval("f(1)") def test_path_diff(): """Check that path_diff also handles cases where the last element is not the same in both lists.""" old = [ - ('Abc', '1234'), - ('Def', 'afaf'), - ('Xyz', 'yzyz'), + ("Abc", "1234"), + ("Def", "afaf"), + ("Xyz", "yzyz"), ] new = [ - ('Abc', '1234'), - ('Def', 'axax'), - ('Yyy', 'yzyz'), + ("Abc", "1234"), + ("Def", "axax"), + ("Yyy", "yzyz"), ] result = helpers.path_diff(old, new) - assert result == {'Def', 'Xyz', 'Yyy'} + assert result == {"Def", "Xyz", "Yyy"} diff --git a/perfact/zodbsync/tests/test_sync.py b/perfact/zodbsync/tests/test_sync.py index d1e9348..e17c28d 100644 --- a/perfact/zodbsync/tests/test_sync.py +++ b/perfact/zodbsync/tests/test_sync.py @@ -1,20 +1,20 @@ -import os -import time -import os.path import base64 import io import json -import subprocess +import os +import os.path import pickle 
-import pytest +import random import shutil -import tempfile import string -import random +import subprocess +import tempfile +import time from contextlib import contextmanager -import ZEO +import pytest import transaction +import ZEO from AccessControl.SecurityManagement import newSecurityManager try: @@ -22,18 +22,16 @@ except ImportError: import mock +from .. import extedit, helpers, object_types, zodbsync from ..main import Runner -from .. import zodbsync -from .. import helpers -from .. import extedit -from .. import object_types from . import environment as env -class DummyResponse(): +class DummyResponse: """ For mocking the request in extedit test """ + def __init__(self, app): self.headers = {} self.app = app @@ -41,7 +39,7 @@ def __init__(self, app): def __enter__(self): self.orig_request = self.app.REQUEST self.app.REQUEST = helpers.Namespace( - _auth='dummy', + _auth="dummy", RESPONSE=self, ) return self @@ -53,34 +51,34 @@ def setHeader(self, key, value): self.headers[key] = value -class TestSync(): - ''' +class TestSync: + """ All tests defined in this class automatically use the environment fixture (ZEO, repo etc.) - ''' + """ - @pytest.fixture(scope='class', autouse=True) + @pytest.fixture(scope="class", autouse=True) def environment(self, request): - ''' + """ Fixture that is automatically used by all tests. Initializes environment and injects the elements of it into the class. 
- ''' + """ myenv = dict( zeo=env.ZeoInstance(), repo=env.Repository(), jslib=env.JSLib(), ) - myenv['zopeconfig'] = env.ZopeConfig(zeosock=myenv['zeo'].sockpath()) - myenv['config'] = env.ZODBSyncConfig(env=myenv) + myenv["zopeconfig"] = env.ZopeConfig(zeosock=myenv["zeo"].sockpath()) + myenv["config"] = env.ZODBSyncConfig(env=myenv) # inject items into class so methods can use them for key, value in myenv.items(): setattr(request.cls, key, value) # Initially record everything and commit it - self.run('record', '/') - self.gitrun('add', '.') - self.gitrun('commit', '-m', 'init') + self.run("record", "/") + self.gitrun("add", ".") + self.gitrun("commit", "-m", "init") request.cls.initial_commit = self.get_head_id() # at this point, the test is called @@ -90,32 +88,32 @@ def environment(self, request): for item in myenv.values(): item.cleanup() - @pytest.fixture(scope='function', autouse=True) + @pytest.fixture(scope="function", autouse=True) def envreset(self, request): """ Reset the environment after each test. 
""" - self.run('record', '/') + self.run("record", "/") # Call test yield - if getattr(self, 'runner', None): + if getattr(self, "runner", None): self.runner.sync.tm.abort() cmds = [ - 'reset --hard', - 'clean -dfx', - 'checkout autotest', - 'reset --hard {}'.format(self.initial_commit), + "reset --hard", + "clean -dfx", + "checkout autotest", + "reset --hard {}".format(self.initial_commit), ] for cmd in cmds: self.gitrun(*cmd.split()) - output = self.gitoutput('show-ref', '--heads') - for line in output.strip().split('\n'): + output = self.gitoutput("show-ref", "--heads") + for line in output.strip().split("\n"): commit, refname = line.split() - refname = refname[len('refs/heads/'):] - if refname != 'autotest': - self.gitrun('branch', '-D', refname) + refname = refname[len("refs/heads/") :] + if refname != "autotest": + self.gitrun("branch", "-D", refname) - self.run('playback', '--skip-errors', '/') + self.run("playback", "--skip-errors", "/") @contextmanager def newconn(self): @@ -127,14 +125,14 @@ def newconn(self): with tm: # Log in, manage_renameObject checks permissions userfolder = app.acl_users - user = userfolder.getUser('perfact').__of__(userfolder) + user = userfolder.getUser("perfact").__of__(userfolder) newSecurityManager(None, user) - yield helpers.Namespace({'tm': tm, 'app': app}) + yield helpers.Namespace({"tm": tm, "app": app}) tm.abort() conn.close() - @pytest.fixture(scope='function') + @pytest.fixture(scope="function") def conn(self, request): """ Fixture that provides a secondary connection to the same ZEO @@ -143,12 +141,12 @@ def conn(self, request): yield conn def mkrunner(self, *cmd): - ''' + """ Create or update runner for given zodbsync command - ''' - if not hasattr(self, 'runner'): + """ + if not hasattr(self, "runner"): self.runner = Runner() - result = self.runner.parse('--config', self.config.path, *cmd) + result = self.runner.parse("--config", self.config.path, *cmd) self.app = self.runner.sync.app if self.runner.sync else None 
return result @@ -157,192 +155,176 @@ def run(self, *cmd): self.mkrunner(*cmd).run() def gitrun(self, *cmd): - ''' + """ Run git command. - ''' - subprocess.check_call( - ['git', '-C', self.repo.path] + list(cmd) - ) + """ + subprocess.check_call(["git", "-C", self.repo.path] + list(cmd)) def gitoutput(self, *cmd): - ''' + """ Run git command, returning output. - ''' + """ return subprocess.check_output( - ['git', '-C', self.repo.path] + list(cmd), + ["git", "-C", self.repo.path] + list(cmd), universal_newlines=True, ) def upload_checks(self, replace_periods=True, ignore=True): - '''A bunch of asserts to call after an upload test has been performed - ''' - assert 'lib' in self.app.objectIds() - assert 'js' in self.app.lib.objectIds() - assert 'plugins' in self.app.lib.js.objectIds() - something_js = 'something_js' if replace_periods else 'something.js' + """A bunch of asserts to call after an upload test has been performed""" + assert "lib" in self.app.objectIds() + assert "js" in self.app.lib.objectIds() + assert "plugins" in self.app.lib.js.objectIds() + something_js = "something_js" if replace_periods else "something.js" assert something_js in self.app.lib.js.plugins.objectIds() - content = 'alert(1);\n' - data = helpers.to_string( - getattr(self.app.lib.js.plugins, something_js).data - ) + content = "alert(1);\n" + data = helpers.to_string(getattr(self.app.lib.js.plugins, something_js).data) assert content == data - assert 'css' in self.app.lib.objectIds() - assert 'skins' in self.app.lib.css.objectIds() - dark_css = 'dark_css' if replace_periods else 'dark.css' + assert "css" in self.app.lib.objectIds() + assert "skins" in self.app.lib.css.objectIds() + dark_css = "dark_css" if replace_periods else "dark.css" assert dark_css in self.app.lib.css.skins.objectIds() - content = 'body { background-color: black; }\n' - data = helpers.to_string( - getattr(self.app.lib.css.skins, dark_css).data - ) + content = "body { background-color: black; }\n" + data = 
helpers.to_string(getattr(self.app.lib.css.skins, dark_css).data) assert content == data # dont forget ignored files! if ignore: - assert 'ignoreme' not in self.app.lib + assert "ignoreme" not in self.app.lib def test_record(self): - '''Recorder tests''' + """Recorder tests""" # Record everything and make sure acl_users exists - assert os.path.isfile( - self.repo.path + '/__root__/acl_users/__meta__' - ) + assert os.path.isfile(self.repo.path + "/__root__/acl_users/__meta__") # Recording a non-existent object fails with pytest.raises(AttributeError): - self.run('record', '/nonexist') + self.run("record", "/nonexist") # ... unless --skip-errors is given - self.run('record', '/nonexist', '--skip-errors') + self.run("record", "/nonexist", "--skip-errors") # Recording with --lasttxn will create the file - self.run('record', '--lasttxn') - assert os.path.isfile(os.path.join(self.repo.path, '__last_txn__')) + self.run("record", "--lasttxn") + assert os.path.isfile(os.path.join(self.repo.path, "__last_txn__")) # Making a change with a comment indicating the path will make lasttxn # pick it up - tm = self.runner.sync.start_transaction(note='/testpt') - self.app.manage_addProduct['PageTemplates'].manage_addPageTemplate( - id='testpt', - text='test1' + tm = self.runner.sync.start_transaction(note="/testpt") + self.app.manage_addProduct["PageTemplates"].manage_addPageTemplate( + id="testpt", text="test1" ) tm.commit() - self.run('record', '--lasttxn') - assert os.path.isdir(os.path.join(self.repo.path, '__root__/testpt')) + self.run("record", "--lasttxn") + assert os.path.isdir(os.path.join(self.repo.path, "__root__/testpt")) def test_record_commit(self): - '''Record with --commit (but no mail and no autoreset)''' - add = ( - self.app.manage_addProduct['PageTemplates'].manage_addPageTemplate - ) + """Record with --commit (but no mail and no autoreset)""" + add = self.app.manage_addProduct["PageTemplates"].manage_addPageTemplate with self.runner.sync.tm: - add(id='test', 
text='test') - self.run('record', '/', '--commit') + add(id="test", text="test") + self.run("record", "/", "--commit") # Additional run that does no commit since nothing changed - self.run('record', '/', '--commit') - assert os.path.isdir(os.path.join(self.repo.path, '__root__/test')) - commits = self.gitoutput('log', '--format=%s') + self.run("record", "/", "--commit") + assert os.path.isdir(os.path.join(self.repo.path, "__root__/test")) + commits = self.gitoutput("log", "--format=%s") assert commits == "Generic commit message.\ninit\n" def test_record_autoreset(self): - '''Record with --commit --autoreset.''' - add = ( - self.app.manage_addProduct['PageTemplates'].manage_addPageTemplate - ) + """Record with --commit --autoreset.""" + add = self.app.manage_addProduct["PageTemplates"].manage_addPageTemplate with self.runner.sync.tm: - add(id='test', text='test') - self.run('record', '/', '--commit', '--autoreset') - assert not os.path.isdir(os.path.join(self.repo.path, '__root__/test')) - commits = self.gitoutput('log', '--format=%s') + add(id="test", text="test") + self.run("record", "/", "--commit", "--autoreset") + assert not os.path.isdir(os.path.join(self.repo.path, "__root__/test")) + commits = self.gitoutput("log", "--format=%s") assert commits == "init\n" - assert 'test' not in self.app.objectIds() + assert "test" not in self.app.objectIds() def test_record_unsupported(self): """Check that reading /error_log yields an unsupported marker or an error.""" obj = self.runner.sync.app.error_log - assert 'unsupported' in zodbsync.mod_read(obj) + assert "unsupported" in zodbsync.mod_read(obj) with pytest.raises(AssertionError): zodbsync.mod_read(obj, onerrorstop=True) def test_omit_callable_title(self): """It omits title attributes which are callable.""" app = self.app - obj = app.manage_addProduct['PageTemplates'].manage_addPageTemplate( - id='test_pt', title='Not-visible', text='test text') + obj = app.manage_addProduct["PageTemplates"].manage_addPageTemplate( + 
id="test_pt", title="Not-visible", text="test text" + ) def patch_title(): """Callable to test callable titles.""" - return 'Show-me' + return "Show-me" # Normal case result = zodbsync.mod_read(obj) - assert 'Not-visible' in result['title'] + assert "Not-visible" in result["title"] # with callable title - with mock.patch.object(obj, 'title', patch_title): + with mock.patch.object(obj, "title", patch_title): result = zodbsync.mod_read(obj) - assert 'title' not in result + assert "title" not in result def test_playback(self): - ''' + """ Record everything, change /index_html, play it back and check if the contents are correct. - ''' - path = self.repo.path + '/__root__/index_html/__source-utf8__.html' - content = '' - with open(path, 'w') as f: + """ + path = self.repo.path + "/__root__/index_html/__source-utf8__.html" + content = "" + with open(path, "w") as f: f.write(content) - self.run('playback', '/index_html') + self.run("playback", "/index_html") assert self.app.index_html() == content - def add_folder(self, name, msg=None, parent=''): + def add_folder(self, name, msg=None, parent=""): """ Add a folder to the root directory and commit it if msg is given """ - folder = os.path.join(self.repo.path, '__root__', parent, name) + folder = os.path.join(self.repo.path, "__root__", parent, name) os.mkdir(folder) - with open(folder + '/__meta__', 'w') as f: - f.write(zodbsync.mod_format({ - 'title': '', - 'type': 'Folder' - })) + with open(folder + "/__meta__", "w") as f: + f.write(zodbsync.mod_format({"title": "", "type": "Folder"})) if msg is not None: - self.gitrun('add', '.') - self.gitrun('commit', '-m', msg) + self.gitrun("add", ".") + self.gitrun("commit", "-m", msg) def get_head_id(self): """Return commit ID of current HEAD.""" - return self.gitoutput('show-ref', '--head', '--hash', 'HEAD').strip() + return self.gitoutput("show-ref", "--head", "--hash", "HEAD").strip() - def prepare_pick(self, name='TestFolder', msg='Second commit'): - ''' + def 
prepare_pick(self, name="TestFolder", msg="Second commit"): + """ Prepare a commit containing a new folder that can be picked onto the initialized repository. Returns the commit ID. - ''' + """ # Add a folder, commit it self.add_folder(name, msg) commit = self.get_head_id() # Reset the commit - self.gitrun('reset', '--hard', 'HEAD~') + self.gitrun("reset", "--hard", "HEAD~") return commit def test_pick(self): - ''' + """ Pick a prepared commit and check that the folder exists. - ''' + """ commit = self.prepare_pick() - self.run('pick', commit) + self.run("pick", commit) - assert 'TestFolder' in self.app.objectIds() + assert "TestFolder" in self.app.objectIds() def test_pick_dryrun(self): - ''' + """ Pick a prepared commit in dry-run mode and check that the folder does not exist. - ''' + """ commit = self.prepare_pick() - self.run('pick', commit, '--dry-run') + self.run("pick", commit, "--dry-run") - assert 'TestFolder' not in self.app.objectIds() + assert "TestFolder" not in self.app.objectIds() def test_pick_grep(self): """ @@ -350,33 +332,33 @@ def test_pick_grep(self): in the commit message, then pick only those. 
""" msgs = [ - 'T123: first commit', - 'T456: second commit', - 'T123: third commit', + "T123: first commit", + "T456: second commit", + "T123: third commit", ] for nr, msg in enumerate(msgs): - self.add_folder('Test' + str(nr), msg) + self.add_folder("Test" + str(nr), msg) commit = self.get_head_id() - self.gitrun('reset', '--hard', 'HEAD~3') - self.run('pick', '--grep=T123', commit) + self.gitrun("reset", "--hard", "HEAD~3") + self.run("pick", "--grep=T123", commit) ids = self.app.objectIds() - assert 'Test0' in ids - assert 'Test1' not in ids - assert 'Test2' in ids + assert "Test0" in ids + assert "Test1" not in ids + assert "Test2" in ids def test_pick_range(self): """ Prepare three commits and pick them as a range """ for i in range(3): - self.add_folder('Test' + str(i), 'Commit ' + str(i)) + self.add_folder("Test" + str(i), "Commit " + str(i)) commit = self.get_head_id() - self.gitrun('reset', '--hard', 'HEAD~3') - self.run('pick', 'HEAD..' + commit) + self.gitrun("reset", "--hard", "HEAD~3") + self.run("pick", "HEAD.." + commit) ids = self.app.objectIds() for i in range(3): - assert 'Test' + str(i) in ids + assert "Test" + str(i) in ids def test_pick_fail(self): """ @@ -384,112 +366,126 @@ def test_pick_fail(self): Also pick one applyable and one unknown commit. 
""" commit = self.prepare_pick() - for second in [commit, 'unknown']: + for second in [commit, "unknown"]: with pytest.raises(subprocess.CalledProcessError): - self.run('pick', commit, second) - assert 'TestFolder' not in self.app.objectIds() - assert not os.path.isdir(self.repo.path + '/__root__/TestFolder') + self.run("pick", commit, second) + assert "TestFolder" not in self.app.objectIds() + assert not os.path.isdir(self.repo.path + "/__root__/TestFolder") def test_upload_relpath(self): - ''' + """ Upload JS library from test environment and check for it in Data.fs Provide Data.fs path only - ''' + """ target_jslib_path = self.jslib.path - target_repo_path = os.path.join('__root__', 'lib') + target_repo_path = os.path.join("__root__", "lib") self.run( - 'upload', '--replace-periods', - '--valid-extensions', 'css,js', - target_jslib_path, target_repo_path + "upload", + "--replace-periods", + "--valid-extensions", + "css,js", + target_jslib_path, + target_repo_path, ) self.upload_checks() # we may even omit __root__ in path! target_jslib_path = self.jslib.path - target_repo_path = os.path.join('lib') + target_repo_path = os.path.join("lib") self.run( - 'upload', '--replace-periods', - '--valid-extensions', 'css,js', - target_jslib_path, target_repo_path + "upload", + "--replace-periods", + "--valid-extensions", + "css,js", + target_jslib_path, + target_repo_path, ) self.upload_checks() # add another test case showing dot notation also works target_jslib_path = self.jslib.path - target_repo_path = os.path.join('.', 'lib') + target_repo_path = os.path.join(".", "lib") self.run( - 'upload', '--replace-periods', - '--valid-extensions', 'css,js', - target_jslib_path, target_repo_path + "upload", + "--replace-periods", + "--valid-extensions", + "css,js", + target_jslib_path, + target_repo_path, ) self.upload_checks() def test_upload_options(self): - ''' + """ Test upload with different options settings. 
- ''' + """ target_jslib_path = self.jslib.path - target_repo_path = os.path.join('__root__', 'lib') + target_repo_path = os.path.join("__root__", "lib") self.run( - 'upload', - '--valid-extensions', 'css,js', - target_jslib_path, target_repo_path + "upload", + "--valid-extensions", + "css,js", + target_jslib_path, + target_repo_path, ) self.upload_checks(replace_periods=False) - self.run( - 'upload', '--replace-periods', - target_jslib_path, target_repo_path - ) + self.run("upload", "--replace-periods", target_jslib_path, target_repo_path) self.upload_checks(ignore=False) self.run( - 'upload', '--replace-periods', - '--valid-extensions', ' ,,css,js, ', - target_jslib_path, target_repo_path + "upload", + "--replace-periods", + "--valid-extensions", + " ,,css,js, ", + target_jslib_path, + target_repo_path, ) self.upload_checks(ignore=True) - self.run( - 'upload', - target_jslib_path, target_repo_path - ) + self.run("upload", target_jslib_path, target_repo_path) self.upload_checks(replace_periods=False, ignore=False) def test_upload_relpath_fromrepo(self): - ''' + """ change working directory to repository before upload to simulate calling upload from repo leveraging bash path completion - ''' + """ cur_path = os.getcwd() os.chdir(self.repo.path) target_jslib_path = self.jslib.path - target_repo_path = os.path.join('.', '__root__', 'lib') + target_repo_path = os.path.join(".", "__root__", "lib") self.run( - 'upload', '--replace-periods', - '--valid-extensions', 'css,js', - target_jslib_path, target_repo_path + "upload", + "--replace-periods", + "--valid-extensions", + "css,js", + target_jslib_path, + target_repo_path, ) self.upload_checks() target_jslib_path = self.jslib.path - target_repo_path = os.path.join('__root__', 'lib') + target_repo_path = os.path.join("__root__", "lib") self.run( - 'upload', '--replace-periods', - '--valid-extensions', 'css,js', - target_jslib_path, target_repo_path + "upload", + "--replace-periods", + "--valid-extensions", + "css,js", + 
target_jslib_path, + target_repo_path, ) self.upload_checks() @@ -497,20 +493,23 @@ def test_upload_relpath_fromrepo(self): os.chdir(cur_path) def test_upload_dryrun(self): - ''' + """ Upload files in dryrun mode, make sure folder is not found in Data.fs - ''' + """ target_jslib_path = self.jslib.path - target_repo_path = os.path.join('__root__', 'lib') + target_repo_path = os.path.join("__root__", "lib") self.run( - 'upload', '--replace-periods', - '--valid-extensions', 'css,js', - target_jslib_path, target_repo_path, - '--dry-run' + "upload", + "--replace-periods", + "--valid-extensions", + "css,js", + target_jslib_path, + target_repo_path, + "--dry-run", ) - assert 'lib' not in self.app.objectIds() + assert "lib" not in self.app.objectIds() def test_emptying_userdefined_roles(self): """ @@ -518,14 +517,14 @@ def test_emptying_userdefined_roles(self): able to remove them. """ with self.runner.sync.tm: - self.app._addRole('TestRole') - self.run('record', '/') - fname = self.repo.path + '/__root__/__meta__' - with open(fname, 'r') as f: + self.app._addRole("TestRole") + self.run("record", "/") + fname = self.repo.path + "/__root__/__meta__" + with open(fname, "r") as f: lines = f.readlines() - with open(fname, 'w') as f: - f.writelines([line for line in lines if 'TestRole' not in line]) - self.runner.sync.playback_paths(paths=['/'], recurse=False) + with open(fname, "w") as f: + f.writelines([line for line in lines if "TestRole" not in line]) + self.runner.sync.playback_paths(paths=["/"], recurse=False) assert self.app.userdefined_roles() == () def test_userdefined_roles_playback(self): @@ -536,87 +535,85 @@ def test_userdefined_roles_playback(self): recording matches the first one. 
""" with self.runner.sync.tm: - self.app._addRole('TestRole') - self.app.manage_setLocalRoles('perfact', ('TestRole',)) - self.run('record', '/') + self.app._addRole("TestRole") + self.app.manage_setLocalRoles("perfact", ("TestRole",)) + self.run("record", "/") - fname = self.repo.path + '/__root__/__meta__' - with open(fname, 'r') as f: + fname = self.repo.path + "/__root__/__meta__" + with open(fname, "r") as f: recording = f.read() - self.runner.sync.playback_paths(paths=['/'], recurse=False) - assert self.app.get_local_roles() == (('perfact', ('TestRole',)),) - self.runner.sync.record('/', recurse=False) - with open(fname, 'r') as f: + self.runner.sync.playback_paths(paths=["/"], recurse=False) + assert self.app.get_local_roles() == (("perfact", ("TestRole",)),) + self.runner.sync.record("/", recurse=False) + with open(fname, "r") as f: assert recording == f.read() def test_addprop(self): "Add a property to the root object" - fname = self.repo.path + '/__root__/__meta__' - with open(fname, 'r') as f: + fname = self.repo.path + "/__root__/__meta__" + with open(fname, "r") as f: content = f.read() data = dict(helpers.literal_eval(content)) prop = { - 'id': 'testprop', - 'type': 'string', - 'value': 'test', + "id": "testprop", + "type": "string", + "value": "test", } - data['props'] = [list(prop.items())] - with open(fname, 'w') as f: + data["props"] = [list(prop.items())] + with open(fname, "w") as f: f.write(zodbsync.mod_format(data)) - self.run('playback', '/') - assert self.app.getProperty('testprop') == 'test' + self.run("playback", "/") + assert self.app.getProperty("testprop") == "test" def test_addtokenprop(self): "Validate tokens are correctly written" - fname = self.repo.path + '/__root__/__meta__' - with open(fname, 'r') as f: + fname = self.repo.path + "/__root__/__meta__" + with open(fname, "r") as f: content = f.read() data = dict(helpers.literal_eval(content)) prop = { - 'id': 'testprop', - 'type': 'tokens', - 'value': ('123', '518'), + "id": 
"testprop", + "type": "tokens", + "value": ("123", "518"), } - data['props'] = [list(prop.items())] - with open(fname, 'w') as f: + data["props"] = [list(prop.items())] + with open(fname, "w") as f: f.write(zodbsync.mod_format(data)) - self.run('playback', '/') - assert self.app.getProperty('testprop') == ('123', '518') + self.run("playback", "/") + assert self.app.getProperty("testprop") == ("123", "518") def test_changeprop(self): "Change first the value and then the type of a property" with self.runner.sync.tm: - self.app.manage_addProperty( - 'testprop', 'test', 'string' - ) - fname = self.repo.path + '/__root__/__meta__' - self.run('record', '/') - with open(fname, 'r') as f: + self.app.manage_addProperty("testprop", "test", "string") + fname = self.repo.path + "/__root__/__meta__" + self.run("record", "/") + with open(fname, "r") as f: content = f.read() data = dict(helpers.literal_eval(content)) - for ptype, pval in [('string', 'changed'), ('int', 1)]: + for ptype, pval in [("string", "changed"), ("int", 1)]: prop = { - 'id': 'testprop', - 'type': ptype, - 'value': pval, + "id": "testprop", + "type": ptype, + "value": pval, } - data['props'] = [list(prop.items())] - with open(fname, 'w') as f: + data["props"] = [list(prop.items())] + with open(fname, "w") as f: f.write(zodbsync.mod_format(data)) - self.run('playback', '/') - assert self.app.getProperty('testprop') == pval - assert self.app.getPropertyType('testprop') == ptype + self.run("playback", "/") + assert self.app.getProperty("testprop") == pval + assert self.app.getPropertyType("testprop") == ptype def test_cacheable(self): "Add a RamCacheManager and use it for index_html" - self.app.manage_addProduct[ - 'StandardCacheManagers' - ].manage_addRAMCacheManager(id="http_cache") + self.app.manage_addProduct["StandardCacheManagers"].manage_addRAMCacheManager( + id="http_cache" + ) self.app.index_html.ZCacheable_setManagerId("http_cache") - self.run('record', '/') - fname = self.repo.path + 
'/__root__/index_html/__meta__' + self.run("record", "/") + fname = self.repo.path + "/__root__/index_html/__meta__" assert "http_cache" in open(fname).read() - self.run('playback', '/') + self.run("playback", "/") assert self.app.index_html.ZCacheable_getManagerId() == "http_cache" def watcher_step_until(self, watcher, cond): @@ -643,16 +640,15 @@ def test_watch_change(self, conn): yet visible, then commit the change and do another step, making sure that it is now present. """ - fname = self.repo.path + '/__root__/__meta__' - watcher = self.mkrunner('watch') + fname = self.repo.path + "/__root__/__meta__" + watcher = self.mkrunner("watch") watcher.setup() conn.tm.begin() - conn.app._addRole('TestRole') + conn.app._addRole("TestRole") watcher.step() - assert 'TestRole' not in open(fname).read() + assert "TestRole" not in open(fname).read() conn.tm.commit() - self.watcher_step_until(watcher, - lambda: 'TestRole' in open(fname).read()) + self.watcher_step_until(watcher, lambda: "TestRole" in open(fname).read()) def test_watch_move(self, conn): """ @@ -661,58 +657,57 @@ def test_watch_move(self, conn): three-way-rename in one transaction, making sure the watcher keeps track. 
""" - watcher = self.mkrunner('watch') + watcher = self.mkrunner("watch") watcher.setup() - root = self.repo.path + '/__root__/' - src = '/__source-utf8__.html' + root = self.repo.path + "/__root__/" + src = "/__source-utf8__.html" app = conn.app - add = app.manage_addProduct['PageTemplates'].manage_addPageTemplate + add = app.manage_addProduct["PageTemplates"].manage_addPageTemplate rename = app.manage_renameObject with conn.tm: - add(id='test1', text='test1') - self.watcher_step_until(watcher, - lambda: os.path.isdir(root + 'test1')) + add(id="test1", text="test1") + self.watcher_step_until(watcher, lambda: os.path.isdir(root + "test1")) with conn.tm: - rename('test1', 'test2') - self.watcher_step_until(watcher, lambda: os.path.isdir(root + 'test2')) - assert not os.path.isdir(root + 'test1') + rename("test1", "test2") + self.watcher_step_until(watcher, lambda: os.path.isdir(root + "test2")) + assert not os.path.isdir(root + "test1") with conn.tm: - add(id='test1', text='test2') - self.watcher_step_until(watcher, lambda: os.path.isdir(root + 'test1')) + add(id="test1", text="test2") + self.watcher_step_until(watcher, lambda: os.path.isdir(root + "test1")) - assert os.path.isdir(root + 'test1') - assert open(root + 'test1' + src).read() == 'test2' - assert open(root + 'test2' + src).read() == 'test1' + assert os.path.isdir(root + "test1") + assert open(root + "test1" + src).read() == "test2" + assert open(root + "test2" + src).read() == "test1" with conn.tm: - rename('test1', 'tmp') - rename('test2', 'test1') - rename('tmp', 'test2') + rename("test1", "tmp") + rename("test2", "test1") + rename("tmp", "test2") self.watcher_step_until( watcher, - lambda: open(root + 'test1' + src).read() == 'test1', + lambda: open(root + "test1" + src).read() == "test1", ) - assert open(root + 'test1' + src).read() == 'test1' - assert open(root + 'test2' + src).read() == 'test2' + assert open(root + "test1" + src).read() == "test1" + assert open(root + "test2" + src).read() == 
"test2" def test_watch_dump_setup(self): """ Check output that a spawned initialization subprocess would generate. """ - watcher = self.mkrunner('watch') + watcher = self.mkrunner("watch") watcher.setup() stream = io.BytesIO() watcher.dump_setup_data(stream=stream) data = pickle.loads(stream.getvalue()) - assert set(data.keys()) == {'tree', 'txn', 'add_oids'} - tofind = ['/', '/acl_users/', '/index_html/'] - for obj in data['tree'].values(): - if obj['path'] in tofind: - tofind.remove(obj['path']) + assert set(data.keys()) == {"tree", "txn", "add_oids"} + tofind = ["/", "/acl_users/", "/index_html/"] + for obj in data["tree"].values(): + if obj["path"] in tofind: + tofind.remove(obj["path"]) assert tofind == [] def test_ff(self): @@ -721,80 +716,78 @@ def test_ff(self): perform a fast-forward merge to it, and verify that the change is correctly applied. """ - self.gitrun('checkout', '-b', 'second') - path = self.repo.path + '/__root__/index_html/__meta__' + self.gitrun("checkout", "-b", "second") + path = self.repo.path + "/__root__/index_html/__meta__" with open(path) as f: lines = f.readlines() lines = [ - line if "('title', " not in line - else " ('title', 'test-ff'),\n" + line if "('title', " not in line else " ('title', 'test-ff'),\n" for line in lines ] - with open(path, 'w') as f: + with open(path, "w") as f: f.writelines(lines) - self.gitrun('commit', '-a', '-m', 'Change title via ff') + self.gitrun("commit", "-a", "-m", "Change title via ff") - self.gitrun('checkout', 'autotest') - self.run('ff', 'second') - assert self.app.index_html.title == 'test-ff' + self.gitrun("checkout", "autotest") + self.run("ff", "second") + assert self.app.index_html.title == "test-ff" def test_reset(self): """ Change the title of index_html in a second branch, reset to it and check that it is played back correctly. 
""" - self.gitrun('checkout', '-b', 'second') - path = self.repo.path + '/__root__/index_html/__meta__' + self.gitrun("checkout", "-b", "second") + path = self.repo.path + "/__root__/index_html/__meta__" with open(path) as f: lines = f.readlines() lines = [ - line if "('title', " not in line - else " ('title', 'test'),\n" + line if "('title', " not in line else " ('title', 'test'),\n" for line in lines ] - with open(path, 'w') as f: + with open(path, "w") as f: f.writelines(lines) - self.gitrun('commit', '-a', '-m', 'Change title') - self.gitrun('checkout', 'autotest') - self.run('reset', 'second') - assert self.app.index_html.title == 'test' + self.gitrun("commit", "-a", "-m", "Change title") + self.gitrun("checkout", "autotest") + self.run("reset", "second") + assert self.app.index_html.title == "test" def test_revert(self): """ Do the same as in test_reset, but afterwards revert it. """ self.test_reset() - self.run('exec', 'git revert HEAD') + self.run("exec", "git revert HEAD") title = self.app.index_html.title - assert title != 'test' + assert title != "test" def test_checkout(self): """ Switch to another branch """ - self.run('checkout', '-b', 'other') + self.run("checkout", "-b", "other") # This switches back to autotest, but with a change self.test_reset() - self.run('checkout', 'other') - assert self.app.index_html.title != 'test' - self.run('checkout', 'autotest') - assert self.app.index_html.title == 'test' + self.run("checkout", "other") + assert self.app.index_html.title != "test" + self.run("checkout", "autotest") + assert self.app.index_html.title == "test" def test_exec_checkout(self): """ Prepare two branches and switch between them. 
""" - self.gitrun('branch', 'other') + self.gitrun("branch", "other") self.test_reset() - self.run('exec', 'git checkout other') + self.run("exec", "git checkout other") title = self.app.index_html.title - assert title != 'test' + assert title != "test" def test_withlock(self): "Running with-lock and, inside that, --no-lock, works" self.run( - 'with-lock', - 'zodbsync --config {} --no-lock record /'.format(self.config.path), + "with-lock", + "zodbsync --config {} --no-lock record /".format(self.config.path), ) def test_extedit(self, encoding=None): @@ -802,25 +795,23 @@ def test_extedit(self, encoding=None): Update /index_html using the external editor launcher """ header_lines = [ - 'url: index_html', - 'path: //index_html', - 'auth: dummy', - 'meta-type: Page Template', - 'content-type: text/html', + "url: index_html", + "path: //index_html", + "auth: dummy", + "meta-type: Page Template", + "content-type: text/html", ] - new_source = 'test' + new_source = "test" with DummyResponse(self.app) as resp: # Read control file content = extedit.launch( self.app, self.app.index_html, - '/index_html', - ) - headers, orig_source = content.split('\n\n', 1) - assert headers == '\n'.join(header_lines) - assert resp.headers['Content-Type'] == ( - 'application/x-perfact-zopeedit' + "/index_html", ) + headers, orig_source = content.split("\n\n", 1) + assert headers == "\n".join(header_lines) + assert resp.headers["Content-Type"] == ("application/x-perfact-zopeedit") # Update to new content if encoding: @@ -831,69 +822,69 @@ def test_extedit(self, encoding=None): res = extedit.launch( self.app, self.app.index_html, - '/index_html', + "/index_html", source=new_source, orig_source=orig_source, encoding=encoding, ) - assert 'success' in res - assert resp.headers['Content-Type'] == 'application/json' - assert self.app.index_html._text == 'test' + assert "success" in res + assert resp.headers["Content-Type"] == "application/json" + assert self.app.index_html._text == "test" # Try the 
update again, which must fail because the orig_source no # longer matches res = extedit.launch( self.app, self.app.index_html, - '/index_html', + "/index_html", source=new_source, orig_source=orig_source, encoding=encoding, ) - assert 'error' in json.loads(res) + assert "error" in json.loads(res) # Check for error on invalid path res = extedit.launch( self.app, self.app, - '/nonexist', - source='', - orig_source='', + "/nonexist", + source="", + orig_source="", ) assert res == '{"error": "/nonexist not found"}' def test_extedit_base64(self): - self.test_extedit(encoding='base64') + self.test_extedit(encoding="base64") def test_extedit_binary(self): "Test with binary file that is not valid UTF-8" - self.app.manage_addProduct['OFSP'].manage_addFile(id='blob') + self.app.manage_addProduct["OFSP"].manage_addFile(id="blob") with DummyResponse(self.app): extedit.launch( self.app, self.app, - '/blob', - source=helpers.to_string(base64.b64encode(b'\xff')), - orig_source='', - encoding='base64', + "/blob", + source=helpers.to_string(base64.b64encode(b"\xff")), + orig_source="", + encoding="base64", ) - assert self.app.blob.data == b'\xff' + assert self.app.blob.data == b"\xff" res = extedit.launch( self.app, self.app.blob, - '/blob', + "/blob", ) - assert res.endswith('\n\n/w==') + assert res.endswith("\n\n/w==") def meta_file_path(self, *folders): """ takes n folders in order as arguments and returns path to meta file """ - path = self.repo.path + '/__root__/' + path = self.repo.path + "/__root__/" for folder in folders: - path = path + folder + '/' - path = path + '__meta__' + path = path + folder + "/" + path = path + "__meta__" return path def test_record_structure_and_playback_local_changes(self): @@ -908,32 +899,41 @@ def test_record_structure_and_playback_local_changes(self): s_folder_1 = "s_folder_1" self.app.manage_addFolder(id=folder_1) self.app.folder_1.manage_addFolder(id=s_folder_1, title=s_folder_1) - assert 's_folder_1' in self.app.folder_1.objectIds() + 
assert "s_folder_1" in self.app.folder_1.objectIds() # record structure and check that the objects are recorded - self.run('record', '/') + self.run("record", "/") assert os.path.isfile(self.meta_file_path(folder_1, s_folder_1)) # set new title path = self.meta_file_path(folder_1, s_folder_1) - new_title = 'new_title' - content = "[('title', '"+new_title+"'),('type', 'Folder'),]" - with open(path, 'w') as f: + new_title = "new_title" + content = "[('title', '" + new_title + "'),('type', 'Folder'),]" + with open(path, "w") as f: f.write(content) # create metadata for new folder new_folder = "new_folder" - path = self.repo.path + \ - '/__root__/'+folder_1+'/'+s_folder_1+'/'+new_folder + path = ( + self.repo.path + + "/__root__/" + + folder_1 + + "/" + + s_folder_1 + + "/" + + new_folder + ) os.mkdir(path) - with open(path + '/__meta__', 'w') as f: - f.write('''[ + with open(path + "/__meta__", "w") as f: + f.write( + """[ ('id', '{}'), ('title', ''), ('type', 'Folder'), - ]'''.format(new_folder)) + ]""".format(new_folder) + ) # playback changes and check if they're existent - self.run('playback', '/') + self.run("playback", "/") assert new_title == self.app.folder_1.s_folder_1.title assert new_folder in self.app.folder_1.s_folder_1.objectIds() @@ -945,7 +945,7 @@ def test_watch_structure_changes_and_playback_local_changes(self, conn): """ # start watch daemon - watcher = self.mkrunner('watch') + watcher = self.mkrunner("watch") watcher.setup() app = conn.app folder_1 = "folder_1" @@ -954,33 +954,33 @@ def test_watch_structure_changes_and_playback_local_changes(self, conn): # create folder and wait until watch notices change with conn.tm: app.manage_addFolder(id=folder_1) - self.watcher_step_until(watcher, - lambda: os.path.isdir( - self.repo.path + '/__root__/'+folder_1)) + self.watcher_step_until( + watcher, lambda: os.path.isdir(self.repo.path + "/__root__/" + folder_1) + ) # create subfolder and wait until watch notices change with conn.tm: 
app.folder_1.manage_addFolder(id=s_folder_1, title=s_folder_1) - path = self.repo.path + '/__root__/'+folder_1+'/'+s_folder_1 - self.watcher_step_until(watcher, - lambda: os.path.isdir(path)) + path = self.repo.path + "/__root__/" + folder_1 + "/" + s_folder_1 + self.watcher_step_until(watcher, lambda: os.path.isdir(path)) # change title new_title = "new_title" path = self.meta_file_path(folder_1, s_folder_1) - content = "[('title', '"+new_title+"'),('type', 'Folder'),]" - with open(path, 'w') as f: + content = "[('title', '" + new_title + "'),('type', 'Folder'),]" + with open(path, "w") as f: f.write(content) # playback changes and check if those are existent in zodb - self.run('playback', '/') + self.run("playback", "/") assert new_title == self.app.folder_1.s_folder_1.title # wait for watch to notices played back changes with open(path) as f: meta = f.read() - self.watcher_step_until(watcher, - lambda: "('title', '"+new_title+"')" in meta) + self.watcher_step_until( + watcher, lambda: "('title', '" + new_title + "')" in meta + ) def test_watch_structure_changes_and_playback_deleted_folder(self, conn): """ @@ -990,7 +990,7 @@ def test_watch_structure_changes_and_playback_deleted_folder(self, conn): """ # start watch daemon - watcher = self.mkrunner('watch') + watcher = self.mkrunner("watch") watcher.setup() app = conn.app folder_1 = "folder_1" @@ -999,55 +999,54 @@ def test_watch_structure_changes_and_playback_deleted_folder(self, conn): # create folder and wait until watch notices change with conn.tm: app.manage_addFolder(id=folder_1) - self.watcher_step_until(watcher, - lambda: os.path.isdir( - self.repo.path + '/__root__/'+folder_1)) + self.watcher_step_until( + watcher, lambda: os.path.isdir(self.repo.path + "/__root__/" + folder_1) + ) # create subfolder and wait until watch notices change with conn.tm: app.folder_1.manage_addFolder(id=s_folder_1, title=s_folder_1) - path = self.repo.path + '/__root__/'+folder_1+'/'+s_folder_1 - 
self.watcher_step_until(watcher, - lambda: os.path.isdir(path)) + path = self.repo.path + "/__root__/" + folder_1 + "/" + s_folder_1 + self.watcher_step_until(watcher, lambda: os.path.isdir(path)) # remove folder s_folder_1 shutil.rmtree(path) # playback changes and check if those are existent in zodb - self.run('playback', '/') + self.run("playback", "/") # wait for watch to notices played back changes self.watcher_step_until(watcher, lambda: not os.path.isdir(path)) def test_commit_on_branch_and_exec_merge(self): - ''' + """ change to a git feature branch and create a structure there, commit it and change back to the autotest branch on autotest branch check if changes from feature arent existent, then merge feature branch and check if changes have been applied correctly - ''' + """ # change to feature branch and commit created folder/ subfolder branch = "feature" folder_1 = "folder_1" s_folder_1 = "s_folder_1" - self.run('exec', 'git checkout -b {}'.format(branch)) + self.run("exec", "git checkout -b {}".format(branch)) self.app.manage_addFolder(id=folder_1) self.app.folder_1.manage_addFolder(id=s_folder_1) assert s_folder_1 in self.app.folder_1.objectIds() - self.run('record', '/') + self.run("record", "/") assert os.path.isfile(self.meta_file_path(folder_1, s_folder_1)) - self.gitrun('add', '-A') - self.gitrun('commit', '-m', 'test case 3') + self.gitrun("add", "-A") + self.gitrun("commit", "-m", "test case 3") # checkout to autotest and check that changes are not yet existent - self.run('exec', 'git checkout autotest') + self.run("exec", "git checkout autotest") assert not os.path.isfile(self.meta_file_path(folder_1, s_folder_1)) assert folder_1 not in self.app.objectIds() # merge feature branch and check that changes are applied - self.run('exec', 'git merge {}'.format(branch)) + self.run("exec", "git merge {}".format(branch)) assert os.path.isfile(self.meta_file_path(folder_1, s_folder_1)) assert folder_1 in self.app.objectIds() @@ -1061,17 +1060,17 @@ def 
test_failing_playback_corrupt_metadata(self): # create new folder and record it folder_1 = "folder_1" self.app.manage_addFolder(id=folder_1) - self.run('record', '/') + self.run("record", "/") # break metadata - path = self.repo.path + '/__root__/'+folder_1+'/__meta__' + path = self.repo.path + "/__root__/" + folder_1 + "/__meta__" content = "[('gandalf', 'ThisIsAWrongKey'),]" - with open(path, 'w') as f: + with open(path, "w") as f: f.write(content) # test that playback fails with pytest.raises(KeyError): - self.run('playback', '/') + self.run("playback", "/") def test_failing_exec_commands(self): """ @@ -1079,17 +1078,15 @@ def test_failing_exec_commands(self): check if exceptions are thrown correctly """ with pytest.raises(subprocess.CalledProcessError): - self.run('exec', 'revert ThisIsDefinitelyNoCommit') + self.run("exec", "revert ThisIsDefinitelyNoCommit") with pytest.raises(subprocess.CalledProcessError): - self.run('exec', 'reset ThisIsDefinitelyNoCommit') + self.run("exec", "reset ThisIsDefinitelyNoCommit") with pytest.raises(subprocess.CalledProcessError): - self.run('exec', 'cherry-pick ThisIsDefinitelyNoCommit') + self.run("exec", "cherry-pick ThisIsDefinitelyNoCommit") - def test_create_multiple_commits_on_branch_and_pick_single_on_autotest( - self - ): + def test_create_multiple_commits_on_branch_and_pick_single_on_autotest(self): """ create a feature branch on which two changes will be commited to one commit each @@ -1098,30 +1095,30 @@ def test_create_multiple_commits_on_branch_and_pick_single_on_autotest( make sure only the last changes are present """ branch = "feature" - self.gitrun('checkout', '-b', branch) + self.gitrun("checkout", "-b", branch) # make first changes and commit those folder_1 = "folder_1" self.app.manage_addFolder(id=folder_1) assert folder_1 in self.app.objectIds() - self.run('record', '/') + self.run("record", "/") assert os.path.isfile(self.meta_file_path(folder_1)) - self.gitrun('add', '-A') - self.gitrun('commit', '-m', 
'pick_commit_1') + self.gitrun("add", "-A") + self.gitrun("commit", "-m", "pick_commit_1") # make second changes and commit those folder_2 = "sf_2_tc6" self.app.manage_addFolder(id=folder_2) assert folder_2 in self.app.objectIds() - self.run('record', '/') + self.run("record", "/") assert os.path.isfile(self.meta_file_path(folder_2)) - self.gitrun('add', '-A') - self.gitrun('commit', '-m', 'pick_commit_2') + self.gitrun("add", "-A") + self.gitrun("commit", "-m", "pick_commit_2") commit = self.get_head_id() # checkout autotest and check both changes aren't existent - self.run('exec', 'git checkout autotest') + self.run("exec", "git checkout autotest") assert not os.path.isfile(self.meta_file_path(folder_1)) assert folder_1 not in self.app.objectIds() assert not os.path.isfile(self.meta_file_path(folder_2)) @@ -1129,7 +1126,7 @@ def test_create_multiple_commits_on_branch_and_pick_single_on_autotest( # pick 2nd commit and check that # first arent' but second changes are applied - self.run('pick', commit) + self.run("pick", commit) assert not os.path.isfile(self.meta_file_path(folder_1)) assert folder_1 not in self.app.objectIds() assert os.path.isfile(self.meta_file_path(folder_2)) @@ -1152,33 +1149,41 @@ def test_create_structure_and_reset_commits(self): self.app.manage_addFolder(id=folder_1) self.app.folder_1.manage_addFolder(id=s_folder_1, title=s_folder_1) assert s_folder_1 in self.app.folder_1.objectIds() - self.run('record', '/') + self.run("record", "/") assert os.path.isfile(self.meta_file_path(folder_1, s_folder_1)) - self.gitrun('add', '-A') - self.gitrun('commit', '-m', 'reset_commit_1') + self.gitrun("add", "-A") + self.gitrun("commit", "-m", "reset_commit_1") # create second changes and commit those - path = self.repo.path + \ - '/__root__/'+folder_1+'/'+s_folder_1+'/__meta__' + path = self.repo.path + "/__root__/" + folder_1 + "/" + s_folder_1 + "/__meta__" new_title = "new_title" - content = "[('title', '"+new_title+"'),('type', 'Folder'),]" - with 
open(path, 'w') as f: + content = "[('title', '" + new_title + "'),('type', 'Folder'),]" + with open(path, "w") as f: f.write(content) new_folder = "new_folder" - path = self.repo.path + \ - '/__root__/'+folder_1+'/'+s_folder_1+'/'+new_folder + path = ( + self.repo.path + + "/__root__/" + + folder_1 + + "/" + + s_folder_1 + + "/" + + new_folder + ) os.mkdir(path) - with open(path + '/__meta__', 'w') as f: - f.write('''[ + with open(path + "/__meta__", "w") as f: + f.write( + """[ ('id', '{}'), ('title', ''), ('type', 'Folder'), - ]'''.format(new_folder)) - self.run('playback', '/') + ]""".format(new_folder) + ) + self.run("playback", "/") - self.gitrun('add', '-A') - self.gitrun('commit', '-m', 'reset_commit_2') + self.gitrun("add", "-A") + self.gitrun("commit", "-m", "reset_commit_2") # check that changes are existent in zodb assert new_title == self.app.folder_1.s_folder_1.title @@ -1186,7 +1191,7 @@ def test_create_structure_and_reset_commits(self): # reset HEAD by one commit and check that second changes are # not existent anymore but first changes still are - self.run('reset', 'HEAD~1') + self.run("reset", "HEAD~1") assert folder_1 in self.app.objectIds() assert s_folder_1 in self.app.folder_1.objectIds() assert os.path.isfile(self.meta_file_path(folder_1, s_folder_1)) @@ -1195,11 +1200,11 @@ def test_create_structure_and_reset_commits(self): # reset HEAD by one commit and check that first changes are # not existent anymore - self.run('reset', 'HEAD~1') + self.run("reset", "HEAD~1") assert folder_1 not in self.app.objectIds() assert not os.path.isfile(self.meta_file_path(folder_1)) - @pytest.mark.parametrize('meta_type', object_types.object_handlers) + @pytest.mark.parametrize("meta_type", object_types.object_handlers) def test_objecttypes(self, meta_type): """ Generic test that is executed for each coded object type. 
This creates @@ -1208,31 +1213,35 @@ def test_objecttypes(self, meta_type): products that are not published on pypi or because they need external ressources like non-free libraries for external data connections. """ - if meta_type in ['DTML TeX', 'ZForce', 'External Method', - 'Z cxOracle Database Connection', - 'Z sap Database Connection']: + if meta_type in [ + "DTML TeX", + "ZForce", + "External Method", + "Z cxOracle Database Connection", + "Z sap Database Connection", + ]: pytest.skip("Skipping objects that require elaborate dependencies") - if 'Test' not in self.app.objectIds(): - self.app.manage_addProduct['OFSP'].manage_addFolder(id='Test') - if meta_type in ['User Folder', 'Simple User Folder']: - objid = 'acl_users' + if "Test" not in self.app.objectIds(): + self.app.manage_addProduct["OFSP"].manage_addFolder(id="Test") + if meta_type in ["User Folder", "Simple User Folder"]: + objid = "acl_users" else: - objid = 'testobj' + objid = "testobj" parent = self.app.Test handler = object_types.object_handlers[meta_type] # data that is required by some objects and ignored by others add_data = { - 'title': 'test', - 'content_type': 'text/plain', - 'connection_id': 'dbconn', - 'connection_string': '', - 'autocommit': False, - 'maxrows': 100, - 'args': '', - 'source': '', - 'smtp_host': 'localhost', - 'smtp_port': '25', + "title": "test", + "content_type": "text/plain", + "connection_id": "dbconn", + "connection_string": "", + "autocommit": False, + "maxrows": 100, + "args": "", + "source": "", + "smtp_host": "localhost", + "smtp_port": "25", } handler.create(parent, add_data, objid) obj = getattr(parent, objid) @@ -1247,34 +1256,38 @@ def test_ordered_folder_playback(self): end was still placing it at the end. 
""" with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addOrderedFolder( - id="Test" - ) - self.app.Test.manage_addProduct['OFSP'].manage_addFolder( - id="exist" - ) - assert self.app.Test.objectIds() == ['exist'] - self.run('record', '/') + self.app.manage_addProduct["OFSP"].manage_addOrderedFolder(id="Test") + self.app.Test.manage_addProduct["OFSP"].manage_addFolder(id="exist") + assert self.app.Test.objectIds() == ["exist"] + self.run("record", "/") - folder = self.repo.path + '/__root__/Test/' + folder = self.repo.path + "/__root__/Test/" - new_folder = folder + 'new' + new_folder = folder + "new" os.mkdir(new_folder) - with open(os.path.join(new_folder, '__meta__'), 'w') as f: - f.write(zodbsync.mod_format({ - "title": "", - "type": "Folder", - })) - - with open(folder + '__meta__', 'w') as f: - f.write(zodbsync.mod_format({ - "contents": ["new", "exist"], - "title": "", - "type": "Folder (Ordered)", - })) - self.run('playback', '--no-recurse', '/Test', '/Test/new') - assert self.app.Test.objectIds() == ['new', 'exist'] + with open(os.path.join(new_folder, "__meta__"), "w") as f: + f.write( + zodbsync.mod_format( + { + "title": "", + "type": "Folder", + } + ) + ) + + with open(folder + "__meta__", "w") as f: + f.write( + zodbsync.mod_format( + { + "contents": ["new", "exist"], + "title": "", + "type": "Folder (Ordered)", + } + ) + ) + self.run("playback", "--no-recurse", "/Test", "/Test/new") + assert self.app.Test.objectIds() == ["new", "exist"] def test_change_folder_type(self): """ @@ -1286,50 +1299,63 @@ def test_change_folder_type(self): stay the same. Also change the type of a folder without children. 
""" + def add(parent, fid): - parent.manage_addProduct['OFSP'].manage_addFolder(id=fid) + parent.manage_addProduct["OFSP"].manage_addFolder(id=fid) with self.runner.sync.tm: - add(self.app, 'Test') - for child in ['A', 'B', 'C']: + add(self.app, "Test") + for child in ["A", "B", "C"]: add(self.app.Test, child) - self.run('record', '/') - meta = '{}/__root__/Test/__meta__'.format(self.repo.path) - - with open(meta, 'w') as f: - f.write(zodbsync.mod_format({ - 'contents': ['B', 'A'], - 'title': 'change', - 'type': 'Folder (Ordered)', - })) + self.run("record", "/") + meta = "{}/__root__/Test/__meta__".format(self.repo.path) + + with open(meta, "w") as f: + f.write( + zodbsync.mod_format( + { + "contents": ["B", "A"], + "title": "change", + "type": "Folder (Ordered)", + } + ) + ) orig_oid = self.app.Test.A._p_oid - self.run('playback', '/Test', '--override') - assert self.app.Test.meta_type == 'Folder (Ordered)' + self.run("playback", "/Test", "--override") + assert self.app.Test.meta_type == "Folder (Ordered)" ids = self.app.Test.objectIds() - assert sorted(ids) == ['A', 'B', 'C'] - assert ids.index('B') < ids.index('A') + assert sorted(ids) == ["A", "B", "C"] + assert ids.index("B") < ids.index("A") assert self.app.Test.A._p_oid == orig_oid - with open(meta, 'w') as f: - f.write(zodbsync.mod_format({ - 'title': 'change again', - 'type': 'Folder', - })) - self.run('playback', '/Test', '--override') - assert self.app.Test.meta_type == 'Folder' - assert sorted(self.app.Test.objectIds()) == ['A', 'B', 'C'] + with open(meta, "w") as f: + f.write( + zodbsync.mod_format( + { + "title": "change again", + "type": "Folder", + } + ) + ) + self.run("playback", "/Test", "--override") + assert self.app.Test.meta_type == "Folder" + assert sorted(self.app.Test.objectIds()) == ["A", "B", "C"] assert self.app.Test.A._p_oid == orig_oid with self.runner.sync.tm: - self.app.Test.manage_delObjects(ids=['A', 'B', 'C']) - self.run('record', '/') - with open(meta, 'w') as f: - 
f.write(zodbsync.mod_format({ - 'title': 'change', - 'type': 'Folder (Ordered)', - })) - self.run('playback', '/Test', '--override') - assert self.app.Test.meta_type == 'Folder (Ordered)' + self.app.Test.manage_delObjects(ids=["A", "B", "C"]) + self.run("record", "/") + with open(meta, "w") as f: + f.write( + zodbsync.mod_format( + { + "title": "change", + "type": "Folder (Ordered)", + } + ) + ) + self.run("playback", "/Test", "--override") + assert self.app.Test.meta_type == "Folder (Ordered)" def test_create_userfolder(self): """ @@ -1342,10 +1368,10 @@ def test_create_userfolder(self): recreating the class instance, we need to call it manually. """ with self.runner.sync.tm: - self.app.manage_delObjects('acl_users') + self.app.manage_delObjects("acl_users") self.runner.sync.create_manager_user() - self.run('playback', '/') - assert self.app.acl_users.meta_type == 'User Folder' + self.run("playback", "/") + assert self.app.acl_users.meta_type == "User Folder" def test_no_unnecessary_writes(self): """ @@ -1353,19 +1379,19 @@ def test_no_unnecessary_writes(self): actually update it. """ with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFolder(id='test') + self.app.manage_addProduct["OFSP"].manage_addFolder(id="test") folder = self.app.test mtime1 = folder._p_mtime - self.run('record', '/test') - self.run('playback', '/test') + self.run("record", "/test") + self.run("playback", "/test") mtime2 = folder._p_mtime assert mtime1 == mtime2 - path = self.repo.path + '/__root__/test/__meta__' + path = self.repo.path + "/__root__/test/__meta__" fsmtime1 = os.stat(path).st_mtime - self.run('record', '/test') + self.run("record", "/test") fsmtime2 = os.stat(path).st_mtime assert fsmtime1 == fsmtime2 @@ -1374,17 +1400,17 @@ def test_no_meta_file(self): Check that a missing meta file regards the object as deleted. 
""" - broken_obj = os.path.join(self.repo.path, '__root__', 'foo') + broken_obj = os.path.join(self.repo.path, "__root__", "foo") os.mkdir(broken_obj) - self.run('playback', '/foo') - assert 'foo' not in self.app.objectIds() + self.run("playback", "/foo") + assert "foo" not in self.app.objectIds() - self.add_folder('Test') - self.run('playback', '/Test') - os.remove(os.path.join(self.repo.path, '__root__/Test/__meta__')) - self.run('playback', '/Test') - assert 'Test' not in self.app.objectIds() + self.add_folder("Test") + self.run("playback", "/Test") + os.remove(os.path.join(self.repo.path, "__root__/Test/__meta__")) + self.run("playback", "/Test") + assert "Test" not in self.app.objectIds() def test_force_default_owner(self): """ @@ -1394,34 +1420,39 @@ def test_force_default_owner(self): self.runner.sync.force_default_owner = True # first test: owner from meta file pushed to app - folder = os.path.join(self.repo.path, '__root__', 'newfolder') + folder = os.path.join(self.repo.path, "__root__", "newfolder") os.mkdir(folder) - with open(os.path.join(folder, '__meta__'), 'w') as f: - f.write(zodbsync.mod_format({ - "title": "", - "type": "Folder", - "owner": (['acl_users'], "Somebody"), - })) + with open(os.path.join(folder, "__meta__"), "w") as f: + f.write( + zodbsync.mod_format( + { + "title": "", + "type": "Folder", + "owner": (["acl_users"], "Somebody"), + } + ) + ) - self.run('playback', '/newfolder') + self.run("playback", "/newfolder") - expected_owner = (['acl_users'], self.runner.sync.default_owner) + expected_owner = (["acl_users"], self.runner.sync.default_owner) assert self.app.newfolder._owner == expected_owner # second test: owner from zope read to meta file with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFolder(id='another') + self.app.manage_addProduct["OFSP"].manage_addFolder(id="another") - self.app.another._owner = (['acl_users'], "Somebody") + self.app.another._owner = (["acl_users"], "Somebody") - self.run('record', 
'/') + self.run("record", "/") - meta = self.runner.sync.fs_parse(os.path.join(self.repo.path, - '__root__/another')) + meta = self.runner.sync.fs_parse( + os.path.join(self.repo.path, "__root__/another") + ) - assert 'owner' not in meta + assert "owner" not in meta def test_force_default_owner_negative(self): """ @@ -1432,31 +1463,36 @@ def test_force_default_owner_negative(self): self.runner.sync.force_default_owner = False # first test: owner from meta file pushed to app - folder = os.path.join(self.repo.path, '__root__', 'newfolder') + folder = os.path.join(self.repo.path, "__root__", "newfolder") os.mkdir(folder) - with open(os.path.join(folder, '__meta__'), 'w') as f: - f.write(zodbsync.mod_format({ - "title": "", - "type": "Folder", - "owner": (['acl_users'], "Somebody"), - })) + with open(os.path.join(folder, "__meta__"), "w") as f: + f.write( + zodbsync.mod_format( + { + "title": "", + "type": "Folder", + "owner": (["acl_users"], "Somebody"), + } + ) + ) - self.run('playback', '/newfolder') - assert self.app.newfolder._owner == (['acl_users'], "Somebody") + self.run("playback", "/newfolder") + assert self.app.newfolder._owner == (["acl_users"], "Somebody") # second test: owner from zope read to meta file with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFolder(id='another') + self.app.manage_addProduct["OFSP"].manage_addFolder(id="another") - self.app.another._owner = (['acl_users'], "Somebody") + self.app.another._owner = (["acl_users"], "Somebody") - self.run('record', '/') + self.run("record", "/") - meta = self.runner.sync.fs_parse(os.path.join(self.repo.path, - '__root__/another')) + meta = self.runner.sync.fs_parse( + os.path.join(self.repo.path, "__root__/another") + ) - assert meta['owner'] == (['acl_users'], "Somebody") + assert meta["owner"] == (["acl_users"], "Somebody") def test_reformat(self): """ @@ -1464,36 +1500,41 @@ def test_reformat(self): format. 
Then reformat them, checking that no error occurs and that the final state uses the new formatting. """ - folder = os.path.join(self.repo.path, '__root__/Test') + folder = os.path.join(self.repo.path, "__root__/Test") os.mkdir(folder) - fname = os.path.join(folder, '__meta__') + fname = os.path.join(folder, "__meta__") def commit(): - self.gitrun('add', '__root__/Test/__meta__') - self.gitrun('commit', '-m', 'Test') + self.gitrun("add", "__root__/Test/__meta__") + self.gitrun("commit", "-m", "Test") def store(data, strip=False): # With strip=False, simulate an older version where there was no # newline at the end of meta files - with open(fname, 'w') as f: + with open(fname, "w") as f: s = helpers.StrRepr()(data, legacy=True) if strip: s = s.strip() f.write(s) commit() - store({ - 'title': 'Zope', - 'roles': ['A'], - 'perms': [('View', False, ['Anonymous'])], - }) + store( + { + "title": "Zope", + "roles": ["A"], + "perms": [("View", False, ["Anonymous"])], + } + ) start = self.get_head_id() - store({ - 'title': 'Other', - 'roles': ['A', 'B'], - 'perms': [('View', True, ['Anonymous', 'A'])], - }, strip=True) + store( + { + "title": "Other", + "roles": ["A", "B"], + "perms": [("View", True, ["Anonymous", "A"])], + }, + strip=True, + ) # Add a commit that deletes the object while it does not end in a # newline. A naive cherry-pick would result in a merge conflict. 
@@ -1501,18 +1542,19 @@ def store(data, strip=False): commit() os.mkdir(folder) - store({ - 'title': 'Other', - 'props': [ - [('id', 'columns'), ('type', 'tokens'), - ('value', ('a', 'b', 'c'))], - ] - }) + store( + { + "title": "Other", + "props": [ + [("id", "columns"), ("type", "tokens"), ("value", ("a", "b", "c"))], + ], + } + ) - self.run('reformat', start) + self.run("reformat", start) with open(fname) as f: fmt = f.read() - assert fmt.strip().split('\n') == [ + assert fmt.strip().split("\n") == [ "[", " ('props', [", " [('id', 'columns'), ('type', 'tokens'), ('value', (", @@ -1531,24 +1573,24 @@ def test_replace_child_by_property(self): same name in the same transaction, and also vice versa. """ with self.runner.sync.tm: - self.app._setProperty('test', 'foo', 'string') + self.app._setProperty("test", "foo", "string") - self.run('record', '/') - self.gitrun('add', '.') - self.gitrun('commit', '-m', 'with property') + self.run("record", "/") + self.gitrun("add", ".") + self.gitrun("commit", "-m", "with property") c1 = self.get_head_id() with self.runner.sync.tm: - self.app.manage_delProperties(ids=['test']) - self.app.manage_addProduct['OFSP'].manage_addFolder(id='test') + self.app.manage_delProperties(ids=["test"]) + self.app.manage_addProduct["OFSP"].manage_addFolder(id="test") - self.run('record', '/') - self.gitrun('add', '.') - self.gitrun('commit', '-m', 'with child') + self.run("record", "/") + self.gitrun("add", ".") + self.gitrun("commit", "-m", "with child") c2 = self.get_head_id() - self.run('reset', c2) - self.run('reset', c1) + self.run("reset", c2) + self.run("reset", c1) @contextmanager def appendtoconf(self, text): @@ -1557,12 +1599,12 @@ def appendtoconf(self, text): """ with open(self.config.path) as f: orig_config = f.read() - with open(self.config.path, 'a') as f: - f.write('\n' + text + '\n') + with open(self.config.path, "a") as f: + f.write("\n" + text + "\n") try: yield finally: - with open(self.config.path, 'w') as f: + with 
open(self.config.path, "w") as f: f.write(orig_config) def test_playback_postprocess(self): @@ -1572,11 +1614,13 @@ def test_playback_postprocess(self): """ fname = "{}/postproc".format(self.zeo.path) outfile = "{}.out".format(fname) - script = '\n'.join([ - "#!/bin/bash", - "cat > {}", - ]).format(outfile) - with open(fname, 'w') as f: + script = "\n".join( + [ + "#!/bin/bash", + "cat > {}", + ] + ).format(outfile) + with open(fname, "w") as f: f.write(script) os.chmod(fname, 0o700) with self.appendtoconf('run_after_playback = "{}"'.format(fname)): @@ -1590,8 +1634,8 @@ def addscript(self, basename, *lines): """ fname = "{}/{}".format(self.zeo.path, basename) lines = ("#!/bin/bash",) + lines - with open(fname, 'w') as f: - f.write('\n'.join(lines)) + with open(fname, "w") as f: + f.write("\n".join(lines)) os.chmod(fname, 0o700) return fname @@ -1600,11 +1644,11 @@ def test_playback_hook(self): Add configuration option for a playback hook script and check that only the paths returned are played back """ - self.add_folder('NewFolder', 'First Folder') - self.add_folder('NewFolder2', 'Second Folder') + self.add_folder("NewFolder", "First Folder") + self.add_folder("NewFolder2", "Second Folder") commit = self.get_head_id() # Reset the commit - self.gitrun('reset', '--hard', 'HEAD~2') + self.gitrun("reset", "--hard", "HEAD~2") playback_cmd_out = "{}/playback_cmd.out".format(self.zeo.path) playback_cmd = self.addscript( @@ -1614,16 +1658,22 @@ def test_playback_hook(self): playback_hook = self.addscript( "playback_hook", - "echo '{}'".format(json.dumps([{ - "paths": ["/NewFolder"], - "cmd": playback_cmd, - }])), + "echo '{}'".format( + json.dumps( + [ + { + "paths": ["/NewFolder"], + "cmd": playback_cmd, + } + ] + ) + ), ) with self.appendtoconf('playback_hook = "{}"'.format(playback_hook)): - self.run('pick', 'HEAD..{}'.format(commit)) + self.run("pick", "HEAD..{}".format(commit)) - assert 'NewFolder' in self.app.objectIds() - assert 'NewFolder2' not in 
self.app.objectIds() + assert "NewFolder" in self.app.objectIds() + assert "NewFolder2" not in self.app.objectIds() assert os.path.isfile(playback_cmd_out) def test_playback_hook_failed(self): @@ -1631,11 +1681,11 @@ def test_playback_hook_failed(self): Add configuration option for a playback hook script with a failing cmd and check that all changes are rolled back """ - self.add_folder('NewFolder', 'First Folder') - self.add_folder('NewFolder2', 'Second Folder') + self.add_folder("NewFolder", "First Folder") + self.add_folder("NewFolder2", "Second Folder") commit = self.get_head_id() # Reset the commit - self.gitrun('reset', '--hard', 'HEAD~2') + self.gitrun("reset", "--hard", "HEAD~2") playback_cmd = self.addscript( "playback_cmd", @@ -1643,54 +1693,61 @@ def test_playback_hook_failed(self): ) playback_hook = self.addscript( "playback_hook", - "echo '{}'".format(json.dumps([ - { - "paths": ["/NewFolder"], - "cmd": playback_cmd, - }, - { - "paths": ["/NewFolder2"], - }, - ])), + "echo '{}'".format( + json.dumps( + [ + { + "paths": ["/NewFolder"], + "cmd": playback_cmd, + }, + { + "paths": ["/NewFolder2"], + }, + ] + ) + ), ) with self.appendtoconf('playback_hook = "{}"'.format(playback_hook)): with pytest.raises(AssertionError): - self.run('pick', 'HEAD..{}'.format(commit)) + self.run("pick", "HEAD..{}".format(commit)) - assert 'NewFolder' not in self.app.objectIds() - assert 'NewFolder2' not in self.app.objectIds() + assert "NewFolder" not in self.app.objectIds() + assert "NewFolder2" not in self.app.objectIds() @contextmanager - def addlayer(self, seqnum='00'): + def addlayer(self, seqnum="00"): """ Create a temp directory and add a config that uses this as additional code layer. 
""" - name = '{}-{}.py'.format(seqnum, ''.join( - [random.choice(string.ascii_letters) for _ in range(16)] - )) - path = '{}/layers/{}'.format(self.config.folder, name) + name = "{}-{}.py".format( + seqnum, "".join([random.choice(string.ascii_letters) for _ in range(16)]) + ) + path = "{}/layers/{}".format(self.config.folder, name) with tempfile.TemporaryDirectory() as layer: - workdir = f'{layer}/workdir' - os.makedirs(f'{workdir}/__root__') - subprocess.run(['git', 'init'], cwd=workdir) - subprocess.run(['git', 'config', 'user.email', - 'zodbsync-tester@perfact.de'], cwd=workdir) - subprocess.run(['git', 'config', 'user.name', - 'ZODBSync tester'], cwd=workdir) + workdir = f"{layer}/workdir" + os.makedirs(f"{workdir}/__root__") + subprocess.run(["git", "init"], cwd=workdir) + subprocess.run( + ["git", "config", "user.email", "zodbsync-tester@perfact.de"], + cwd=workdir, + ) + subprocess.run( + ["git", "config", "user.name", "ZODBSync tester"], cwd=workdir + ) source = f"{layer}/source" - os.makedirs(f'{source}/__root__') - with open(path, 'w') as f: + os.makedirs(f"{source}/__root__") + with open(path, "w") as f: f.write(f'workdir = "{layer}/workdir"\n') f.write(f'source = "{source}"\n') f.write(f'ident = "{name}"\n') # Force re-reading config - if hasattr(self, 'runner'): + if hasattr(self, "runner"): del self.runner try: yield layer finally: - if hasattr(self, 'runner'): + if hasattr(self, "runner"): del self.runner os.remove(path) @@ -1700,36 +1757,32 @@ def test_layer_record_freeze(self): folder and record it. Check that the top layer still has the object and a __frozen__ marker. 
""" - self.add_folder('Test', 'Test') - self.run('playback', '/Test') + self.add_folder("Test", "Test") + self.run("playback", "/Test") with self.addlayer() as layer: shutil.copytree( - '{}/__root__/Test'.format(self.repo.path), - '{}/workdir/__root__/Test'.format(layer), - ) - self.run('freeze', '/') - self.run('record', '/') - for fname in ['__meta__', '__frozen__', 'Test/__meta__']: - assert os.path.exists( - '{}/__root__/{}'.format(self.repo.path, fname) + "{}/__root__/Test".format(self.repo.path), + "{}/workdir/__root__/Test".format(layer), ) + self.run("freeze", "/") + self.run("record", "/") + for fname in ["__meta__", "__frozen__", "Test/__meta__"]: + assert os.path.exists("{}/__root__/{}".format(self.repo.path, fname)) def test_layer_record_nofreeze(self): """ Create a folder, copy it into an additional fixed layer and record everything. Check that the top layer no longer has the folder. """ - self.add_folder('Test', 'Test') - self.run('playback', '/Test') + self.add_folder("Test", "Test") + self.run("playback", "/Test") with self.addlayer() as layer: shutil.copytree( - '{}/__root__/Test'.format(self.repo.path), - '{}/workdir/__root__/Test'.format(layer), + "{}/__root__/Test".format(self.repo.path), + "{}/workdir/__root__/Test".format(layer), ) - self.run('record', '/') - assert not os.path.exists( - '{}/__root__/Test'.format(self.repo.path) - ) + self.run("record", "/") + assert not os.path.exists("{}/__root__/Test".format(self.repo.path)) def test_layer_record_compress_simple(self): """ @@ -1740,70 +1793,65 @@ def test_layer_record_compress_simple(self): """ # in our custom layer we create a folder with title 'Foobar' - self.add_folder('Test', 'Test') - self.run('playback', '/Test') - self.app.Test.title = 'Foobar' - self.run('record', '/') + self.add_folder("Test", "Test") + self.run("playback", "/Test") + self.app.Test.title = "Foobar" + self.run("record", "/") # ... 
then we add a new base layer with self.addlayer() as layer: shutil.copytree( - '{}/__root__/Test'.format(self.repo.path), # custom layer! - '{}/workdir/__root__/Test'.format(layer), # new base layer! + "{}/__root__/Test".format(self.repo.path), # custom layer! + "{}/workdir/__root__/Test".format(layer), # new base layer! ) # now create the standard Test folder titled 'Something - meta = zodbsync.mod_format({ - 'title': 'Something', - 'type': 'Folder' - }) - with open(f'{layer}/workdir/__root__/Test/__meta__', 'w') as f: + meta = zodbsync.mod_format({"title": "Something", "type": "Folder"}) + with open(f"{layer}/workdir/__root__/Test/__meta__", "w") as f: f.write(meta) - self.run('playback', '/') + self.run("playback", "/") # still 'Foobar' - custom layer wins - assert self.app.Test.title == 'Foobar' + assert self.app.Test.title == "Foobar" # now really switch to 'Something' via app - self.app.Test.title = 'Something' + self.app.Test.title = "Something" # ... and record. should remove customized # Test folder aka compress - self.run('record', '/') - assert not os.path.isdir( - os.path.join(self.repo.path, '__root__/Test') - ) + self.run("record", "/") + assert not os.path.isdir(os.path.join(self.repo.path, "__root__/Test")) - @pytest.mark.parametrize('recurse', [True, False]) + @pytest.mark.parametrize("recurse", [True, False]) def test_layer_playback(self, recurse): """ Set up a base layer, add a path there and play it back. 
""" - self.add_folder('Test') + self.add_folder("Test") with self.addlayer() as layer: - src = '{}/__root__'.format(self.repo.path) - tgt = '{}/workdir/__root__'.format(layer) - os.rename(src + '/Test', tgt + '/Test') - cmd = ['playback', '/Test'] + src = "{}/__root__".format(self.repo.path) + tgt = "{}/workdir/__root__".format(layer) + os.rename(src + "/Test", tgt + "/Test") + cmd = ["playback", "/Test"] if not recurse: - cmd.append('--no-recurse') + cmd.append("--no-recurse") self.run(*cmd) - assert 'Test' in self.app.objectIds() + assert "Test" in self.app.objectIds() def test_layer_playback_frozen_deleted(self): """ Set up a base layer with a folder, but mask it as deleted in the upper layer. """ - self.add_folder('Test') + self.add_folder("Test") with self.addlayer() as layer: - src = '{}/__root__'.format(self.repo.path) - tgt = '{}/workdir/__root__'.format(layer) - shutil.copytree(src + '/Test', tgt + '/Test') - with open('{}/__frozen__'.format(src), 'w'): + src = "{}/__root__".format(self.repo.path) + tgt = "{}/workdir/__root__".format(layer) + shutil.copytree(src + "/Test", tgt + "/Test") + with open("{}/__frozen__".format(src), "w"): pass - os.remove(src + '/Test/__meta__') - self.run('playback', '/Test') - assert 'Test' not in self.app.objectIds() + os.remove(src + "/Test/__meta__") + self.run("playback", "/Test") + assert "Test" not in self.app.objectIds() def test_layer_playback_combined(self): """ @@ -1812,41 +1860,41 @@ def test_layer_playback_combined(self): changing the object itself and one path being merged without changing the object itself. 
""" - for folder in ['Test1', 'Test2', 'Test3']: + for folder in ["Test1", "Test2", "Test3"]: self.add_folder(folder) - for sub in ['Sub1', 'Sub2']: + for sub in ["Sub1", "Sub2"]: self.add_folder(sub, parent=folder) with self.addlayer() as layer: - root = os.path.join(self.repo.path, '__root__') + root = os.path.join(self.repo.path, "__root__") # Move current structure into lower layer - os.rename(root, os.path.join(layer, 'workdir/__root__')) + os.rename(root, os.path.join(layer, "workdir/__root__")) # Create a sparse structure in top layer files = [ - 'Test1/__frozen__', - 'Test1/__meta__', - 'Test1/Sub3/__meta__', - 'Test2/__meta__', - 'Test2/Sub3/__meta__', - 'Test3/Sub3/__meta__', + "Test1/__frozen__", + "Test1/__meta__", + "Test1/Sub3/__meta__", + "Test2/__meta__", + "Test2/Sub3/__meta__", + "Test3/Sub3/__meta__", ] - meta = ('''[ + meta = """[ ('props', []), ('title', 'overwritten'), ('type', 'Folder'), - ]''') + ]""" for file in files: - dirname, fname = file.rsplit('/', 1) + dirname, fname = file.rsplit("/", 1) os.makedirs(os.path.join(root, dirname), exist_ok=True) - with open(os.path.join(root, file), 'w') as f: - if fname == '__meta__': + with open(os.path.join(root, file), "w") as f: + if fname == "__meta__": f.write(meta) - self.run('playback', '/') - assert self.app.Test1.objectIds() == ['Sub3'] - assert self.app.Test2.objectIds() == ['Sub1', 'Sub2', 'Sub3'] - assert self.app.Test3.objectIds() == ['Sub1', 'Sub2', 'Sub3'] - assert self.app.Test2.title == 'overwritten' - assert self.app.Test3.title == '' + self.run("playback", "/") + assert self.app.Test1.objectIds() == ["Sub3"] + assert self.app.Test2.objectIds() == ["Sub1", "Sub2", "Sub3"] + assert self.app.Test3.objectIds() == ["Sub1", "Sub2", "Sub3"] + assert self.app.Test2.title == "overwritten" + assert self.app.Test3.title == "" def test_layer_record(self): """ @@ -1854,18 +1902,17 @@ def test_layer_record(self): must not be added to the top layer since it is already present in the lower layer. 
""" - self.add_folder('Test') - self.run('playback', '/Test') - self.run('record', '/Test') + self.add_folder("Test") + self.run("playback", "/Test") + self.run("record", "/Test") with self.addlayer() as layer: root = [ - os.path.join(layer, 'workdir/__root__'), - os.path.join(self.repo.path, '__root__'), + os.path.join(layer, "workdir/__root__"), + os.path.join(self.repo.path, "__root__"), ] - os.rename(os.path.join(root[1], 'Test'), - os.path.join(root[0], 'Test')) - self.run('record', '/Test') - assert not os.path.isdir(os.path.join(root[1], 'Test')) + os.rename(os.path.join(root[1], "Test"), os.path.join(root[0], "Test")) + self.run("record", "/Test") + assert not os.path.isdir(os.path.join(root[1], "Test")) def test_layer_record_deletion(self): """ @@ -1873,16 +1920,15 @@ def test_layer_record_deletion(self): the Data.FS. Record it. The top-level layer needs to recreate the folder and mark it as deleted. """ - self.add_folder('Test') - self.add_folder('Sub', parent='Test') + self.add_folder("Test") + self.add_folder("Sub", parent="Test") with self.addlayer() as layer: - srcroot = os.path.join(self.repo.path, '__root__') - tgtroot = os.path.join(layer, 'workdir/__root__') - os.rename(os.path.join(srcroot, 'Test'), - os.path.join(tgtroot, 'Test')) - self.run('record', '/') - assert os.path.isdir(os.path.join(srcroot, 'Test')) - assert os.path.exists(os.path.join(srcroot, 'Test/__deleted__')) + srcroot = os.path.join(self.repo.path, "__root__") + tgtroot = os.path.join(layer, "workdir/__root__") + os.rename(os.path.join(srcroot, "Test"), os.path.join(tgtroot, "Test")) + self.run("record", "/") + assert os.path.isdir(os.path.join(srcroot, "Test")) + assert os.path.exists(os.path.join(srcroot, "Test/__deleted__")) def test_layer_record_prune(self): """ @@ -1890,17 +1936,15 @@ def test_layer_record_prune(self): layer. Remove the folder and record again - check that the subfolder is actually deleted and not marked with __deleted__. 
""" - self.app.manage_addFolder(id='Test') - self.run('record', '/') + self.app.manage_addFolder(id="Test") + self.run("record", "/") with self.addlayer() as layer: os.rename( - os.path.join(self.repo.path, '__root__/__meta__'), - os.path.join(layer, 'workdir/__root__/__meta__'), + os.path.join(self.repo.path, "__root__/__meta__"), + os.path.join(layer, "workdir/__root__/__meta__"), ) - self.run('record', '/') - assert not os.path.isdir( - os.path.join(self.repo.path, '__root__/Test') - ) + self.run("record", "/") + assert not os.path.isdir(os.path.join(self.repo.path, "__root__/Test")) def test_layer_watch_rename(self): """ @@ -1910,10 +1954,10 @@ def test_layer_watch_rename(self): """ with self.addlayer() as layer: os.rename( - os.path.join(self.repo.path, '__root__/index_html'), - os.path.join(layer, 'workdir/__root__/index_html'), + os.path.join(self.repo.path, "__root__/index_html"), + os.path.join(layer, "workdir/__root__/index_html"), ) - watcher = self.mkrunner('watch') + watcher = self.mkrunner("watch") watcher.setup() # Somehow, we need to initialize the connection here and can not @@ -1921,14 +1965,14 @@ def test_layer_watch_rename(self): # interference with addlayer resetting the original connection) with self.newconn() as conn: with conn.tm: - conn.app.manage_renameObject('index_html', 'something') + conn.app.manage_renameObject("index_html", "something") watcher.step() - assert os.path.exists(os.path.join( - self.repo.path, '__root__/index_html/__deleted__' - )) - assert os.path.exists(os.path.join( - self.repo.path, '__root__/something/__meta__' - )) + assert os.path.exists( + os.path.join(self.repo.path, "__root__/index_html/__deleted__") + ) + assert os.path.exists( + os.path.join(self.repo.path, "__root__/something/__meta__") + ) def test_layer_watch_paste(self): """ @@ -1938,37 +1982,38 @@ def test_layer_watch_paste(self): check that. 
""" with self.runner.sync.tm: - self.app.manage_addFolder(id='Test1') - self.app.manage_addFolder(id='Test2') - self.app.Test1.manage_addFolder(id='Sub') - self.run('record', '/') + self.app.manage_addFolder(id="Test1") + self.app.manage_addFolder(id="Test2") + self.app.Test1.manage_addFolder(id="Sub") + self.run("record", "/") with self.addlayer() as layer: - src = os.path.join(self.repo.path, '__root__') - tgt = os.path.join(layer, 'workdir/__root__') + src = os.path.join(self.repo.path, "__root__") + tgt = os.path.join(layer, "workdir/__root__") os.rmdir(tgt) os.rename(src, tgt) os.mkdir(src) - watcher = self.mkrunner('watch') + watcher = self.mkrunner("watch") watcher.setup() with self.newconn() as conn: with conn.tm: - cp = conn.app.Test1.manage_cutObjects(['Sub']) + cp = conn.app.Test1.manage_cutObjects(["Sub"]) conn.app.Test2._pasteObjects(cp) - paths = [os.path.join(self.repo.path, '__root__', path) - for path in ['Test1/Sub/__deleted__', - 'Test2/Sub/__meta__']] - self.watcher_step_until(watcher, - lambda: all(map(os.path.exists, paths))) + paths = [ + os.path.join(self.repo.path, "__root__", path) + for path in ["Test1/Sub/__deleted__", "Test2/Sub/__meta__"] + ] + self.watcher_step_until(watcher, lambda: all(map(os.path.exists, paths))) with self.newconn() as conn: with conn.tm: - cp = conn.app.Test2.manage_cutObjects(['Sub']) + cp = conn.app.Test2.manage_cutObjects(["Sub"]) conn.app.Test1._pasteObjects(cp) - paths = [os.path.join(self.repo.path, '__root__', path) - for path in ['Test1', 'Test2']] + paths = [ + os.path.join(self.repo.path, "__root__", path) + for path in ["Test1", "Test2"] + ] # Both folders must be removed - self.watcher_step_until(watcher, - lambda: not any(map(os.path.isdir, paths))) + self.watcher_step_until(watcher, lambda: not any(map(os.path.isdir, paths))) def test_layer_recreate_deleted(self): """ @@ -1977,21 +2022,21 @@ def test_layer_recreate_deleted(self): custom layer since it is the same as below. 
""" with self.runner.sync.tm: - self.app.manage_addFolder(id='Test') + self.app.manage_addFolder(id="Test") with self.addlayer() as layer: - self.run('record', '/Test') - root = os.path.join(self.repo.path, '__root__') + self.run("record", "/Test") + root = os.path.join(self.repo.path, "__root__") os.rename( - os.path.join(root, 'Test'), - os.path.join(layer, 'workdir/__root__/Test'), + os.path.join(root, "Test"), + os.path.join(layer, "workdir/__root__/Test"), ) - self.app.manage_delObjects(ids=['Test']) - self.run('record', '/') - assert os.path.exists(os.path.join(root, 'Test/__deleted__')) - self.app.manage_addFolder(id='Test') - self.run('record', '/Test') - assert not os.path.isdir(os.path.join(root, 'Test')) + self.app.manage_delObjects(ids=["Test"]) + self.run("record", "/") + assert os.path.exists(os.path.join(root, "Test/__deleted__")) + self.app.manage_addFolder(id="Test") + self.run("record", "/Test") + assert not os.path.isdir(os.path.join(root, "Test")) def test_layer_remove_subfolder(self): """ @@ -2001,22 +2046,22 @@ def test_layer_remove_subfolder(self): __deleted__ marker. 
""" with self.runner.sync.tm: - self.app.manage_addFolder(id='Test') - self.app.Test.manage_addFolder(id='Sub') + self.app.manage_addFolder(id="Test") + self.app.Test.manage_addFolder(id="Sub") with self.addlayer() as layer: - self.run('record', '/') - root = os.path.join(self.repo.path, '__root__') + self.run("record", "/") + root = os.path.join(self.repo.path, "__root__") os.rename( - os.path.join(root, 'Test'), - os.path.join(layer, 'workdir/__root__/Test'), + os.path.join(root, "Test"), + os.path.join(layer, "workdir/__root__/Test"), ) with self.runner.sync.tm: - self.app.Test.manage_delObjects(ids=['Sub']) - self.run('record', '/') - assert not os.path.exists(os.path.join(root, 'Test/__meta__')) - assert not os.path.exists(os.path.join(root, 'Test/Sub/__meta__')) - assert os.path.exists(os.path.join(root, 'Test/Sub/__deleted__')) + self.app.Test.manage_delObjects(ids=["Sub"]) + self.run("record", "/") + assert not os.path.exists(os.path.join(root, "Test/__meta__")) + assert not os.path.exists(os.path.join(root, "Test/Sub/__meta__")) + assert os.path.exists(os.path.join(root, "Test/Sub/__deleted__")) def test_layer_update(self, caplog): """ @@ -2024,107 +2069,104 @@ def test_layer_update(self, caplog): layer-update to play back the changed object. 
""" with self.runner.sync.tm: - self.app.manage_addFolder(id='Test') + self.app.manage_addFolder(id="Test") with self.addlayer() as layer: - self.run('record', '/') - ident = self.runner.sync.layers[-1]['ident'] - src = os.path.join(self.repo.path, '__root__') - tgt = os.path.join(layer, 'source/__root__') + self.run("record", "/") + ident = self.runner.sync.layers[-1]["ident"] + src = os.path.join(self.repo.path, "__root__") + tgt = os.path.join(layer, "source/__root__") os.rmdir(tgt) os.rename(src, tgt) os.mkdir(src) - self.run('layer-init', '*') - with open(os.path.join(tgt, 'Test/__meta__'), 'w') as f: - f.write(zodbsync.mod_format({ - 'title': 'Changed', - 'type': 'Folder' - })) - self.run('layer-update', ident) - assert 'Conflict with object' not in caplog.text - assert self.app.Test.title == 'Changed' + self.run("layer-init", "*") + with open(os.path.join(tgt, "Test/__meta__"), "w") as f: + f.write(zodbsync.mod_format({"title": "Changed", "type": "Folder"})) + self.run("layer-update", ident) + assert "Conflict with object" not in caplog.text + assert self.app.Test.title == "Changed" def test_keep_acl(self): - ''' + """ Make sure deletions on top level acl_users are NOT synced into Data.fs User folders living somewhere else in the application may be deleted though. - ''' + """ acl_path = os.path.join( self.repo.path, - '__root__', - 'acl_users', + "__root__", + "acl_users", ) shutil.rmtree(acl_path) - self.run('playback', '/') + self.run("playback", "/") # this playback will fail horribly if acl_users is gone! 
- self.run('playback', '/') + self.run("playback", "/") # make sure acl_users in toplevel is still present - assert 'acl_users' in self.app.objectIds() + assert "acl_users" in self.app.objectIds() # now create dummy module with its own acl_users folder with self.runner.sync.tm: - self.app.manage_addFolder(id='some_module') + self.app.manage_addFolder(id="some_module") self.app.some_module.manage_addUserFolder() - self.run('record', '/') + self.run("record", "/") - assert 'acl_users' in self.app.some_module.objectIds() + assert "acl_users" in self.app.some_module.objectIds() module_acl = os.path.join( self.repo.path, - '__root__', - 'some_module', - 'acl_users', + "__root__", + "some_module", + "acl_users", ) shutil.rmtree(module_acl) - self.run('playback', '/') - assert 'acl_users' not in self.app.some_module.objectIds() + self.run("playback", "/") + assert "acl_users" not in self.app.some_module.objectIds() def test_keep_acl_norecurse(self): - ''' + """ test_keep_acl but slightly altered for norecurse, aka playing back single objects instead of the whole object tree - ''' + """ acl_path = os.path.join( self.repo.path, - '__root__', - 'acl_users', + "__root__", + "acl_users", ) shutil.rmtree(acl_path) - self.run('playback', '--no-recurse', '/acl_users') + self.run("playback", "--no-recurse", "/acl_users") # make sure acl_users in toplevel is still present - assert 'acl_users' in self.app.objectIds() + assert "acl_users" in self.app.objectIds() # now create dummy module with its own acl_users folder with self.runner.sync.tm: - self.app.manage_addFolder(id='some_module') - self.app.some_module.manage_addFolder(id='something') + self.app.manage_addFolder(id="some_module") + self.app.some_module.manage_addFolder(id="something") self.app.some_module.manage_addUserFolder() - self.run('record', '/') + self.run("record", "/") - assert 'acl_users' in self.app.some_module.objectIds() + assert "acl_users" in self.app.some_module.objectIds() module_acl = os.path.join( 
self.repo.path, - '__root__', - 'some_module', - 'acl_users', + "__root__", + "some_module", + "acl_users", ) shutil.rmtree(module_acl) self.run( - 'playback', - '--no-recurse', - '/some_module', - '/some_module/acl_users', + "playback", + "--no-recurse", + "/some_module", + "/some_module/acl_users", ) - assert 'acl_users' not in self.app.some_module.objectIds() + assert "acl_users" not in self.app.some_module.objectIds() def test_layer_update_warn(self, caplog): """ @@ -2137,34 +2179,30 @@ def test_layer_update_warn(self, caplog): to a warning. """ with self.runner.sync.tm: - self.app.manage_addFolder(id='Test') - self.app.manage_addFolder(id='ToDelete') - self.app.ToDelete.manage_addFolder(id='Sub') + self.app.manage_addFolder(id="Test") + self.app.manage_addFolder(id="ToDelete") + self.app.ToDelete.manage_addFolder(id="Sub") with self.addlayer() as layer: - self.run('record', '/') - ident = self.runner.sync.layers[-1]['ident'] - src = os.path.join(self.repo.path, '__root__') - tgt = os.path.join(layer, 'source/__root__') + self.run("record", "/") + ident = self.runner.sync.layers[-1]["ident"] + src = os.path.join(self.repo.path, "__root__") + tgt = os.path.join(layer, "source/__root__") os.rmdir(tgt) os.rename(src, tgt) os.mkdir(src) - self.run('layer-init', '*') + self.run("layer-init", "*") with self.runner.sync.tm: - self.app.Test._setProperty('nav_hidden', True, 'boolean') - self.app.ToDelete.Sub._setProperty('nav_hidden', True, - 'boolean') - self.run('record', '/') - with open(os.path.join(tgt, 'Test/__meta__'), 'w') as f: - f.write(zodbsync.mod_format({ - 'title': 'Changed', - 'type': 'Folder' - })) - shutil.rmtree(os.path.join(tgt, 'ToDelete')) - self.run('layer-update', ident) - expect = 'Conflict with object in custom layer: ' - assert expect + '/Test' in caplog.text - assert 'AttributeError' not in caplog.text - assert expect + '/ToDelete/Sub' in caplog.text + self.app.Test._setProperty("nav_hidden", True, "boolean") + 
self.app.ToDelete.Sub._setProperty("nav_hidden", True, "boolean") + self.run("record", "/") + with open(os.path.join(tgt, "Test/__meta__"), "w") as f: + f.write(zodbsync.mod_format({"title": "Changed", "type": "Folder"})) + shutil.rmtree(os.path.join(tgt, "ToDelete")) + self.run("layer-update", ident) + expect = "Conflict with object in custom layer: " + assert expect + "/Test" in caplog.text + assert "AttributeError" not in caplog.text + assert expect + "/ToDelete/Sub" in caplog.text def test_layer_change_into_top(self): """ @@ -2175,32 +2213,30 @@ def test_layer_change_into_top(self): top layer. """ with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFile(id='blob') + self.app.manage_addProduct["OFSP"].manage_addFile(id="blob") with self.addlayer() as layer: - self.run('record', '/blob') + self.run("record", "/blob") shutil.move( - '{}/__root__/blob'.format(self.repo.path), - '{}/workdir/__root__/blob'.format(layer), + "{}/__root__/blob".format(self.repo.path), + "{}/workdir/__root__/blob".format(layer), ) with self.runner.sync.tm: self.app.blob.manage_edit( - filedata='text_content', - content_type='text/plain', - title='BLOB' + filedata="text_content", content_type="text/plain", title="BLOB" ) - self.run('record', '/') - root = os.path.join(self.repo.path, '__root__') + self.run("record", "/") + root = os.path.join(self.repo.path, "__root__") # both meta and source file are in custom layer - assert os.path.exists(os.path.join(root, 'blob/__meta__')) - assert os.path.exists(os.path.join(root, 'blob/__source__.txt')) - source_fmt = '{}/__root__/blob/__source__.txt' - with open(source_fmt.format(f'{layer}/workdir')) as f: + assert os.path.exists(os.path.join(root, "blob/__meta__")) + assert os.path.exists(os.path.join(root, "blob/__source__.txt")) + source_fmt = "{}/__root__/blob/__source__.txt" + with open(source_fmt.format(f"{layer}/workdir")) as f: # source in layer should still be empty - assert f.read() == '' + assert f.read() == "" with 
open(source_fmt.format(self.repo.path)) as f: # ... content is in custom layer! - assert f.read() == 'text_content' + assert f.read() == "text_content" def test_layer_playback_hook(self): """ @@ -2209,32 +2245,30 @@ def test_layer_playback_hook(self): object paths and not the specific files. """ with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFile(id='blob') + self.app.manage_addProduct["OFSP"].manage_addFile(id="blob") - root = '{}/__root__'.format(self.repo.path) + root = "{}/__root__".format(self.repo.path) with self.addlayer() as layer: - self.run('record', '/blob') + self.run("record", "/blob") shutil.move( - '{}/blob'.format(root), - '{}/workdir/__root__/blob'.format(layer), + "{}/blob".format(root), + "{}/workdir/__root__/blob".format(layer), ) - os.mkdir('{}/blob'.format(root)) - with open('{}/blob/__deleted__'.format(root), 'w'): + os.mkdir("{}/blob".format(root)) + with open("{}/blob/__deleted__".format(root), "w"): pass - self.gitrun('add', '.') - self.gitrun('commit', '-m', 'delete blob') + self.gitrun("add", ".") + self.gitrun("commit", "-m", "delete blob") commid = self.get_head_id() - self.gitrun('reset', '--hard', 'HEAD~') - output = '{}/playback_hook.out'.format(self.zeo.path) + self.gitrun("reset", "--hard", "HEAD~") + output = "{}/playback_hook.out".format(self.zeo.path) playback_hook = self.addscript( "playback_hook", "cat > {}".format(output), "echo '[]'", ) - with self.appendtoconf( - 'playback_hook = "{}"'.format(playback_hook) - ): - self.run('pick', commid) + with self.appendtoconf('playback_hook = "{}"'.format(playback_hook)): + self.run("pick", commid) with open(output) as f: assert {"paths": ["/blob/"]} == json.loads(f.read()) @@ -2243,34 +2277,34 @@ def test_layer_tar(self): Perform a layer-init and layer-update from a tar file source. 
""" with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFile(id='blob') + self.app.manage_addProduct["OFSP"].manage_addFile(id="blob") with self.addlayer() as layer: - self.run('record', '/blob') + self.run("record", "/blob") subprocess.run( - ['tar', 'cf', f'{layer}/source/__root__.tar', 'blob'], - cwd=f'{self.repo.path}/__root__', + ["tar", "cf", f"{layer}/source/__root__.tar", "blob"], + cwd=f"{self.repo.path}/__root__", check=True, ) os.rmdir(f"{layer}/source/__root__") - self.run('layer-init', '*') - assert os.listdir(f'{layer}/workdir/__root__') == ['blob'] + self.run("layer-init", "*") + assert os.listdir(f"{layer}/workdir/__root__") == ["blob"] # Record to remove from fallback layer - self.run('record', '/') - assert 'blob' not in os.listdir(f'{self.repo.path}/__root__') + self.run("record", "/") + assert "blob" not in os.listdir(f"{self.repo.path}/__root__") # Now change the file in the TAR file and run layer-update shutil.copytree( - f'{layer}/workdir/__root__/blob', - f'{layer}/blob', + f"{layer}/workdir/__root__/blob", + f"{layer}/blob", ) - with open(f'{layer}/blob/__source__.txt', 'w') as f: - f.write('changed') + with open(f"{layer}/blob/__source__.txt", "w") as f: + f.write("changed") subprocess.run( - ['tar', 'cf', f'{layer}/source/__root__.tar', 'blob'], + ["tar", "cf", f"{layer}/source/__root__.tar", "blob"], cwd=layer, check=True, ) - self.run('layer-update', '*') - assert str(self.app.blob) == 'changed' + self.run("layer-update", "*") + assert str(self.app.blob) == "changed" def test_layer_update_2phase_failed(self): """ @@ -2279,28 +2313,34 @@ def test_layer_update_2phase_failed(self): correctly. 
""" with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFile(id='blob') + self.app.manage_addProduct["OFSP"].manage_addFile(id="blob") playback_cmd = self.addscript("playback_cmd", "false") playback_hook = self.addscript( "playback_hook", - "echo '{}'".format(json.dumps([{ - "paths": ["/blob"], - "cmd": playback_cmd, - }])), + "echo '{}'".format( + json.dumps( + [ + { + "paths": ["/blob"], + "cmd": playback_cmd, + } + ] + ) + ), ) with self.appendtoconf('playback_hook = "{}"'.format(playback_hook)): with self.addlayer() as layer: - self.run('record', '/') - src = f'{self.repo.path}/__root__/blob' - tgt = f'{layer}/source/__root__/blob' + self.run("record", "/") + src = f"{self.repo.path}/__root__/blob" + tgt = f"{layer}/source/__root__/blob" os.rename(src, tgt) - self.run('layer-init', '*') - with open(f'{tgt}/__source__.txt', 'w') as f: - f.write('changed') + self.run("layer-init", "*") + with open(f"{tgt}/__source__.txt", "w") as f: + f.write("changed") with pytest.raises(AssertionError): - self.run('layer-update', '*') - assert str(self.app.blob) == '' + self.run("layer-update", "*") + assert str(self.app.blob) == "" def test_layer_info_datafs(self): """ @@ -2308,40 +2348,38 @@ def test_layer_info_datafs(self): in the Data.FS """ with self.runner.sync.tm: - self.app.manage_addProduct['OFSP'].manage_addFile(id='blob') + self.app.manage_addProduct["OFSP"].manage_addFile(id="blob") with self.addlayer() as layer: - self.run('record', '/blob') - assert getattr(self.app.blob, 'zodbsync_layer', None) is None + self.run("record", "/blob") + assert getattr(self.app.blob, "zodbsync_layer", None) is None # Move file to layer and check that layer info is stored in Data.FS shutil.move( - '{}/__root__/blob'.format(self.repo.path), - '{}/workdir/__root__/blob'.format(layer), + "{}/__root__/blob".format(self.repo.path), + "{}/workdir/__root__/blob".format(layer), ) - self.run('record', '/') - assert getattr(self.app.blob, 'zodbsync_layer') is not None + 
self.run("record", "/") + assert getattr(self.app.blob, "zodbsync_layer") is not None # Change file in Data.FS and verify that layer info is cleared with self.runner.sync.tm: self.app.blob.manage_edit( - filedata='text_content', - content_type='text/plain', - title='BLOB' + filedata="text_content", content_type="text/plain", title="BLOB" ) - self.run('record', '/') - assert getattr(self.app.blob, 'zodbsync_layer', None) is None + self.run("record", "/") + assert getattr(self.app.blob, "zodbsync_layer", None) is None def test_fail_when_meta_is_missing(self): """ Check that playing back a structure where no layer has a meta file for a given folder does not work. """ - root = f'{self.repo.path}/__root__' - os.mkdir(f'{root}/newfolder') - os.mkdir(f'{root}/newobj') - with open(f'{root}/newobj/__source__.py', 'w'): + root = f"{self.repo.path}/__root__" + os.mkdir(f"{root}/newfolder") + os.mkdir(f"{root}/newobj") + with open(f"{root}/newobj/__source__.py", "w"): pass with pytest.raises(AssertionError): - self.run('playback', '/') + self.run("playback", "/") def test_fail_when_meta_missing_layers(self): """ @@ -2349,7 +2387,7 @@ def test_fail_when_meta_missing_layers(self): a given folder does not work (multi-layer). 
""" with self.addlayer() as layer: - os.mkdir(f'{self.repo.path}/__root__/newfolder') - os.mkdir(f'{layer}/workdir/__root__/newfolder') + os.mkdir(f"{self.repo.path}/__root__/newfolder") + os.mkdir(f"{layer}/workdir/__root__/newfolder") with pytest.raises(AssertionError): - self.run('playback', '/') + self.run("playback", "/") diff --git a/perfact/zodbsync/zodbsync.py b/perfact/zodbsync/zodbsync.py index e626f8c..ada7e32 100644 --- a/perfact/zodbsync/zodbsync.py +++ b/perfact/zodbsync/zodbsync.py @@ -1,59 +1,68 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- +import logging import os import shutil -import time # for periodic output -import sys -import logging import subprocess as sp +import sys +import time # for periodic output -# for using an explicit transaction manager -import transaction # for "logging in" import AccessControl.SecurityManagement +import App.config + +# for using an explicit transaction manager +import transaction + # For config loading and initial connection, possibly populating an empty ZODB import Zope2.App.startup -import App.config from Zope2.Startup.run import configure_wsgi -# Plugins for handling different object types -from .object_types import object_handlers, mod_implemented_handlers -from .helpers import StrRepr, to_string, literal_eval, remove_redundant_paths -from .helpers import load_config +from .helpers import ( + StrRepr, + literal_eval, + load_config, + remove_redundant_paths, + to_string, +) +# Plugins for handling different object types +from .object_types import mod_implemented_handlers, object_handlers # Monkey patch ZRDB not to connect to databases immediately. 
try: from Shared.DC.ZRDB import Connection + Connection.Connection.connect_on_load = False except ImportError: # pragma: no cover pass def mod_format(data=None): - '''Make a printable output of the given object data.''' + """Make a printable output of the given object data.""" # This defines which levels of each key should be split into separate lines # if they contain an iterable, in addition to the default rule rules = { - 'perms': [4], - 'props': [5], - 'local_roles': [4], + "perms": [4], + "props": [5], + "local_roles": [4], } return StrRepr()(data, rules) def obj_contents(obj): - ''' Fetch list of subitems ''' - func = getattr(obj, 'objectIds') + """Fetch list of subitems""" + func = getattr(obj, "objectIds") return sorted(func()) if func else [] -def mod_read(obj=None, onerrorstop=False, default_owner=None, - force_default_owner=False): - '''Build a consistent metadata dictionary for all types.''' +def mod_read( + obj=None, onerrorstop=False, default_owner=None, force_default_owner=False +): + """Build a consistent metadata dictionary for all types.""" # Known types: known_types = list(object_handlers.keys()) @@ -67,13 +76,13 @@ def mod_read(obj=None, onerrorstop=False, default_owner=None, # The Zope object type is always in the same place meta_type = obj.meta_type - meta['type'] = meta_type + meta["type"] = meta_type # The title should always be readable - title = getattr(obj, 'title', None) + title = getattr(obj, "title", None) # see comment in helpers.py:str_repr for why we convert to string if isinstance(title, (bytes, str)): - meta['title'] = to_string(title) + meta["title"] = to_string(title) # Generic and meta type dependent handlers @@ -81,7 +90,7 @@ def mod_read(obj=None, onerrorstop=False, default_owner=None, if onerrorstop: raise AssertionError(f"Unsupported type: {meta_type}") else: - meta['unsupported'] = meta_type + meta["unsupported"] = meta_type return meta for handler in mod_implemented_handlers(obj, meta_type): @@ -89,49 +98,55 @@ def 
mod_read(obj=None, onerrorstop=False, default_owner=None, # if default owner is set, remove the owner attribute if it matches the # default owner. also when force_default_owner is set - owner_is_default = meta.get('owner') == (['acl_users'], default_owner) + owner_is_default = meta.get("owner") == (["acl_users"], default_owner) if (default_owner) and (owner_is_default or force_default_owner): - if 'owner' in meta: - del meta['owner'] + if "owner" in meta: + del meta["owner"] - meta['zodbsync_layer'] = getattr(obj, 'zodbsync_layer', None) + meta["zodbsync_layer"] = getattr(obj, "zodbsync_layer", None) return meta -def mod_write(data, parent=None, obj_id=None, override=False, root=None, - default_owner=None, force_default_owner=False, layer=None): - ''' +def mod_write( + data, + parent=None, + obj_id=None, + override=False, + root=None, + default_owner=None, + force_default_owner=False, + layer=None, +): + """ Given object data in , store the object, creating it if it was missing. With = True, this method will remove an existing object if there is a meta_type mismatch. If root is given, it should be the application root, which is then updated with the metadata in data, ignoring parent. Returns the existing or created object - ''' + """ # Retrieve the object meta type. d = dict(data) - meta_type = d['type'] + meta_type = d["type"] - no_owner_given = 'owner' not in d + no_owner_given = "owner" not in d if (default_owner) and (no_owner_given or force_default_owner): - d['owner'] = (['acl_users'], default_owner) + d["owner"] = (["acl_users"], default_owner) if root is None: - if hasattr(parent, 'aq_explicit'): + if hasattr(parent, "aq_explicit"): obj = getattr(parent.aq_explicit, obj_id, None) else: obj = getattr(parent, obj_id, None) else: obj = root - if obj is not None and not hasattr(obj, 'meta_type'): - logging.getLogger('ZODBSync').warning( - 'Removing property with colliding ID! 
({} in {})'.format( - obj_id, parent - ) + if obj is not None and not hasattr(obj, "meta_type"): + logging.getLogger("ZODBSync").warning( + "Removing property with colliding ID! ({} in {})".format(obj_id, parent) ) parent.manage_delProperties(ids=[obj_id]) obj = None @@ -147,17 +162,21 @@ def mod_write(data, parent=None, obj_id=None, override=False, root=None, temp_obj = obj temp_id = obj_id while temp_id in parent.objectIds(): - temp_id += '_' + temp_id += "_" parent.manage_renameObject(obj_id, temp_id) else: # Remove the existing object in override mode - parent.manage_delObjects(ids=[obj_id, ]) + parent.manage_delObjects( + ids=[ + obj_id, + ] + ) obj = None # ID is new? Create a minimal object (depending on type) if obj is None: object_handlers[meta_type].create(parent, data, obj_id) - if hasattr(parent, 'aq_explicit'): + if hasattr(parent, "aq_explicit"): obj = getattr(parent.aq_explicit, obj_id, None) else: obj = getattr(parent, obj_id, None) @@ -178,47 +197,47 @@ def mod_write(data, parent=None, obj_id=None, override=False, root=None, class ZODBSync: - '''A ZODBSync instance is capable of mirroring a part of the ZODB + """A ZODBSync instance is capable of mirroring a part of the ZODB object tree in the file system. By default, the syncer creates a subdirectory "__root__" in the given directory and can use the methods "record()" and "playback()" to get all objects from the ZODB or write them back, respectively. - ''' + """ # We write the binary sources into files ending with appropriate extensions # for convenience. This table guesses the most important ones from the # "content_type" property. 
content_types = { - 'application/pdf': 'pdf', - 'application/json': 'json', - 'application/javascript': 'js', - 'image/jpeg': 'jpg', - 'image/gif': 'gif', - 'image/png': 'png', - 'text/javascript': 'js', - 'text/css': 'css', - 'text/html': 'html', - 'image/svg+xml': 'svg', + "application/pdf": "pdf", + "application/json": "json", + "application/javascript": "js", + "image/jpeg": "jpg", + "image/gif": "gif", + "image/png": "png", + "text/javascript": "js", + "text/css": "css", + "text/html": "html", + "image/svg+xml": "svg", } # In some cases, we can deduce the best extension from the object type. meta_types = { - 'Z SQL Method': 'sql', - 'Script (Python)': 'py', + "Z SQL Method": "sql", + "Script (Python)": "py", } - def __init__(self, config, logger, site='__root__'): + def __init__(self, config, logger, site="__root__"): self.logger = logger self.config = config - self.base_dir = config['base_dir'] + self.base_dir = config["base_dir"] self.site = site self.app_dir = os.path.join(self.base_dir, self.site) - self.manager_user = config.get('manager_user', 'perfact') - self.default_owner = config.get('default_owner', 'perfact') - self.force_default_owner = config.get('force_default_owner', False) + self.manager_user = config.get("manager_user", "perfact") + self.default_owner = config.get("default_owner", "perfact") + self.force_default_owner = config.get("force_default_owner", False) # Statistics self.num_obj_total = 1 @@ -229,9 +248,9 @@ def __init__(self, config, logger, site='__root__'): # which configure function to call. However, they essentially do the # same and depending on the Zope version, only one is available, so it # does not matter how the name of the config was given. 
- conf_path = self.config.get('wsgi_conf_path') + conf_path = self.config.get("wsgi_conf_path") if not conf_path: - conf_path = self.config.get('conf_path') + conf_path = self.config.get("conf_path") # clear arguments to avoid confusing zope configuration procedure sys.argv = sys.argv[:1] @@ -251,85 +270,85 @@ def __init__(self, config, logger, site='__root__'): # thread will not yield "client has seen newer transactions than # server!" messages (which is mostly relevant for the tests). self.tm = transaction.TransactionManager() - db = App.config.getConfiguration().dbtab.getDatabase('/', is_root=1) + db = App.config.getConfiguration().dbtab.getDatabase("/", is_root=1) root = db.open(self.tm).root self.app = root.Application # Initialize layers - layerdir = self.config.get('layers', None) + layerdir = self.config.get("layers", None) layers = [] fnames = [] if layerdir and os.path.isdir(layerdir): fnames = sorted(os.listdir(layerdir)) for fname in fnames: - if any([fname.startswith(key) for key in '.~_']): + if any([fname.startswith(key) for key in ".~_"]): continue ident = fname - if ident.endswith('.py'): + if ident.endswith(".py"): ident = ident[:-3] layer = { **{ - 'ident': ident, + "ident": ident, }, - **load_config(f'{layerdir}/{fname}') + **load_config(f"{layerdir}/{fname}"), } - if 'workdir' not in layer or 'source' not in layer: - raise ValueError( - "Old-style layer config without workdir+source" - ) + if "workdir" not in layer or "source" not in layer: + raise ValueError("Old-style layer config without workdir+source") layers.append(layer) - workdir = layer['workdir'] - root = f'{workdir}/{site}' + workdir = layer["workdir"] + root = f"{workdir}/{site}" if not os.path.isdir(root): os.makedirs(root, exist_ok=True) if not os.path.isdir(f"{workdir}/.git"): - sp.run(['git', 'init'], cwd=workdir, check=True) + sp.run(["git", "init"], cwd=workdir, check=True) # Append default top-level layer - layers.append({ - 'ident': None, - 'workdir': 
self.config['base_dir'], - }) + layers.append( + { + "ident": None, + "workdir": self.config["base_dir"], + } + ) # Reverse order - index zero is the topmost fallback layer self.layers = list(reversed(layers)) # Make sure the manager user exists - if self.config.get('create_manager_user', False): + if self.config.get("create_manager_user", False): self.create_manager_user() def create_manager_user(self): """ Make sure the manager user exists. """ - userfolder = getattr(self.app, 'acl_users', None) + userfolder = getattr(self.app, "acl_users", None) if userfolder is None: - self.app.manage_addProduct['OFSP'].manage_addUserFolder() + self.app.manage_addProduct["OFSP"].manage_addUserFolder() userfolder = self.app.acl_users user = userfolder.getUser(self.manager_user) if user is not None: return self.tm.begin() - userfolder._doAddUser(self.manager_user, 'admin', ['Manager'], []) + userfolder._doAddUser(self.manager_user, "admin", ["Manager"], []) self.logger.warning( - 'Created user %s with password admin because this user does not' - ' exist!' % self.manager_user + "Created user %s with password admin because this user does not" + " exist!" % self.manager_user ) self.tm.commit() - def start_transaction(self, note=''): - ''' Start a transaction with a given note and return the transaction + def start_transaction(self, note=""): + """Start a transaction with a given note and return the transaction manager, so the caller can call commit() or abort() - ''' + """ # Log in as a manager uf = self.app.acl_users user = uf.getUser(self.manager_user).__of__(uf) if user is None: raise AssertionError( - f'User {self.manager_user} is not available in database.' - ' Perhaps you need to set create_manager_user in config.py?' + f"User {self.manager_user} is not available in database." + " Perhaps you need to set create_manager_user in config.py?" 
) - self.logger.info('Using user %s' % self.manager_user) + self.logger.info("Using user %s" % self.manager_user) AccessControl.SecurityManagement.newSecurityManager(None, user) self.tm.begin() @@ -339,24 +358,24 @@ def start_transaction(self, note=''): return self.tm def source_ext_from_meta(self, meta, obj_id): - '''Guess a good extension from meta data.''' + """Guess a good extension from meta data.""" content_type = None # Extract meta data from the key-value list passed. - meta_type = meta.get('type', None) - props = meta.get('props', []) + meta_type = meta.get("type", None) + props = meta.get("props", []) for prop in props: d = dict(prop) - if d['id'] == 'content_type': - content_type = d['value'] + if d["id"] == "content_type": + content_type = d["value"] break # txt is the default extension. - ext = 'txt' + ext = "txt" # If the ID has a period, the extension defaults from the ID. - if (obj_id or '').find('.') != -1: - ext = obj_id.rsplit('.', 1)[-1] + if (obj_id or "").find(".") != -1: + ext = obj_id.rsplit(".", 1)[-1] # If there's an extension to use for the object meta_type, use # that. @@ -367,13 +386,13 @@ def source_ext_from_meta(self, meta, obj_id): return ext def fs_path(self, path): - ''' + """ Return filesystem path corresponding to the object path, which might start with a /. Note that this is not layer-aware and will always return the path in the topmost layer. 
- ''' - return os.path.join(self.app_dir, path.lstrip('/')) + """ + return os.path.join(self.app_dir, path.lstrip("/")) def fs_pathinfo(self, path): """ @@ -406,8 +425,8 @@ def fs_pathinfo(self, path): """ layers = self.layers check = self.base_dir - markers = ['__frozen__', '__deleted__'] - for part in [self.site] + path.split('/'): + markers = ["__frozen__", "__deleted__"] + for part in [self.site] + path.split("/"): if not part: continue check = os.path.join(check, part) @@ -419,91 +438,85 @@ def fs_pathinfo(self, path): break result = { - 'path': path, - 'fspath': None, - 'children': [], - 'layers': layers, - 'layeridx': None, + "path": path, + "fspath": None, + "children": [], + "layers": layers, + "layeridx": None, } - path = path.lstrip('/') + path = path.lstrip("/") candidates = set() # subfolders on any layer children = set() # those with a __meta__ file on a some layer for idx, layer in enumerate(layers): - fspath = os.path.join(layer['workdir'], self.site, path) + fspath = os.path.join(layer["workdir"], self.site, path) if not os.path.isdir(fspath): continue - meta = os.path.join(fspath, '__meta__') - if result['fspath'] is None and os.path.exists(meta): - result['fspath'] = fspath - result['layeridx'] = idx + meta = os.path.join(fspath, "__meta__") + if result["fspath"] is None and os.path.exists(meta): + result["fspath"] = fspath + result["layeridx"] = idx for entry in os.listdir(fspath): - if entry in children or entry.startswith('__'): + if entry in children or entry.startswith("__"): continue candidates.add(entry) - if os.path.exists(os.path.join(fspath, entry, '__meta__')): + if os.path.exists(os.path.join(fspath, entry, "__meta__")): children.add(entry) missing = candidates - children if missing: - raise AssertionError( - f"No __meta__ file on any layer: {path}/{children}" - ) + raise AssertionError(f"No __meta__ file on any layer: {path}/{children}") - result['children'] = sorted(children) + result["children"] = sorted(children) return result def 
fs_write(self, path, data): - ''' + """ Write object data out to a folder with the given path. - ''' + """ # If the custom layer has a __deleted__ marker for this object, remove # it. base_dir = self.fs_path(path) - delpath = os.path.join(base_dir, '__deleted__') + delpath = os.path.join(base_dir, "__deleted__") if os.path.exists(delpath): os.remove(delpath) # Find layer that holds the current version of the object, falling back # to the custom layer pathinfo = self.fs_pathinfo(path) - base_dir = pathinfo['fspath'] or base_dir + base_dir = pathinfo["fspath"] or base_dir # Make directory for the object if it's not already there if not os.path.isdir(base_dir): self.logger.debug("Will create new directory %s" % path) os.makedirs(base_dir) - old_data = self.fs_read(pathinfo['fspath']) + old_data = self.fs_read(pathinfo["fspath"]) # Build object - exclude_keys = ['source', 'zodbsync_layer'] - meta = { - key: value - for key, value in data.items() - if key not in exclude_keys - } + exclude_keys = ["source", "zodbsync_layer"] + meta = {key: value for key, value in data.items() if key not in exclude_keys} fmt = mod_format(meta) if isinstance(fmt, str): - fmt = fmt.encode('utf-8') - source = data.get('source', None) + fmt = fmt.encode("utf-8") + source = data.get("source", None) - new_data = {'meta': fmt.strip()} + new_data = {"meta": fmt.strip()} # Only write out sources if unicode or string write_source = isinstance(source, (bytes, str)) src_fname = None if write_source: # Write bytes or utf-8 encoded text. 
- base = '__source__' + base = "__source__" if isinstance(source, str): - source = source.encode('utf-8') - base = '__source-utf8__' + source = source.encode("utf-8") + base = "__source-utf8__" ext = self.source_ext_from_meta( meta=meta, - obj_id=path.rstrip('/').rsplit('/', 1)[-1], + obj_id=path.rstrip("/").rsplit("/", 1)[-1], ) - src_fname = '{}.{}'.format(base, ext) - new_data['src_fnames'] = [src_fname] - new_data['source'] = source + src_fname = "{}.{}".format(base, ext) + new_data["src_fnames"] = [src_fname] + new_data["source"] = source if old_data != new_data: # Path in top layer, might be different than the one where we read @@ -512,39 +525,39 @@ def fs_write(self, path, data): os.makedirs(write_base, exist_ok=True) self.logger.debug("Will write %d bytes of metadata" % len(fmt)) - with open(os.path.join(write_base, '__meta__'), 'wb') as f: + with open(os.path.join(write_base, "__meta__"), "wb") as f: f.write(fmt) # Check if there are stray __source* files and remove them first. - source_files = [s for s in os.listdir(write_base) - if s.startswith('__source') and s != src_fname] + source_files = [ + s + for s in os.listdir(write_base) + if s.startswith("__source") and s != src_fname + ] for source_file in source_files: os.remove(os.path.join(write_base, source_file)) if write_source: - self.logger.debug( - "Will write %d bytes of source" % len(source) - ) - with open(os.path.join(write_base, src_fname), 'wb') as f: + self.logger.debug("Will write %d bytes of source" % len(source)) + with open(os.path.join(write_base, src_fname), "wb") as f: f.write(source) # We wrote the object to the topmost layer, so the index where the # current representation can be found is zero. - pathinfo['layeridx'] = 0 + pathinfo["layeridx"] = 0 # Compress if possible: Compare object with its representation on disk # if the current layer is ignored. If it is the same, remove it in the # current layer. 
Continue with the next layer that holds the object - for idx, layer in enumerate(pathinfo['layers']): + for idx, layer in enumerate(pathinfo["layers"]): # This is now the layer that we compare the current layer to in # order to check if we can compress it. - if idx <= pathinfo['layeridx']: + if idx <= pathinfo["layeridx"]: continue - fspath = os.path.join(layer['workdir'], self.site, - path.lstrip('/')) + fspath = os.path.join(layer["workdir"], self.site, path.lstrip("/")) data = self.fs_read(fspath) - if not data or not data.get('meta'): + if not data or not data.get("meta"): # No representation on this layer continue if data != new_data: @@ -552,49 +565,50 @@ def fs_write(self, path, data): break # Remove meta file and all source files base = os.path.join( - pathinfo['layers'][pathinfo['layeridx']]['workdir'], - self.site, path.lstrip('/') + pathinfo["layers"][pathinfo["layeridx"]]["workdir"], + self.site, + path.lstrip("/"), ) - os.remove(os.path.join(base, '__meta__')) - for src in data.get('src_fnames', []): + os.remove(os.path.join(base, "__meta__")) + for src in data.get("src_fnames", []): os.remove(os.path.join(base, src)) # Next comparison point - pathinfo['layeridx'] = idx + pathinfo["layeridx"] = idx return pathinfo def fs_prune(self, pathinfo, contents): - ''' + """ Remove all subfolders from path that are not in contents. Removes the folder from the top-level directory, but if the effective folder that defines the object (in a multi-layer setup) still would provide it, recreate the directory and add a __deleted__ file. 
def fs_prune(self, pathinfo, contents):
    """
    Delete child directories under *pathinfo* that no longer correspond
    to an entry in *contents*.

    The directory is removed from the top-level (custom) layer. If any
    lower layer still provides the object, the directory is recreated
    with a ``__deleted__`` marker file so the lower-layer version stays
    masked.
    """
    site_rel = os.path.join(self.site, pathinfo["path"].lstrip("/"))
    top_dir = self.fs_path(pathinfo["path"])
    stale = (child for child in pathinfo["children"] if child not in contents)
    for child in stale:
        victim = os.path.join(top_dir, child)
        if os.path.isdir(victim):
            self.logger.info("Removing old item %s from filesystem" % child)
            shutil.rmtree(victim)
        # Does any lower layer (everything below the custom top layer)
        # still define this object?
        meta_rel = os.path.join(site_rel, child, "__meta__")
        provided = any(
            os.path.exists(os.path.join(layer["workdir"], meta_rel))
            for layer in pathinfo["layers"][1:]
        )
        if provided:
            # Mask the lower-layer object as deleted.
            os.makedirs(victim, exist_ok=True)
            with open(os.path.join(victim, "__deleted__"), "wb"):
                pass
def fs_read(self, fspath):
    """
    Read the raw object representation from the local file system.

    :param fspath: full filesystem path of the object directory; may be
        ``None``.
    :returns: a dict with
        - ``meta``: stripped content of the ``__meta__`` file,
        - ``src_fnames``: sorted list of ``__source*`` file names, if any,
        - ``source``: raw content of the source file, only when there is
          exactly one;
        or an empty dict if the directory or its meta file is missing.
    """
    if fspath is None or not os.path.isdir(fspath):
        return {}
    entries = os.listdir(fspath)
    if "__meta__" not in entries:
        return {}

    def slurp(fname):
        # Read a file below fspath as raw bytes.
        with open(os.path.join(fspath, fname), "rb") as fd:
            return fd.read()

    result = {"meta": slurp("__meta__").strip()}
    sources = sorted(name for name in entries if name.startswith("__source"))
    if sources:
        result["src_fnames"] = sources
        if len(sources) == 1:
            result["source"] = slurp(sources[0])
    return result
Raises an error if there is no meta file or multiple source files - ''' + """ if data is None: data = self.fs_read(fspath) - if 'meta' not in data: + if "meta" not in data: raise AssertionError(f"Missing meta file: {fspath}") - src_fnames = data.get('src_fnames', []) + src_fnames = data.get("src_fnames", []) if len(src_fnames) > 1: - raise AssertionError( - f"Multiple source files in {fspath}" - ) - result = dict(literal_eval(data['meta'])) + raise AssertionError(f"Multiple source files in {fspath}") + result = dict(literal_eval(data["meta"])) if src_fnames: src_fname = src_fnames[0] - src = data['source'] - if src_fname.rsplit('.', 1)[0].endswith('-utf8__'): - src = src.decode('utf-8') - result['source'] = src + src = data["source"] + if src_fname.rsplit(".", 1)[0].endswith("-utf8__"): + src = src.decode("utf-8") + result["source"] = src return result - def record(self, paths, recurse=True, skip_errors=False, - ignore_removed=False): - '''Record Zope objects from the given paths into the local - filesystem.''' + def record(self, paths, recurse=True, skip_errors=False, ignore_removed=False): + """Record Zope objects from the given paths into the local + filesystem.""" # If /a/b as well as /a are to be recorded recursively, drop /a/b if recurse: remove_redundant_paths(paths) for path in paths: obj = self.app # traverse into the object of interest - for part in path.split('/'): + for part in path.split("/"): if not part: continue if part not in obj.objectIds(): @@ -679,12 +690,11 @@ def record(self, paths, recurse=True, skip_errors=False, obj = getattr(obj, part) if obj is None and ignore_removed: continue - self.record_obj(obj, path, recurse=recurse, - skip_errors=skip_errors) + self.record_obj(obj, path, recurse=recurse, skip_errors=skip_errors) self.fs_prune_empty_dirs() def record_obj(self, obj, path, recurse=True, skip_errors=False): - '''Record a Zope object into the local filesystem''' + """Record a Zope object into the local filesystem""" try: data = 
mod_read( obj, @@ -692,8 +702,8 @@ def record_obj(self, obj, path, recurse=True, skip_errors=False): force_default_owner=self.force_default_owner, ) except Exception: - severity = 'Skipping' if skip_errors else 'ERROR' - msg = '{}: Unable to record path {}'.format(severity, path) + severity = "Skipping" if skip_errors else "ERROR" + msg = "{}: Unable to record path {}".format(severity, path) if skip_errors: self.logger.warning(msg) return @@ -702,9 +712,9 @@ def record_obj(self, obj, path, recurse=True, skip_errors=False): raise pathinfo = self.fs_write(path, data) - path_layer = pathinfo['layers'][pathinfo['layeridx']]['ident'] + path_layer = pathinfo["layers"][pathinfo["layeridx"]]["ident"] - current_layer = getattr(obj, 'zodbsync_layer', None) + current_layer = getattr(obj, "zodbsync_layer", None) if current_layer != path_layer: with self.tm: obj.zodbsync_layer = path_layer @@ -712,19 +722,17 @@ def record_obj(self, obj, path, recurse=True, skip_errors=False): if not recurse: return - contents = obj_contents(obj) if ('unsupported' not in data) else [] + contents = obj_contents(obj) if ("unsupported" not in data) else [] self.fs_prune(pathinfo, contents) # Update statistics self.num_obj_total += len(contents) now = time.time() if now - self.num_obj_last_report > 2: - self.logger.info('%d obj saved of at least %d, ' - 'current path %s' - % (self.num_obj_current, - self.num_obj_total, - path) - ) + self.logger.info( + "%d obj saved of at least %d, " + "current path %s" % (self.num_obj_current, self.num_obj_total, path) + ) self.num_obj_last_report = now for item in contents: @@ -738,7 +746,7 @@ def record_obj(self, obj, path, recurse=True, skip_errors=False): ) def _playback_path(self, pathinfo): - ''' + """ Play back one object from the file system to the ZODB. Params: @@ -752,43 +760,44 @@ def _playback_path(self, pathinfo): Side effects: In addition to the effect on the ZODB, it might add elements to `self.playback_todo` and/or `self.playback_fixorder`. 
- ''' - path = pathinfo['path'] + """ + path = pathinfo["path"] if self.recurse: self.num_obj_current += 1 now = time.time() if now - self.num_obj_last_report > 2: self.logger.info( - '%d obj checked of at least %d, current path %s' + "%d obj checked of at least %d, current path %s" % (self.num_obj_current, self.num_obj_total, path) ) self.num_obj_last_report = now else: # be more verbose because every path is explicitly requested - self.logger.info('Uploading %s' % path) + self.logger.info("Uploading %s" % path) # fspath is None if the object is to be deleted - fs_data = pathinfo['fspath'] and self.fs_parse(pathinfo['fspath']) + fs_data = pathinfo["fspath"] and self.fs_parse(pathinfo["fspath"]) # extend fs_data with layerinfo if fs_data: - fs_data['zodbsync_layer'] = pathinfo['layers'][ - pathinfo['layeridx']]['ident'] + fs_data["zodbsync_layer"] = pathinfo["layers"][pathinfo["layeridx"]][ + "ident" + ] # Traverse to the object if it exists parent_obj = None obj = self.app obj_id = None obj_path = [] - for part in path.split('/'): + for part in path.split("/"): if not part: continue if obj is None: # Some parent object is missing raise ValueError( - 'Object {} not found when uploading {}'.format( - '/'.join(obj_path), path + "Object {} not found when uploading {}".format( + "/".join(obj_path), path ) ) @@ -806,23 +815,23 @@ def _playback_path(self, pathinfo): return if fs_data is None: - if obj_id == 'acl_users' and path.startswith('/acl_users'): + if obj_id == "acl_users" and path.startswith("/acl_users"): return - self.logger.info('Removing object ' + path) + self.logger.info("Removing object " + path) try: parent_obj.manage_delObjects(ids=[obj_id]) except AttributeError as e: msg = ( - f'\n\nFailed to remove object {path}, ' - f'original error was {e}.\n' - f'Perhaps your layer workdir is empty?\n' - f'Possible solution: Execute layer-init or layer-update.\n' + f"\n\nFailed to remove object {path}, " + f"original error was {e}.\n" + f"Perhaps your layer workdir 
is empty?\n" + f"Possible solution: Execute layer-init or layer-update.\n" ) raise AssertionError(msg) from e return - if 'unsupported' in fs_data: - self.logger.warning('Skipping unsupported object ' + path) + if "unsupported" in fs_data: + self.logger.warning("Skipping unsupported object " + path) return contents = [] @@ -831,34 +840,37 @@ def _playback_path(self, pathinfo): # itself, in case a property with the same name is to be created. # The addition of new paths is not done here - playback_paths calls # them later on. - contents = pathinfo['children'] + contents = pathinfo["children"] srv_contents = obj_contents(obj) if obj else [] # Find IDs in Data.fs object not present in file system del_ids = [ - a for a in srv_contents - if a not in contents and - not (obj == self.app and a == 'acl_users') + a + for a in srv_contents + if a not in contents and not (obj == self.app and a == "acl_users") ] if del_ids: - self.logger.warning('Deleting objects ' + repr(del_ids)) + self.logger.warning("Deleting objects " + repr(del_ids)) obj.manage_delObjects(ids=del_ids) try: srv_data = ( - dict(mod_read( - obj, - default_owner=self.manager_user, - force_default_owner=self.force_default_owner, - )) - if obj is not None else None + dict( + mod_read( + obj, + default_owner=self.manager_user, + force_default_owner=self.force_default_owner, + ) + ) + if obj is not None + else None ) except Exception: - self.logger.exception('Unable to read object at %s' % path) + self.logger.exception("Unable to read object at %s" % path) raise if fs_data != srv_data: - self.logger.debug("Uploading: %s:%s" % (path, fs_data['type'])) + self.logger.debug("Uploading: %s:%s" % (path, fs_data["type"])) try: obj = mod_write( fs_data, @@ -868,13 +880,13 @@ def _playback_path(self, pathinfo): root=(obj if parent_obj is None else None), default_owner=self.default_owner, force_default_owner=self.force_default_owner, - layer=pathinfo['layers'][pathinfo['layeridx']]['ident'] + 
layer=pathinfo["layers"][pathinfo["layeridx"]]["ident"], ) except Exception: # If we do not want to get errors from missing # ExternalMethods, this can be used to skip them - severity = 'Skipping' if self.skip_errors else 'ERROR' - msg = '%s %s:%s' % (severity, path, fs_data['type']) + severity = "Skipping" if self.skip_errors else "ERROR" + msg = "%s %s:%s" % (severity, path, fs_data["type"]) if self.skip_errors: self.logger.warning(msg) return @@ -883,15 +895,17 @@ def _playback_path(self, pathinfo): raise self.num_obj_total += len(contents) - if hasattr(object_handlers[fs_data['type']], 'fix_order'): + if hasattr(object_handlers[fs_data["type"]], "fix_order"): # Store the data for later usage by `_playback_fixorder`. self.fs_data[path] = fs_data self.playback_fixorder.append(path) - self.playback_todo.extend([ - self.fs_pathinfo('{}{}/'.format(path, item)) - for item in reversed(contents) - ]) + self.playback_todo.extend( + [ + self.fs_pathinfo("{}{}/".format(path, item)) + for item in reversed(contents) + ] + ) def _playback_fixorder(self, path): """ @@ -907,34 +921,39 @@ def _playback_fixorder(self, path): entry from `self.fs_data`. 
def prepare_paths(self, paths):
    """
    Normalize a collection of paths for playback.

    For each path:
    - cut off a trailing special file name (anything whose last
      component starts with ``__``, e.g. ``__meta__``/``__source*``),
      keeping only the object directory,
    - strip a leading site prefix where present,
    then deduplicate, sort, and normalize to exactly one trailing slash.

    :param paths: iterable of path strings.
    :returns: sorted list of normalized paths, each ending in "/", or
        ``[]`` if nothing remains.
    """
    # Cut off file names: split once instead of computing rsplit twice
    # per path as before.
    objdirs = set()
    for path in paths:
        parts = path.rsplit("/", 1)
        objdirs.add(parts[0] if parts[-1].startswith("__") else path)
    # Strip the site prefix; deduplicate again because two inputs may
    # collapse onto the same stripped path.
    stripped = sorted(
        {
            path[len(self.site):] if path.startswith(self.site) else path
            for path in objdirs
        }
    )
    if not stripped:
        return []
    return [path.rstrip("/") + "/" for path in stripped]
self._playback_path(entry) - lastdel = entry['path'] + lastdel = entry["path"] todo.reverse() # Iterate until both stacks are empty. Whenever the topmost element in @@ -988,30 +1007,29 @@ def playback_paths(self, paths, recurse=True, override=False, # Handle next object on which to fix order unless there are # still subpaths to be handled path = fixorder[-1] - if not (todo and todo[-1]['path'].startswith(path)): + if not (todo and todo[-1]["path"].startswith(path)): self._playback_fixorder(fixorder.pop()) continue entry = todo.pop() self._playback_path(entry) except Exception: - self.logger.exception('Error with path: ' + entry['path']) + self.logger.exception("Error with path: " + entry["path"]) txn_mgr.abort() raise if dryrun: - self.logger.info('Dry-run. Rolling back') + self.logger.info("Dry-run. Rolling back") txn_mgr.abort() else: txn_mgr.commit() - def recent_changes(self, since_secs=None, txnid=None, limit=50, - search_limit=100): - '''Retrieve all distinct paths which have changed recently. Control how + def recent_changes(self, since_secs=None, txnid=None, limit=50, search_limit=100): + """Retrieve all distinct paths which have changed recently. Control how far to look back in time by supplying the number of seconds in Unix time in "since_secs" or the transaction ID at which to stop scanning in "txnid". Retrieves at most "limit" distinct paths. 
- ''' + """ paths = [] newest_txnid = None # Clear the request, so we can access undoable_transactions() @@ -1023,26 +1041,26 @@ def recent_changes(self, since_secs=None, txnid=None, limit=50, no_records = False limit_reached = False while cursor < search_limit: - txns = self.app._p_jar.db().undoInfo(cursor, cursor+step_size) + txns = self.app._p_jar.db().undoInfo(cursor, cursor + step_size) if len(txns) == 0 and cursor == 0: no_records = True break for txn in txns: if newest_txnid is None: - newest_txnid = txn['id'] - if since_secs and txn['time'] < since_secs: + newest_txnid = txn["id"] + if since_secs and txn["time"] < since_secs: done = True break - if txnid and txn['id'] == txnid: + if txnid and txn["id"] == txnid: done = True break - this_path = txn['description'].split('\n')[0] + this_path = txn["description"].split("\n")[0] # Ignore transaction descriptions not defining a path - if not this_path.startswith('/'): + if not this_path.startswith("/"): continue # Cut the method which originated the change, leaving # only the object. 
def txn_write(self, txnid):
    """Persist *txnid* as the newest known transaction ID."""
    target = os.path.join(self.base_dir, "__last_txn__")
    with open(target, "wb") as fd:
        fd.write(txnid)