diff --git a/.evergreen/auth_aws/aws_tester.py b/.evergreen/auth_aws/aws_tester.py
old mode 100644
new mode 100755
index c0cbc1ce9..2da714575
--- a/.evergreen/auth_aws/aws_tester.py
+++ b/.evergreen/auth_aws/aws_tester.py
@@ -44,7 +44,7 @@ def join(*parts):
 def run(args, env):
     """Run a python command in a subprocess."""
     env.update(os.environ.copy())
-    return subprocess.run([sys.executable] + args, env=env).returncode
+    return subprocess.run([sys.executable, *args], env=env, check=False).returncode
 
 
 def create_user(user, kwargs):
diff --git a/.evergreen/auth_oidc/azure/remote-scripts/test.py b/.evergreen/auth_oidc/azure/remote-scripts/test.py
index 466441329..696bf07ed
--- a/.evergreen/auth_oidc/azure/remote-scripts/test.py
+++ b/.evergreen/auth_oidc/azure/remote-scripts/test.py
@@ -21,8 +21,8 @@ def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult:
                 status = response.status
                 body = response.read().decode('utf8')
         except Exception as e:
-            msg = "Failed to acquire IMDS access token: %s" % e
-            raise ValueError(msg)
+            msg = "Failed to acquire IMDS access token"
+            raise ValueError(msg) from e
 
         if status != 200:
             print(body)
@@ -30,8 +30,8 @@ def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult:
             raise ValueError(msg)
         try:
             data = json.loads(body)
-        except Exception:
-            raise ValueError("Azure IMDS response must be in JSON format.")
+        except Exception as e:
+            raise ValueError("Azure IMDS response must be in JSON format.") from e
 
         for key in ["access_token", "expires_in"]:
             if not data.get(key):
diff --git a/.evergreen/auth_oidc/gcp/remote-scripts/test.py b/.evergreen/auth_oidc/gcp/remote-scripts/test.py
index c8fa9aaa6..caecbad14
--- a/.evergreen/auth_oidc/gcp/remote-scripts/test.py
+++ b/.evergreen/auth_oidc/gcp/remote-scripts/test.py
@@ -1,6 +1,5 @@
 from pymongo import MongoClient
 import os
-import json
 from urllib.request import urlopen, Request
 from pymongo.auth_oidc import OIDCCallback, OIDCCallbackContext, OIDCCallbackResult
 
@@ -20,7 +19,7 @@ def fetch(self, context: OIDCCallbackContext) -> OIDCCallbackResult:
             body = response.read().decode('utf8')
         except Exception as e:
             msg = "Failed to acquire IMDS access token: %s" % e
-            raise ValueError(msg)
+            raise ValueError(msg) from e
 
         if status != 200:
             print(body)
diff --git a/.evergreen/auth_oidc/oidc_write_orchestration.py b/.evergreen/auth_oidc/oidc_write_orchestration.py
old mode 100644
new mode 100755
index 97abc9ac6..bfc930052
--- a/.evergreen/auth_oidc/oidc_write_orchestration.py
+++ b/.evergreen/auth_oidc/oidc_write_orchestration.py
@@ -9,7 +9,7 @@
 HERE = os.path.abspath(os.path.dirname(__file__))
 sys.path.insert(0, HERE)
 
-from utils import get_secrets, MOCK_ENDPOINT, DEFAULT_CLIENT
+from utils import get_secrets, DEFAULT_CLIENT
 
 
 def azure():
diff --git a/.evergreen/auth_oidc/utils.py b/.evergreen/auth_oidc/utils.py
index a3220b302..d903d6936
--- a/.evergreen/auth_oidc/utils.py
+++ b/.evergreen/auth_oidc/utils.py
@@ -1,8 +1,7 @@
-import json
 import os
 import sys
 
-import boto3
+import boto3  # noqa: F401
 
 
 HERE = os.path.abspath(os.path.dirname(__file__))
@@ -11,7 +10,7 @@ def join(*args):
 aws_lib = join(os.path.dirname(HERE), 'auth_aws', 'lib')
 sys.path.insert(0, aws_lib)
-from aws_handle_oidc_creds import get_id_token, MOCK_ENDPOINT
+from aws_handle_oidc_creds import get_id_token, MOCK_ENDPOINT  # noqa: F401
 
 secrets_root = join(os.path.dirname(HERE), 'secrets_handling')
 sys.path.insert(0, secrets_root)
 from setup_secrets import get_secrets as root_get_secrets
diff --git a/.evergreen/csfle/fake_azure.py b/.evergreen/csfle/fake_azure.py
index fec1c3410..90d321e39
--- a/.evergreen/csfle/fake_azure.py
+++ b/.evergreen/csfle/fake_azure.py
@@ -115,7 +115,7 @@ def get_oauth2_token():
     if case == 'slow':
         return _slow()
 
-    assert case in (None, ''), 'Unknown HTTP test case "{}"'.format(case)
+    assert case in (None, ''), f'Unknown HTTP test case "{case}"'
 
     return {
         'access_token': 'magic-cookie',
@@ -148,7 +148,6 @@ def _slow() -> Iterable[bytes]:
 
 if __name__ == '__main__':
     print(
-        'RECOMMENDED: Run this script using bottle.py (e.g. [{} {}/bottle.py fake_azure:imds])'
-        .format(sys.executable,
-                Path(__file__).resolve().parent))
+        f'RECOMMENDED: Run this script using bottle.py (e.g. [{sys.executable} {Path(__file__).resolve().parent}/bottle.py fake_azure:imds])'
+    )
     imds.run()
diff --git a/.evergreen/csfle/gcpkms/mock_server.py b/.evergreen/csfle/gcpkms/mock_server.py
index 51071f6c2..22bdef6d0
--- a/.evergreen/csfle/gcpkms/mock_server.py
+++ b/.evergreen/csfle/gcpkms/mock_server.py
@@ -19,8 +19,7 @@ def b64_to_b64url(b64):
 def dict_to_b64url(arg):
     as_json = json.dumps(arg).encode("utf8")
     as_b64 = base64.b64encode(as_json).decode("utf8")
-    as_b64url = b64_to_b64url(as_b64)
-    return as_b64url
+    return b64_to_b64url(as_b64)
 
 
 def get_access_token():
@@ -34,7 +33,7 @@ def get_access_token():
     if "GOOGLE_APPLICATION_CREDENTIALS" not in os.environ:
         raise Exception(
             "please set GOOGLE_APPLICATION_CREDENTIALS environment variable to a JSON Service account key")
-    creds = json.load(open(os.environ["GOOGLE_APPLICATION_CREDENTIALS"], "r"))
+    creds = json.load(open(os.environ["GOOGLE_APPLICATION_CREDENTIALS"]))
     private_key = creds["private_key"].encode("utf8")
     client_email = creds["client_email"]
 
@@ -82,7 +81,7 @@ def main():
     global private_key
     port = 5000
     server = http.server.HTTPServer(("localhost", port), Handler)
-    print ("Listening on port {}".format(port))
+    print (f"Listening on port {port}")
     server.serve_forever()
 
 
diff --git a/.evergreen/csfle/kms_failpoint_server.py b/.evergreen/csfle/kms_failpoint_server.py
index 460040d59..e9a4710bf
--- a/.evergreen/csfle/kms_failpoint_server.py
+++ b/.evergreen/csfle/kms_failpoint_server.py
@@ -64,7 +64,7 @@ def _send_json(self, data: dict):
 
     def _send_not_found(self):
         self.send_response(http.HTTPStatus.NOT_FOUND)
-        msg = "Not found".encode("utf8")
+        msg = b"Not found"
        self.send_header("Content-Type", "text/plain")
         self.send_header("Content-Length", len(msg))
         self.end_headers()
@@ -93,18 +93,18 @@ def do_POST(self):
                 remaining_http_fails = data['count']
             else:
                 self._send_not_found()
-                return
-            print("Enabling failpoint for type: {}".format(failpoint_type))
+                return None
+            print(f"Enabling failpoint for type: {failpoint_type}")
             self._send_json(
-                {"message": "failpoint set for type: '{}'".format(failpoint_type)}
+                {"message": f"failpoint set for type: '{failpoint_type}'"}
             )
-            return
+            return None
 
         if path.match("/reset"):
             remaining_http_fails = 0
             remaining_network_fails = 0
             self._send_json({"message": "failpoints reset"})
-            return
+            return None
 
         # If a failpoint was set, fail the request.
         if remaining_network_fails > 0:
@@ -116,40 +116,39 @@ def do_POST(self):
             aws_op = self.headers['X-Amz-Target']
             if aws_op == "TrentService.Encrypt":
                 self._send_json({"CiphertextBlob": base64.b64encode(fake_ciphertext.encode()).decode()})
-                return
-            elif aws_op == "TrentService.Decrypt":
+                return None
+            if aws_op == "TrentService.Decrypt":
                 if remaining_http_fails > 0:
                     self._http_fail()
-                    return
+                    return None
                 self._send_json({"Plaintext": base64.b64encode(fake_plaintext.encode()).decode()})
-                return
-            else:
-                self._send_not_found()
-                return
+                return None
+            self._send_not_found()
+            return None
 
         # GCP or Azure auth path: /c01df00d-cafe-g00d-dea1-decea5sedbeef/oauth2/v2.0/token
         if path.match("*token"):
             if remaining_http_fails > 0:
                 self._http_fail()
-                return
+                return None
             return self._send_json({"access_token": "foo", "expires_in": 99999})
         # GCP encrypt path: /v1/projects/{project}/locations/{location}/keyRings/{key-ring}/cryptoKeys/{key}:encrypt
-        elif path.match("*encrypt"):
+        if path.match("*encrypt"):
             return self._send_json({"ciphertext": base64.b64encode(fake_ciphertext.encode()).decode()})
         # GCP decrypt path: /v1/projects/{project}/locations/{location}/keyRings/{key-ring}/cryptoKeys/{key}:decrypt
-        elif path.match("*decrypt"):
+        if path.match("*decrypt"):
             if remaining_http_fails > 0:
                 self._http_fail()
-                return
+                return None
             return self._send_json({"plaintext": base64.b64encode(fake_plaintext.encode()).decode()})
         # Azure decrypt path: /keys/{key-name}/{key-version}/unwrapkey
-        elif path.match("*unwrapkey"):
+        if path.match("*unwrapkey"):
             if remaining_http_fails > 0:
                 self._http_fail()
-                return
+                return None
             return self._send_json({"value": base64.b64encode(fake_plaintext.encode()).decode()})
         # Azure encrypt path: /keys/{key-name}/{key-version}/wrapkey
-        elif path.match("*wrapkey"):
+        if path.match("*wrapkey"):
             return self._send_json({"value": base64.b64encode(fake_ciphertext.encode()).decode()})
 
         self._send_not_found()
diff --git a/.evergreen/csfle/kms_http_common.py b/.evergreen/csfle/kms_http_common.py
index 1dae48b54..d363e2313
--- a/.evergreen/csfle/kms_http_common.py
+++ b/.evergreen/csfle/kms_http_common.py
@@ -71,12 +71,11 @@ def do_GET(self):
         else:
             self.send_response(http.HTTPStatus.NOT_FOUND)
             self.end_headers()
-            self.wfile.write("Unknown URL".encode())
+            self.wfile.write(b"Unknown URL")
 
     @abstractmethod
     def do_POST(self):
         """Serve a POST request."""
-        pass
 
     def _send_reply(self, data, status=http.HTTPStatus.OK):
         print("Sending Response: " + data.decode())
diff --git a/.evergreen/csfle/kms_http_server.py b/.evergreen/csfle/kms_http_server.py
old mode 100644
new mode 100755
index 3678a20d5..ae2c3c0b9
--- a/.evergreen/csfle/kms_http_server.py
+++ b/.evergreen/csfle/kms_http_server.py
@@ -56,7 +56,7 @@ def do_POST(self):
         else:
             self.send_response(http.HTTPStatus.NOT_FOUND)
             self.end_headers()
-            self.wfile.write("Unknown URL".encode())
+            self.wfile.write(b"Unknown URL")
 
     def _do_post(self):
         c_len = int(self.headers.get('content-length'))
@@ -133,15 +133,15 @@ def _do_encrypt(self, raw_input):
         }
         self._send_reply(json.dumps(response).encode('utf-8'))
-        return
+        return None
 
     def _do_encrypt_faults(self, raw_ciphertext):
         kms_http_common.stats.fault_calls += 1
 
         if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT:
-            self._send_reply("Internal Error of some sort.".encode(), http.HTTPStatus.INTERNAL_SERVER_ERROR)
+            self._send_reply(b"Internal Error of some sort.", http.HTTPStatus.INTERNAL_SERVER_ERROR)
             return
 
-        elif kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_WRONG_FIELDS:
+        if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_WRONG_FIELDS:
             response = {
                 "SomeBlob" : raw_ciphertext,
                 "KeyId" : "foo",
@@ -149,7 +149,7 @@ def _do_encrypt_faults(self, raw_ciphertext):
             self._send_reply(json.dumps(response).encode('utf-8'))
             return
 
-        elif kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_BAD_BASE64:
+        if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_BAD_BASE64:
             response = {
                 "CiphertextBlob" : "foo",
                 "KeyId" : "foo",
@@ -157,7 +157,7 @@ def _do_encrypt_faults(self, raw_ciphertext):
             self._send_reply(json.dumps(response).encode('utf-8'))
             return
 
-        elif kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_CORRECT_FORMAT:
+        if kms_http_common.fault_type == kms_http_common.FAULT_ENCRYPT_CORRECT_FORMAT:
             response = {
                 "__type" : "NotFoundException",
                 "Message" : "Error encrypting message",
@@ -190,15 +190,15 @@ def _do_decrypt(self, raw_input):
         }
         self._send_reply(json.dumps(response).encode('utf-8'))
-        return
+        return None
 
     def _do_decrypt_faults(self, blob):
         kms_http_common.stats.fault_calls += 1
 
         if kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT:
-            self._send_reply("Internal Error of some sort.".encode(), http.HTTPStatus.INTERNAL_SERVER_ERROR)
+            self._send_reply(b"Internal Error of some sort.", http.HTTPStatus.INTERNAL_SERVER_ERROR)
             return
-        elif kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_WRONG_KEY:
+        if kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_WRONG_KEY:
             response = {
                 "Plaintext" : "ta7DXE7J0OiCRw03dYMJSeb8nVF5qxTmZ9zWmjuX4zW/SOorSCaY8VMTWG+cRInMx/rr/+QeVw2WjU2IpOSvMg==",
                 "KeyId" : "Not a clue",
@@ -206,7 +206,7 @@ def _do_decrypt_faults(self, blob):
             self._send_reply(json.dumps(response).encode('utf-8'))
             return
-        elif kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_CORRECT_FORMAT:
+        if kms_http_common.fault_type == kms_http_common.FAULT_DECRYPT_CORRECT_FORMAT:
             response = {
                 "__type" : "NotFoundException",
                 "Message" : "Error decrypting message",
diff --git a/.evergreen/csfle/kms_kmip_client.py b/.evergreen/csfle/kms_kmip_client.py
old mode 100644
new mode 100755
diff --git a/.evergreen/csfle/kms_kmip_server.py b/.evergreen/csfle/kms_kmip_server.py
old mode 100644
new mode 100755
diff --git a/.evergreen/csfle/setup_secrets.py b/.evergreen/csfle/setup_secrets.py
old mode 100644
new mode 100755
index 023ad5fb0..5a720e6b3
--- a/.evergreen/csfle/setup_secrets.py
+++ b/.evergreen/csfle/setup_secrets.py
@@ -15,13 +15,13 @@
 credentials = client.get_session_token()["Credentials"]
 
 with open('secrets-export.sh', 'ab') as fid:
-    fid.write(f'\nexport CSFLE_AWS_TEMP_ACCESS_KEY_ID="{credentials["AccessKeyId"]}"'.encode('utf8'))
-    fid.write(f'\nexport CSFLE_AWS_TEMP_SECRET_ACCESS_KEY="{credentials["SecretAccessKey"]}"'.encode('utf8'))
-    fid.write(f'\nexport CSFLE_AWS_TEMP_SESSION_TOKEN="{credentials["SessionToken"]}"'.encode('utf8'))
+    fid.write(f'\nexport CSFLE_AWS_TEMP_ACCESS_KEY_ID="{credentials["AccessKeyId"]}"'.encode())
+    fid.write(f'\nexport CSFLE_AWS_TEMP_SECRET_ACCESS_KEY="{credentials["SecretAccessKey"]}"'.encode())
+    fid.write(f'\nexport CSFLE_AWS_TEMP_SESSION_TOKEN="{credentials["SessionToken"]}"'.encode())
     for key in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY', 'AWS_DEFAULT_REGION', 'AWS_SESSION_TOKEN',
                 'CSFLE_TLS_CA_FILE', 'CSFLE_TLS_CERT_FILE', 'CSFLE_TLS_CLIENT_CERT_FILE']:
-        fid.write(f'\nexport {key}="{os.environ[key]}"'.encode('utf8'))
-    fid.write('\n'.encode('utf8'))
+        fid.write(f'\nexport {key}="{os.environ[key]}"'.encode())
+    fid.write(b'\n')
 
 print("Getting CSFLE temp creds...done")
diff --git a/.evergreen/mongodl.py b/.evergreen/mongodl.py
index 28d91a59f..f0baadd70 100755
--- a/.evergreen/mongodl.py
+++ b/.evergreen/mongodl.py
@@ -157,8 +157,7 @@ def infer_target_from_os_release(osr: Path) -> str:
     # Extract the "ID" field
     id_re = re.compile(r'\bID=("?)(.*)\1')
     mat = id_re.search(os_rel)
-    assert mat, 'Unable to detect ID from [{}] content:\n{}'.format(
-        osr, os_rel)
+    assert mat, f'Unable to detect ID from [{osr}] content:\n{os_rel}'
     os_id = mat.group(2)
     if os_id == 'arch':
         # There are no Archlinux-specific MongoDB downloads, so we'll just use
@@ -168,8 +167,7 @@ def infer_target_from_os_release(osr: Path) -> str:
     # Extract the "VERSION_ID" field
     ver_id_re = re.compile(r'VERSION_ID=("?)(.*)\1')
     mat = ver_id_re.search(os_rel)
-    assert mat, 'Unable to detect VERSION_ID from [{}] content:\n{}'.format(
-        osr, os_rel)
+    assert mat, f'Unable to detect VERSION_ID from [{osr}] content:\n{os_rel}'
     ver_id = mat.group(2)
     # Map the ID to the download ID
     mapped_id = DISTRO_ID_MAP.get(os_id)
@@ -184,23 +182,22 @@ def infer_target_from_os_release(osr: Path) -> str:
         if mapped_version is None:
             # If this raises, a version/pattern needs to be added
             # to DISTRO_VERSION_MAP
-            raise RuntimeError("We don't know how to map {} version '{}' "
-                               "to an upstream {} version. Please contribute!"
-                               "".format(os_id, ver_id, mapped_id))
+            raise RuntimeError(f"We don't know how to map {os_id} version '{ver_id}' "
+                               f"to an upstream {mapped_id} version. Please contribute!")
         ver_id = mapped_version
         os_id = mapped_id
     os_id = os_id.lower()
     if os_id not in DISTRO_ID_TO_TARGET:
-        raise RuntimeError("We don't know how to map '{}' to a distribution "
-                           "download target. Please contribute!".format(os_id))
+        raise RuntimeError(f"We don't know how to map '{os_id}' to a distribution "
+                           "download target. Please contribute!")
     # Find the download target based on a filename-style pattern:
     ver_table = DISTRO_ID_TO_TARGET[os_id]
     for pattern, target in ver_table.items():
         if fnmatch(ver_id, pattern):
             return target
     raise RuntimeError(
-        "We don't know how to map '{}' version '{}' to a distribution "
-        "download target. Please contribute!".format(os_id, ver_id))
+        f"We don't know how to map '{os_id}' version '{ver_id}' to a distribution "
+        "download target. Please contribute!")
Please contribute!") def user_caches_root() -> Path: @@ -253,7 +250,7 @@ def version_tup(version: str) -> 'tuple[int, int, int, int, int]': return tuple([int(maj), int(min), 0, 0, 0]) mat = VERSION_RE.match(version) - assert mat, ('Failed to parse "{}" as a version number'.format(version)) + assert mat, (f'Failed to parse "{version}" as a version number') major, minor, patch, tag, tagnum = list(mat.groups()) if tag is None: # No rc tag is greater than an equal base version with any rc tag @@ -569,7 +566,7 @@ def download_file(self, url: str) -> DownloadResult: except urllib.error.HTTPError as e: if e.code != 304: raise RuntimeError( - 'Failed to download [{u}]'.format(u=url)) from e + f'Failed to download [{url}]') from e assert dest.is_file(), ( 'The download cache is missing an expected file', dest) return DownloadResult(False, dest) @@ -633,19 +630,18 @@ def _print_list(db: CacheDB, version: 'str | None', target: 'str | None', component=component) for version, target, arch, edition, comp_key, comp_data in matching: counter += 1 - print('Download: {}\n' - ' Version: {}\n' - ' Target: {}\n' - ' Arch: {}\n' - ' Edition: {}\n' - ' Info: {}\n\n'.format(comp_key, version, target, arch, - edition, comp_data)) + print(f'Download: {comp_key}\n' + f' Version: {version}\n' + f' Target: {target}\n' + f' Arch: {arch}\n' + f' Edition: {edition}\n' + f' Info: {comp_data}\n\n') if counter == 1: print('Only one matching item') elif counter == 0: print('No items matched the listed filters') else: - print('{} available downloadable components'.format(counter)) + print(f'{counter} available downloadable components') print('(Omit filter arguments for a list of available filters)') return @@ -678,15 +674,15 @@ def _print_list(db: CacheDB, version: 'str | None', target: 'str | None', initial_indent=' ', subsequent_indent=' ')) print('Architectures:\n' - ' {}\n' + f' {arches}\n' 'Targets:\n' - '{}\n' + f'{targets}\n' 'Editions:\n' - ' {}\n' + f' {editions}\n' 'Versions:\n' - '{}\n' + f'{versions}\n' 'Components:\n' - ' {}\n'.format(arches, targets, editions, versions, components)) + f' {components}\n') def infer_arch(): @@ -722,8 +718,7 @@ def _published_build_url(cache: Cache, version: str, target: str, arch: str, if tup is None: raise ValueError( 'No download was found for ' - 'version="{}" target="{}" arch="{}" edition="{}" component="{}"'.format( - version, target, arch, edition, component)) + f'version="{version}" target="{target}" arch="{arch}" edition="{edition}" component="{component}"') data = json.loads(tup.data_json) return data[value] @@ -752,28 +747,21 @@ def _latest_build_url(target: str, arch: str, edition: str, component: str, 'archive': 'mongodb', 'crypt_shared': 'mongo_crypt_shared_v1', }.get(component, component) - base = 'https://downloads.10gen.com/{plat}'.format(plat=platform) + base = f'https://downloads.10gen.com/{platform}' # Windows has Zip files ext = 'zip' if target == 'windows' else 'tgz' # Enterprise builds have an "enterprise" infix ent_infix = 'enterprise-' if edition == 'enterprise' else '' # Some platforms have a filename infix - tgt_infix = ((target + '-') # - if target not in ('windows', 'win32', 'macos') # + tgt_infix = ((target + '-') + if target not in ('windows', 'win32', 'macos') else '') # Non-master branch uses a filename infix br_infix = ((branch + '-') if - (branch is not None and branch != 'master') # + (branch is not None and branch != 'master') else '') - filename = '{comp}-{typ}-{arch}-{enterprise_}{target_}{br_}latest.{ext}'.format( - comp=component_name, - 
-        typ=typ,
-        arch=arch,
-        enterprise_=ent_infix,
-        target_=tgt_infix,
-        br_=br_infix,
-        ext=ext)
-    return '{}/{}'.format(base, filename)
+    filename = f'{component_name}-{typ}-{arch}-{ent_infix}{tgt_infix}{br_infix}latest.{ext}'
+    return f'{base}/{filename}'
 
 
 def _dl_component(cache: Cache, out_dir: Path, version: str, target: str,
@@ -781,8 +769,7 @@ def _dl_component(cache: Cache, out_dir: Path, version: str, target: str,
                   pattern: 'str | None', strip_components: int, test: bool,
                   no_download: bool, latest_build_branch: 'str|None') -> ExpandResult:
-    print('Download {} {}-{} for {}-{}'.format(component, version, edition,
-                                               target, arch), file=sys.stderr)
+    print(f'Download {component} {version}-{edition} for {target}-{arch}', file=sys.stderr)
     if version == 'latest-build':
         dl_url = _latest_build_url(target, arch, edition, component,
                                    latest_build_branch)
@@ -791,7 +778,7 @@ def _dl_component(cache: Cache, out_dir: Path, version: str, target: str,
                                      component)
     if no_download:
         print(dl_url)
-        return
+        return None
 
     cached = cache.download_file(dl_url).path
     return _expand_archive(cached, out_dir,
@@ -845,8 +832,8 @@ def _expand_archive(ar: Path, dest: Path, pattern: 'str | None',
     Expand the archive members from 'ar' into 'dest'. If 'pattern' is
     not-None, only extracts members that match the pattern.
     '''
 
-    print('Extract from: [{}]'.format(ar.name), file=sys.stderr)
-    print(' into: [{}]'.format(dest), file=sys.stderr)
+    print(f'Extract from: [{ar.name}]', file=sys.stderr)
+    print(f' into: [{dest}]', file=sys.stderr)
     if ar.suffix == '.zip':
         n_extracted = _expand_zip(ar, dest,
@@ -864,27 +851,22 @@ def _expand_archive(ar: Path, dest: Path, pattern: 'str | None',
     verb = 'would be' if test else 'were'
     if n_extracted == 0:
         if pattern and strip_components:
-            print('NOTE: No files {verb} extracted. Likely all files {verb} '
-                  'excluded by "--only={p}" and/or "--strip-components={s}"'.
-                  format(p=pattern, s=strip_components, verb=verb), file=sys.stderr)
+            print(f'NOTE: No files {verb} extracted. Likely all files {verb} '
+                  f'excluded by "--only={pattern}" and/or "--strip-components={strip_components}"', file=sys.stderr)
         elif pattern:
-            print('NOTE: No files {verb} extracted. Likely all files {verb} '
-                  'excluded by the "--only={p}" filter'.format(p=pattern,
-                                                               verb=verb), file=sys.stderr)
+            print(f'NOTE: No files {verb} extracted. Likely all files {verb} '
+                  f'excluded by the "--only={pattern}" filter', file=sys.stderr)
        elif strip_components:
-            print('NOTE: No files {verb} extracted. Likely all files {verb} '
-                  'excluded by "--strip-components={s}"'.format(
-                      s=strip_components, verb=verb), file=sys.stderr)
+            print(f'NOTE: No files {verb} extracted. Likely all files {verb} '
+                  f'excluded by "--strip-components={strip_components}"', file=sys.stderr)
         else:
-            print('NOTE: No files {verb} extracted. Empty archive?'.format(
-                verb=verb), file=sys.stderr)
+            print(f'NOTE: No files {verb} extracted. Empty archive?', file=sys.stderr)
         return ExpandResult.Empty
-    elif n_extracted == 1:
+    if n_extracted == 1:
         print('One file {v} extracted'.format(v='would be' if test else 'was'), file=sys.stderr)
         return ExpandResult.Okay
-    else:
-        print('{n} files {verb} extracted'.format(n=n_extracted, verb=verb), file=sys.stderr)
-        return ExpandResult.Okay
+    print(f'{n_extracted} files {verb} extracted', file=sys.stderr)
+    return ExpandResult.Okay
 
 
 def _expand_tgz(ar: Path, dest: Path, pattern: 'str | None',
@@ -899,7 +881,7 @@ def _expand_tgz(ar: Path, dest: Path, pattern: 'str | None',
                 pattern,
                 strip_components,
                 mem.isdir(),
-                lambda: cast('IO[bytes]', tf.extractfile(mem)),
+                lambda: cast('IO[bytes]', tf.extractfile(mem)),  # noqa: B023
                 mem.mode,
                 test=test,
             )
@@ -918,7 +900,7 @@ def _expand_zip(ar: Path, dest: Path, pattern: 'str | None',
                 pattern,
                 strip_components,
                 item.filename.endswith('/'),  ## Equivalent to: item.is_dir(),
-                lambda: zf.open(item, 'r'),
+                lambda: zf.open(item, 'r'),  # noqa: B023
                 0o655,
                 test=test,
             )
@@ -946,7 +928,7 @@ def _maybe_extract_member(out: Path, relpath: PurePath, pattern: 'str | None',
         return 0
     stripped = _pathjoin(relpath.parts[strip:])
     dest = Path(out) / stripped
-    print('\n -> [{}]'.format(dest), file=sys.stderr)
+    print(f'\n -> [{dest}]', file=sys.stderr)
     if test:
         # We are running in test-only mode: Do not do anything
         return 1
@@ -1058,7 +1040,7 @@ def main(argv: 'Sequence[str]'):
     if args.list:
         _print_list(cache.db, args.version, args.target, args.arch,
                     args.edition, args.component)
-        return
+        return None
 
     if args.version is None:
         raise argparse.ArgumentError(None, 'A "--version" is required')
diff --git a/.evergreen/mongosh-dl.py b/.evergreen/mongosh-dl.py
old mode 100644
new mode 100755
index 261b7202f..5a6ed65fd
--- a/.evergreen/mongosh-dl.py
+++ b/.evergreen/mongosh-dl.py
@@ -57,7 +57,7 @@ def _download(out_dir: Path, version: str, target: str, arch: str,
               pattern: 'str | None', strip_components: int, test: bool,
               no_download: bool,) -> int:
-    print('Download {} mongosh for {}-{}'.format(version, target, arch), file=sys.stderr)
+    print(f'Download {version} mongosh for {target}-{arch}', file=sys.stderr)
     if version == "latest":
         version = _get_latest_version()
     if arch == "x86_64":
diff --git a/.evergreen/ocsp/mock_ocsp_responder.py b/.evergreen/ocsp/mock_ocsp_responder.py
index 0ffbe7d12..1e3aa958c 100644
--- a/.evergreen/ocsp/mock_ocsp_responder.py
+++ b/.evergreen/ocsp/mock_ocsp_responder.py
@@ -39,17 +39,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import unicode_literals, division, absolute_import, print_function
 
 import logging
 import base64
 import inspect
 import re
 import enum
-import sys
 import textwrap
 from datetime import datetime, timezone, timedelta
-from typing import Callable, Tuple, Optional
 
 from asn1crypto import x509, keys, core, ocsp
 from asn1crypto.ocsp import OCSPRequest, OCSPResponse
@@ -61,11 +58,6 @@
 logger = logging.getLogger(__name__)
 
 
-if sys.version_info < (3,):
-    byte_cls = str
-else:
-    byte_cls = bytes
-
 def _pretty_message(string, *params):
     """
     Takes a multi-line string and does the following:
@@ -90,9 +82,7 @@ def _pretty_message(string, *params):
     if params:
         output = output % params
 
-    output = output.strip()
-
-    return output
+    return output.strip()
 
 
 def _type_name(value):
@@ -120,7 +110,7 @@ def _writer(func):
     return property(fget=lambda self: getattr(self, '_%s' % name), fset=func)
 
 
-class OCSPResponseBuilder(object):
+class OCSPResponseBuilder:
 
     _response_status = None
     _certificate = None
@@ -135,7 +125,7 @@ class OCSPResponseBuilder(object):
     _response_data_extensions = None
     _single_response_extensions = None
 
-    def __init__(self, response_status, certificate_status_list=[], revocation_date=None):
+    def __init__(self, response_status, certificate_status_list=None, revocation_date=None):
         """
         Unless changed, responses will use SHA-256 for the signature,
         and will be valid from the moment created for one week.
@@ -169,7 +159,7 @@ def __init__(self, response_status, certificate_status_list=[], revocation_date=
             not "good" or "unknown".
         """
         self._response_status = response_status
-        self._certificate_status_list = certificate_status_list
+        self._certificate_status_list = certificate_status_list or []
         self._revocation_date = revocation_date
 
         self._key_hash_algo = 'sha1'
@@ -183,7 +173,7 @@ def nonce(self, value):
         The nonce that was provided during the request.
""" - if not isinstance(value, byte_cls): + if not isinstance(value, bytes): raise TypeError(_pretty_message( ''' nonce must be a byte string, not %s @@ -494,10 +484,10 @@ def validate(self): time = datetime(2018, 1, 1, 1, 00, 00, 00, timezone.utc) if self._fault == FAULT_REVOKED: return (CertificateStatus.revoked, time) - elif self._fault == FAULT_UNKNOWN: + if self._fault == FAULT_UNKNOWN: return (CertificateStatus.unknown, None) - elif self._fault != None: - raise NotImplemented('Fault type could not be found') + if self._fault is not None: + raise NotImplementedError('Fault type could not be found') return (CertificateStatus.good, time) def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: @@ -509,7 +499,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: request_list = tbs_request['request_list'] if len(request_list) < 1: logger.warning('Received OCSP request with no requests') - raise NotImplemented('Empty requests not supported') + raise NotImplementedError('Empty requests not supported') single_request = request_list[0] # TODO: Support more than one request req_cert = single_request['req_cert'] @@ -525,11 +515,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: certificate_status_list = [(serial, certificate_status.value)] # Build the response - builder = OCSPResponseBuilder(**{ - 'response_status': ResponseStatus.successful.value, - 'certificate_status_list': certificate_status_list, - 'revocation_date': revocation_date, - }) + builder = OCSPResponseBuilder(response_status=ResponseStatus.successful.value, certificate_status_list=certificate_status_list, revocation_date=revocation_date) # Parse extensions for extension in tbs_request['request_extensions']: @@ -557,7 +543,7 @@ def _build_ocsp_response(self, ocsp_request: OCSPRequest) -> OCSPResponse: return self._fail(ResponseStatus.internal_error) # If it's an unknown non-critical extension, we can safely ignore it. - elif unknown is True: + if unknown is True: logger.info('Ignored unknown non-critical extension: %r', dict(extension.native)) # Set certificate issuer diff --git a/.evergreen/secrets_handling/setup_secrets.py b/.evergreen/secrets_handling/setup_secrets.py old mode 100644 new mode 100755 index 40515e931..880f3e095 --- a/.evergreen/secrets_handling/setup_secrets.py +++ b/.evergreen/secrets_handling/setup_secrets.py @@ -27,8 +27,7 @@ def get_secrets(vaults, region, profile): # This will only fail locally. 
         resp = client.assume_role(RoleArn=AWS_ROLE_ARN, RoleSessionName=str(uuid.uuid4()))
     except Exception as e:
-        print(e)
-        raise ValueError("Please provide a profile (typically using AWS_PROFILE)")
+        raise ValueError("Please provide a profile (typically using AWS_PROFILE)") from e
 
     creds = resp['Credentials']
 
diff --git a/.evergreen/socks5srv.py b/.evergreen/socks5srv.py
index 8adaff6d8..8b7fcb80c 100755
--- a/.evergreen/socks5srv.py
+++ b/.evergreen/socks5srv.py
@@ -34,7 +34,7 @@ def parse_single_mapping(string):
 
   match = re.match(full_re, string)
   if match is None:
-    raise Exception("Mapping {} does not match format '{{host}}:{{port}} to {{host}}:{{port}}'".format(string))
+    raise Exception(f"Mapping {string} does not match format '{{host}}:{{port}} to {{host}}:{{port}}'")
   src = ((match.group('src_ipv6') or match.group('src_host')).encode('utf8'), int(match.group('src_port')))
   dst = ((match.group('dst_ipv6') or match.group('dst_host')).encode('utf8'), int(match.group('dst_port')))
 
@@ -109,7 +109,7 @@ def read_exact(self, n):
     while bytes_read < n:
       try:
         chunk_length = self.request.recv_into(mv[bytes_read:])
-      except OSError as exc:
+      except OSError:
         return None
       if chunk_length == 0:
         return None
@@ -125,11 +125,11 @@ def create_outgoing_tcp_connection(self, dst, port):
      af, socktype, proto, canonname, sa = res
      try:
        outgoing = socket.socket(af, socktype, proto)
-      except OSError as msg:
+      except OSError:
        continue
      try:
        outgoing.connect(sa)
-      except OSError as msg:
+      except OSError:
        outgoing.close()
        continue
      break
@@ -228,7 +228,7 @@ def raw_proxy(self, a, b):
     while True:
       try:
         (readable, _, _) = select.select([a, b], [], [])
-      except (select.error, ValueError):
+      except (OSError, ValueError):
         return
 
       if not readable:
diff --git a/evergreen_config_generator/evergreen_config_generator/__init__.py b/evergreen_config_generator/evergreen_config_generator/__init__.py
index 7d32e3ecf..51d9745d8 100644
--- a/evergreen_config_generator/evergreen_config_generator/__init__.py
+++ b/evergreen_config_generator/evergreen_config_generator/__init__.py
@@ -25,7 +25,7 @@
     raise
 
 
-class ConfigObject(object):
+class ConfigObject:
     def __init__(self, *args, **kwargs):
         super(ConfigObject, self).__init__()
 
@@ -60,7 +60,7 @@ def __init__(self, *args, **kwargs):
             type(self).represent_config_object)
 
     def represent_scalar(self, tag, value, style=None):
-        if isinstance(value, (str, unicode)) and '\n' in value:
+        if isinstance(value, str) and '\n' in value:
             style = '|'
         return super(_Dumper, self).represent_scalar(tag, value, style)
 
diff --git a/evergreen_config_generator/evergreen_config_generator/tasks.py b/evergreen_config_generator/evergreen_config_generator/tasks.py
index 04e1da479..e43afc653 100644
--- a/evergreen_config_generator/evergreen_config_generator/tasks.py
+++ b/evergreen_config_generator/evergreen_config_generator/tasks.py
@@ -18,7 +18,7 @@
 try:
     # Python 3 abstract base classes.
-    import collections.abc as abc
+    from collections import abc
 except ImportError:
     import collections as abc
 
diff --git a/ruff.toml b/ruff.toml
new file mode 100644
index 000000000..5a1fb396b
--- /dev/null
+++ b/ruff.toml
@@ -0,0 +1,43 @@
+target-version = "py38"
+
+exclude = [".evergreen/csfle/bottle.py"]
+
+[lint]
+extend-select = [
+  "B",     # flake8-bugbear
+  "EXE",   # flake8-executable
+  "F",     # pyflakes
+  "FURB",  # refurb
+  "I",     # isort
+  "ICN",   # flake8-import-conventions
+  "PGH",   # pygrep-hooks
+  "PIE",   # flake8-pie
+  "PL",    # pylint
+  "PT",    # flake8-pytest-style
+  "RET",   # flake8-return
+  "RUF",   # Ruff-specific
+  "UP",    # pyupgrade
+  "YTT",   # flake8-2020
+]
+ignore = [
+  "ISC001",   # Conflicts with formatter
+  "PLR09",    # Too many <...>
+  "PLR2004",  # Magic value used in comparison
+  "UP008",    # Use `super()` instead of `super(__class__, self)`
+  "ARG002",   # Unused method argument: `kwargs`
+  "PTH123",   # `open()` should be replaced by `Path.open()`
+  "B007",     # Loop control variable `canonname` not used within loop body"
+  "UP031",    # Use format specifiers instead of percent format
+  "PGH003",   # Use specific rule codes when ignoring type issues"
+  "PLR1704",  # Redefining argument with the local name"
+  "RUF012",   # Mutable class attributes should be annotated with `typing.ClassVar`
+  "UP014",    # Convert `DownloadableComponent` from `NamedTuple` functional to class syntax"
+  "RET503",   # Missing explicit `return` at the end of function able to return non-`None` value
+  "E402",     # Module level import not at top of file
+]
+unfixable = ["F401"]
+
+[lint.per-file-ignores]
+".evergreen/ocsp/mock_ocsp_responder.py" = ["PLW"]
+".evergreen/csfle/kms_*.py" = ["PLW"]
+".evergreen/csfle/gcpkms/mock_server.py" = ["PLW"]