diff --git a/.github/workflows/basic_checks.yml b/.github/workflows/basic_checks.yml index ed18658ad3d..eecd993dda6 100644 --- a/.github/workflows/basic_checks.yml +++ b/.github/workflows/basic_checks.yml @@ -86,26 +86,25 @@ jobs: style-check: runs-on: ubuntu-latest - container: - image: ghcr.io/armmbed/mbed-os-env:master-latest - steps: - name: Checkout repo - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 + + - name: Install Formatters + run: | + sudo apt-get update -y + sudo apt-get install astyle aspell - - - name: UTF-8 Check + - name: UTF-8 Check run: | git config --global --add safe.directory "$GITHUB_WORKSPACE" # Make sure we're not introducing any text which is not UTF-8 encoded git diff origin/${GITHUB_BASE_REF} -U0 | ( grep -a '^+' || true ) | ( ! grep -axv '.*' ) - - - - name: astyle checks + - name: astyle checks run: | set -x git config --global --add safe.directory "$GITHUB_WORKSPACE" @@ -134,11 +133,26 @@ jobs: tee BUILD/badlibs | sed -e "s/^/Bad library name found: /" && [ ! -s BUILD/badlibs ] # Assert that all assembler files are named correctly - # The strange command below asserts that there are exactly 0 libraries + # The strange command below asserts that there are exactly 0 files # that do end with .s find -name "*.s" | tee BUILD/badasm | sed -e "s/^/Bad Assembler file name found: /" && [ ! 
-s BUILD/badasm ] + # Set up the oldest python version that's currently still supported by Python (as of Fall 2025) + - uses: actions/setup-python@v6 + with: + python-version: '3.9' + + - name: Install Python packages + run: | + pip install -e ./tools[linters] + + - name: Check Python Formatting + run: | + cd tools + ruff format --diff + + docs-check: runs-on: ubuntu-latest @@ -162,46 +176,39 @@ jobs: python-tests: runs-on: ubuntu-latest steps: - - - name: Checkout repo - uses: actions/checkout@v4 - - - name: Install python3-venv - run: | - sudo apt-get update - sudo apt-get install -y python3-venv + - name: Checkout repo + uses: actions/checkout@v5 + + # Set up the oldest python version that's currently still supported by Python (as of Fall 2025) + - uses: actions/setup-python@v6 + with: + python-version: '3.9' - name: Install Python packages run: | - python3 -m venv venv - source venv/bin/activate pip install -e ./tools[unit-tests] - name: Python Tests run: | - source venv/bin/activate tools/run_python_tests.sh check-cmsis-mcu-descriptions-matches-target-list: runs-on: ubuntu-latest steps: - name: Checkout repo - uses: actions/checkout@v4 + uses: actions/checkout@v5 - - name: Install python3-venv - run: | - sudo apt-get update - sudo apt-get install -y python3-venv + # Set up the oldest python version that's currently still supported by Python (as of Fall 2025) + - uses: actions/setup-python@v6 + with: + python-version: '3.9' - name: Install Python packages run: | - python3 -m venv venv - source venv/bin/activate pip install -e ./tools - name: Verify that cmsis_mcu_descriptions.json5 is in sync with targets.json5 run: | - source venv/bin/activate cd tools/python mbed-tools cmsis-mcu-descr find-unused mbed-tools cmsis-mcu-descr check-missing @@ -214,7 +221,7 @@ jobs: steps: - name: Checkout repo - uses: actions/checkout@v4 + uses: actions/checkout@v5 with: fetch-depth: 0 diff --git a/tools/cmake/upload_methods/python_packagecheck.py 
b/tools/cmake/upload_methods/python_packagecheck.py index 422c6818358..1d114be3e9a 100644 --- a/tools/cmake/upload_methods/python_packagecheck.py +++ b/tools/cmake/upload_methods/python_packagecheck.py @@ -1,7 +1,7 @@ # Copyright (c) 2020 ARM Limited. All rights reserved. # SPDX-License-Identifier: Apache-2.0 -#file which is invoked by the cmake build system to check if all necessary python packages are installed. +# file which is invoked by the cmake build system to check if all necessary python packages are installed. import sys @@ -10,4 +10,4 @@ except ImportError: exit(1) -exit(0) \ No newline at end of file +exit(0) diff --git a/tools/debug_tools/crash_log_parser/crash_log_parser.py b/tools/debug_tools/crash_log_parser/crash_log_parser.py index 05e036ae057..48566085b52 100644 --- a/tools/debug_tools/crash_log_parser/crash_log_parser.py +++ b/tools/debug_tools/crash_log_parser/crash_log_parser.py @@ -25,68 +25,72 @@ from subprocess import check_output import sys -#arm-none-eabi-nm -nl +# arm-none-eabi-nm -nl _NM_EXEC = "arm-none-eabi-nm" _OPT = "-nlC" _PTN = re.compile("([0-9a-f]*) ([Tt]) ([^\t\n]*)(?:\t(.*):([0-9]*))?") + class ElfHelper(object): def __init__(self, elf_file, map_file): - - op = check_output([_NM_EXEC, _OPT, elf_file.name]).decode('utf-8') + op = check_output([_NM_EXEC, _OPT, elf_file.name]).decode("utf-8") self.maplines = map_file.readlines() self.matches = _PTN.findall(op) self.addrs = [int(x[0], 16) for x in self.matches] - + def function_addrs(self): return self.addrs - + def function_name_for_addr(self, addr): i = bisect.bisect_right(self.addrs, addr) - funcname = self.matches[i-1][2] + funcname = self.matches[i - 1][2] return funcname + def print_HFSR_info(hfsr): if int(hfsr, 16) & 0x80000000: print("\t\tDebug Event Occurred") if int(hfsr, 16) & 0x40000000: - print("\t\tForced exception, a fault with configurable priority has been escalated to HardFault") + print("\t\tForced exception, a fault with configurable priority has been escalated 
to HardFault") if int(hfsr, 16) & 0x2: - print("\t\tVector table read fault has occurred") + print("\t\tVector table read fault has occurred") -def print_MMFSR_info(mmfsr, mmfar): + +def print_MMFSR_info(mmfsr, mmfar): if int(mmfsr, 16) & 0x20: print("\t\tA MemManage fault occurred during FP lazy state preservation") if int(mmfsr, 16) & 0x10: - print("\t\tA derived MemManage fault occurred on exception entry") + print("\t\tA derived MemManage fault occurred on exception entry") if int(mmfsr, 16) & 0x8: - print("\t\tA derived MemManage fault occurred on exception return") + print("\t\tA derived MemManage fault occurred on exception return") if int(mmfsr, 16) & 0x2: if int(mmfsr, 16) & 0x80: - print("\t\tData access violation. Faulting address: %s"%(str(mmfar))) - else: - print("\t\tData access violation. WARNING: Fault address in MMFAR is NOT valid") + print("\t\tData access violation. Faulting address: %s" % (str(mmfar))) + else: + print("\t\tData access violation. WARNING: Fault address in MMFAR is NOT valid") if int(mmfsr, 16) & 0x1: - print("\t\tMPU or Execute Never (XN) default memory map access violation on an instruction fetch has occurred") - + print("\t\tMPU or Execute Never (XN) default memory map access violation on an instruction fetch has occurred") + + def print_BFSR_info(bfsr, bfar): if int(bfsr, 16) & 0x20: print("\t\tA bus fault occurred during FP lazy state preservation") if int(bfsr, 16) & 0x10: - print("\t\tA derived bus fault has occurred on exception entry") + print("\t\tA derived bus fault has occurred on exception entry") if int(bfsr, 16) & 0x8: - print("\t\tA derived bus fault has occurred on exception return") + print("\t\tA derived bus fault has occurred on exception return") if int(bfsr, 16) & 0x4: - print("\t\tImprecise data access error has occurred") + print("\t\tImprecise data access error has occurred") if int(bfsr, 16) & 0x2: - if int(bfsr,16) & 0x80: - print("\t\tA precise data access error has occurred. 
Faulting address: %s"%(str(bfar))) - else: - print("\t\tA precise data access error has occurred. WARNING: Fault address in BFAR is NOT valid") + if int(bfsr, 16) & 0x80: + print("\t\tA precise data access error has occurred. Faulting address: %s" % (str(bfar))) + else: + print("\t\tA precise data access error has occurred. WARNING: Fault address in BFAR is NOT valid") if int(bfsr, 16) & 0x1: - print("\t\tA bus fault on an instruction prefetch has occurred") + print("\t\tA bus fault on an instruction prefetch has occurred") + -def print_UFSR_info(ufsr): +def print_UFSR_info(ufsr): if int(ufsr, 16) & 0x200: print("\t\tDivide by zero error has occurred") if int(ufsr, 16) & 0x100: @@ -96,100 +100,120 @@ def print_UFSR_info(ufsr): if int(ufsr, 16) & 0x4: print("\t\tAn integrity check error has occurred on EXC_RETURN") if int(ufsr, 16) & 0x2: - print("\t\tInstruction executed with invalid EPSR.T or EPSR.IT field( This may be caused by Thumb bit not being set in branching instruction )") + print( + "\t\tInstruction executed with invalid EPSR.T or EPSR.IT field( This may be caused by Thumb bit not being set in branching instruction )" + ) if int(ufsr, 16) & 0x1: - print("\t\tThe processor has attempted to execute an undefined instruction") - -def print_CPUID_info(cpuid): + print("\t\tThe processor has attempted to execute an undefined instruction") + + +def print_CPUID_info(cpuid): if (int(cpuid, 16) & 0xF0000) == 0xC0000: print("\t\tProcessor Arch: ARM-V6M") - else: + else: print("\t\tProcessor Arch: ARM-V7M or above") - - print("\t\tProcessor Variant: %X" % ((int(cpuid,16) & 0xFFF0 ) >> 4)) + + print("\t\tProcessor Variant: %X" % ((int(cpuid, 16) & 0xFFF0) >> 4)) + def parse_line_for_register(line): _, register_val = line.split(":") - return register_val.strip() + return register_val.strip() + def main(crash_log, elfhelper): mmfar_val = 0 bfar_val = 0 - lines = iter(crash_log.read().decode('utf-8').splitlines()) + lines = 
iter(crash_log.read().decode("utf-8").splitlines()) for eachline in lines: if "++ MbedOS Fault Handler ++" in eachline: break else: - print("ERROR: Unable to find \"MbedOS Fault Handler\" header") + print('ERROR: Unable to find "MbedOS Fault Handler" header') return - + for eachline in lines: if "-- MbedOS Fault Handler --" in eachline: break - + elif eachline.startswith("PC"): pc_val = parse_line_for_register(eachline) if elfhelper: pc_name = elfhelper.function_name_for_addr(int(pc_val, 16)) else: pc_name = "" - + elif eachline.startswith("LR"): lr_val = parse_line_for_register(eachline) if elfhelper: lr_name = elfhelper.function_name_for_addr(int(lr_val, 16)) else: lr_name = "" - + elif eachline.startswith("SP"): sp_val = parse_line_for_register(eachline) - + elif eachline.startswith("HFSR"): hfsr_val = parse_line_for_register(eachline) - + elif eachline.startswith("MMFSR"): mmfsr_val = parse_line_for_register(eachline) - + elif eachline.startswith("BFSR"): bfsr_val = parse_line_for_register(eachline) - + elif eachline.startswith("UFSR"): ufsr_val = parse_line_for_register(eachline) - + elif eachline.startswith("CPUID"): cpuid_val = parse_line_for_register(eachline) - + elif eachline.startswith("MMFAR"): mmfar_val = parse_line_for_register(eachline) - + elif eachline.startswith("BFAR"): - bfar_val = parse_line_for_register(eachline) - - print("\nCrash Info:") + bfar_val = parse_line_for_register(eachline) + + print("\nCrash Info:") print("\tCrash location = %s [0x%s] (based on PC value)" % (pc_name.strip(), str(pc_val))) - print("\tCaller location = %s [0x%s] (based on LR value)" % (lr_name.strip(), str(lr_val))) + print("\tCaller location = %s [0x%s] (based on LR value)" % (lr_name.strip(), str(lr_val))) print("\tStack Pointer at the time of crash = [%s]" % (str(sp_val))) - + print("\tTarget and Fault Info:") print_CPUID_info(cpuid_val) print_HFSR_info(hfsr_val) print_MMFSR_info(mmfsr_val, mmfar_val) print_BFSR_info(bfsr_val, bfar_val) print_UFSR_info(ufsr_val) 
- - -if __name__ == '__main__': + + +if __name__ == "__main__": import argparse - - parser = argparse.ArgumentParser(description='Analyse mbed-os crash log. This tool requires arm-gcc binary utilities to be available in current path as it uses \'nm\' command') + + parser = argparse.ArgumentParser( + description="Analyse mbed-os crash log. This tool requires arm-gcc binary utilities to be available in current path as it uses 'nm' command" + ) # specify arguments - parser.add_argument(metavar='CRASH LOG', type=argparse.FileType('rb', 0), - dest='crashlog',help='path to crash log file') - parser.add_argument(metavar='ELF FILE', type=argparse.FileType('rb', 0), - nargs='?',const=None,dest='elffile',help='path to elf file') - parser.add_argument(metavar='MAP FILE', type=argparse.FileType('rb', 0), - nargs='?',const=None,dest='mapfile',help='path to map file') + parser.add_argument( + metavar="CRASH LOG", type=argparse.FileType("rb", 0), dest="crashlog", help="path to crash log file" + ) + parser.add_argument( + metavar="ELF FILE", + type=argparse.FileType("rb", 0), + nargs="?", + const=None, + dest="elffile", + help="path to elf file", + ) + parser.add_argument( + metavar="MAP FILE", + type=argparse.FileType("rb", 0), + nargs="?", + const=None, + dest="mapfile", + help="path to map file", + ) # get and validate arguments args = parser.parse_args() @@ -200,14 +224,13 @@ def main(crash_log, elfhelper): else: print("ELF or MAP file missing, logging raw values.") elfhelper = None - + # parse input and write to output main(args.crashlog, elfhelper) - - #close all files + + # close all files if args.elffile: args.elffile.close() if args.mapfile: args.mapfile.close() args.crashlog.close() - diff --git a/tools/psa/tfm/bin_utils/__init__.py b/tools/psa/tfm/bin_utils/__init__.py index 3aaffc48479..bf26e783976 100644 --- a/tools/psa/tfm/bin_utils/__init__.py +++ b/tools/psa/tfm/bin_utils/__init__.py @@ -16,6 +16,4 @@ from .assemble import Assembly -__all__ = [ - 'Assembly' -] 
+__all__ = ["Assembly"] diff --git a/tools/psa/tfm/bin_utils/assemble.py b/tools/psa/tfm/bin_utils/assemble.py index e6b56251893..85e425c47b5 100755 --- a/tools/psa/tfm/bin_utils/assemble.py +++ b/tools/psa/tfm/bin_utils/assemble.py @@ -32,7 +32,8 @@ offset_re = re.compile(r"^\s*RE_([0-9A-Z_]+)_IMAGE_OFFSET\s*=\s*(.*)") size_re = re.compile(r"^\s*RE_([0-9A-Z_]+)_IMAGE_MAX_SIZE\s*=\s*(.*)") -class Assembly(): + +class Assembly: def __init__(self, layout_path, output): self.output = output self.layout_path = layout_path @@ -50,40 +51,37 @@ def find_slots(self): offsets = macro_parser.evaluate_macro(self.layout_path, offset_re, 1, 2) sizes = macro_parser.evaluate_macro(self.layout_path, size_re, 1, 2) - if 'SECURE' not in offsets: + if "SECURE" not in offsets: raise Exception("Image config does not have secure partition") - if 'NON_SECURE' not in offsets: + if "NON_SECURE" not in offsets: raise Exception("Image config does not have non-secure partition") self.offsets = offsets self.sizes = sizes def add_image(self, source, partition): - with open(self.output, 'ab') as ofd: + with open(self.output, "ab") as ofd: ofd.seek(0, os.SEEK_END) pos = ofd.tell() if pos > self.offsets[partition]: raise Exception("Partitions not in order, unsupported") if pos < self.offsets[partition]: - ofd.write(b'\xFF' * (self.offsets[partition] - pos)) + ofd.write(b"\xff" * (self.offsets[partition] - pos)) statinfo = os.stat(source) if statinfo.st_size > self.sizes[partition]: raise Exception("Image {} is too large for partition".format(source)) - with open(source, 'rb') as rfd: + with open(source, "rb") as rfd: shutil.copyfileobj(rfd, ofd, 0x10000) + def main(): parser = argparse.ArgumentParser() - parser.add_argument('-l', '--layout', required=True, - help='Location of the file that contains preprocessed macros') - parser.add_argument('-s', '--secure', required=True, - help='Unsigned secure image') - parser.add_argument('-n', '--non_secure', - help='Unsigned non-secure image') - 
parser.add_argument('-o', '--output', required=True, - help='Filename to write full image to') + parser.add_argument("-l", "--layout", required=True, help="Location of the file that contains preprocessed macros") + parser.add_argument("-s", "--secure", required=True, help="Unsigned secure image") + parser.add_argument("-n", "--non_secure", help="Unsigned non-secure image") + parser.add_argument("-o", "--output", required=True, help="Filename to write full image to") args = parser.parse_args() output = Assembly(args.layout, args.output) @@ -91,5 +89,6 @@ def main(): output.add_image(args.secure, "SECURE") output.add_image(args.non_secure, "NON_SECURE") -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/tools/psa/tfm/bin_utils/imgtool.py b/tools/psa/tfm/bin_utils/imgtool.py index e29e224d8c1..c7b838510a5 100755 --- a/tools/psa/tfm/bin_utils/imgtool.py +++ b/tools/psa/tfm/bin_utils/imgtool.py @@ -18,5 +18,5 @@ from imgtool import main -if __name__ == '__main__': +if __name__ == "__main__": main.imgtool() diff --git a/tools/psa/tfm/bin_utils/imgtool/boot_record.py b/tools/psa/tfm/bin_utils/imgtool/boot_record.py index ac433aa099a..d1246faad68 100644 --- a/tools/psa/tfm/bin_utils/imgtool/boot_record.py +++ b/tools/psa/tfm/bin_utils/imgtool/boot_record.py @@ -24,6 +24,7 @@ class SwComponent(int, Enum): Software component property IDs specified by Arm's PSA Attestation API 1.0 document. 
""" + TYPE = 1 MEASUREMENT_VALUE = 2 VERSION = 4 @@ -31,9 +32,7 @@ class SwComponent(int, Enum): MEASUREMENT_DESCRIPTION = 6 -def create_sw_component_data(sw_type, sw_version, sw_measurement_description, - sw_measurement_value, sw_signer_id): - +def create_sw_component_data(sw_type, sw_version, sw_measurement_description, sw_measurement_value, sw_signer_id): # List of software component properties (Key ID + value) properties = { SwComponent.TYPE: sw_type, diff --git a/tools/psa/tfm/bin_utils/imgtool/image.py b/tools/psa/tfm/bin_utils/imgtool/image.py index 684c6b354a3..6f17504ede4 100644 --- a/tools/psa/tfm/bin_utils/imgtool/image.py +++ b/tools/psa/tfm/bin_utils/imgtool/image.py @@ -38,7 +38,7 @@ from cryptography.hazmat.primitives import hashes, hmac from cryptography.exceptions import InvalidSignature -IMAGE_MAGIC = 0x96f3b83d +IMAGE_MAGIC = 0x96F3B83D IMAGE_HEADER_SIZE = 32 BIN_EXT = "bin" INTEL_HEX_EXT = "hex" @@ -50,30 +50,30 @@ # Image header flags. IMAGE_F = { - 'PIC': 0x0000001, - 'ENCRYPTED_AES128': 0x0000004, - 'ENCRYPTED_AES256': 0x0000008, - 'NON_BOOTABLE': 0x0000010, - 'RAM_LOAD': 0x0000020, - 'ROM_FIXED': 0x0000100, + "PIC": 0x0000001, + "ENCRYPTED_AES128": 0x0000004, + "ENCRYPTED_AES256": 0x0000008, + "NON_BOOTABLE": 0x0000010, + "RAM_LOAD": 0x0000020, + "ROM_FIXED": 0x0000100, } TLV_VALUES = { - 'KEYHASH': 0x01, - 'PUBKEY': 0x02, - 'SHA256': 0x10, - 'RSA2048': 0x20, - 'ECDSA224': 0x21, - 'ECDSA256': 0x22, - 'RSA3072': 0x23, - 'ED25519': 0x24, - 'ENCRSA2048': 0x30, - 'ENCKW': 0x31, - 'ENCEC256': 0x32, - 'ENCX25519': 0x33, - 'DEPENDENCY': 0x40, - 'SEC_CNT': 0x50, - 'BOOT_RECORD': 0x60, + "KEYHASH": 0x01, + "PUBKEY": 0x02, + "SHA256": 0x10, + "RSA2048": 0x20, + "ECDSA224": 0x21, + "ECDSA256": 0x22, + "RSA3072": 0x23, + "ED25519": 0x24, + "ENCRSA2048": 0x30, + "ENCKW": 0x31, + "ENCEC256": 0x32, + "ENCX25519": 0x33, + "DEPENDENCY": 0x40, + "SEC_CNT": 0x50, + "BOOT_RECORD": 0x60, } TLV_SIZE = 4 @@ -81,25 +81,20 @@ TLV_INFO_MAGIC = 0x6907 
TLV_PROT_INFO_MAGIC = 0x6908 -boot_magic = bytes([ - 0x77, 0xc2, 0x95, 0xf3, - 0x60, 0xd2, 0xef, 0x7f, - 0x35, 0x52, 0x50, 0x0f, - 0x2c, 0xb6, 0x79, 0x80, ]) +boot_magic = bytes([0x77, 0xC2, 0x95, 0xF3, 0x60, 0xD2, 0xEF, 0x7F, 0x35, 0x52, 0x50, 0x0F, 0x2C, 0xB6, 0x79, 0x80]) -STRUCT_ENDIAN_DICT = { - 'little': '<', - 'big': '>' -} +STRUCT_ENDIAN_DICT = {"little": "<", "big": ">"} -VerifyResult = Enum('VerifyResult', - """ +VerifyResult = Enum( + "VerifyResult", + """ OK INVALID_MAGIC INVALID_TLV_INFO_MAGIC INVALID_HASH INVALID_SIGNATURE - """) + """, +) -class TLV(): +class TLV: def __init__(self, endian, magic=TLV_INFO_MAGIC): self.magic = magic self.buf = bytearray() @@ -114,9 +109,9 @@ def add(self, kind, payload): """ e = STRUCT_ENDIAN_DICT[self.endian] if isinstance(kind, int): - buf = struct.pack(e + 'BBH', kind, 0, len(payload)) + buf = struct.pack(e + "BBH", kind, 0, len(payload)) else: - buf = struct.pack(e + 'BBH', TLV_VALUES[kind], 0, len(payload)) + buf = struct.pack(e + "BBH", TLV_VALUES[kind], 0, len(payload)) self.buf += buf self.buf += payload @@ -124,19 +119,29 @@ def get(self): if len(self.buf) == 0: return bytes() e = STRUCT_ENDIAN_DICT[self.endian] - header = struct.pack(e + 'HH', self.magic, len(self)) + header = struct.pack(e + "HH", self.magic, len(self)) return header + bytes(self.buf) -class Image(): - - def __init__(self, version=None, header_size=IMAGE_HEADER_SIZE, - pad_header=False, pad=False, confirm=False, align=1, - slot_size=0, max_sectors=DEFAULT_MAX_SECTORS, - overwrite_only=False, endian="little", load_addr=0, - rom_fixed=None, erased_val=None, save_enctlv=False, - security_counter=None): - +class Image: + def __init__( + self, + version=None, + header_size=IMAGE_HEADER_SIZE, + pad_header=False, + pad=False, + confirm=False, + align=1, + slot_size=0, + max_sectors=DEFAULT_MAX_SECTORS, + overwrite_only=False, + endian="little", + load_addr=0, + rom_fixed=None, + erased_val=None, + save_enctlv=False, + security_counter=None, + ): 
if load_addr and rom_fixed: raise click.UsageError("Can not set rom_fixed and load_addr at the same time") @@ -153,18 +158,16 @@ def __init__(self, version=None, header_size=IMAGE_HEADER_SIZE, self.base_addr = None self.load_addr = 0 if load_addr is None else load_addr self.rom_fixed = rom_fixed - self.erased_val = 0xff if erased_val is None else int(erased_val, 0) + self.erased_val = 0xFF if erased_val is None else int(erased_val, 0) self.payload = [] self.enckey = None self.save_enctlv = save_enctlv self.enctlv_len = 0 - if security_counter == 'auto': + if security_counter == "auto": # Security counter has not been explicitly provided, # generate it from the version number - self.security_counter = ((self.version.major << 24) - + (self.version.minor << 16) - + self.version.revision) + self.security_counter = (self.version.major << 24) + (self.version.minor << 16) + self.version.revision else: self.security_counter = security_counter @@ -173,18 +176,19 @@ def __repr__(self): base_addr={}, load_addr={}, align={}, slot_size={}, \ max_sectors={}, overwrite_only={}, endian={} format={}, \ payloadlen=0x{:x}>".format( - self.version, - self.header_size, - self.security_counter, - self.base_addr if self.base_addr is not None else "N/A", - self.load_addr, - self.align, - self.slot_size, - self.max_sectors, - self.overwrite_only, - self.endian, - self.__class__.__name__, - len(self.payload)) + self.version, + self.header_size, + self.security_counter, + self.base_addr if self.base_addr is not None else "N/A", + self.load_addr, + self.align, + self.slot_size, + self.max_sectors, + self.overwrite_only, + self.endian, + self.__class__.__name__, + len(self.payload), + ) def load(self, path): """Load an image from a given file""" @@ -195,7 +199,7 @@ def load(self, path): self.payload = ih.tobinarray() self.base_addr = ih.minaddr() else: - with open(path, 'rb') as f: + with open(path, "rb") as f: self.payload = f.read() except FileNotFoundError: raise click.UsageError("Input 
file not found") @@ -205,8 +209,7 @@ def load(self, path): if self.base_addr: # Adjust base_addr for new header self.base_addr -= self.header_size - self.payload = bytes([self.erased_val] * self.header_size) + \ - self.payload + self.payload = bytes([self.erased_val] * self.header_size) + self.payload self.check_header() @@ -216,48 +219,43 @@ def save(self, path, hex_addr=None): if ext == INTEL_HEX_EXT: # input was in binary format, but HEX needs to know the base addr if self.base_addr is None and hex_addr is None: - raise click.UsageError("No address exists in input file " - "neither was it provided by user") + raise click.UsageError("No address exists in input file neither was it provided by user") h = IntelHex() if hex_addr is not None: self.base_addr = hex_addr h.frombytes(bytes=self.payload, offset=self.base_addr) if self.pad: - trailer_size = self._trailer_size(self.align, self.max_sectors, - self.overwrite_only, - self.enckey, - self.save_enctlv, - self.enctlv_len) + trailer_size = self._trailer_size( + self.align, self.max_sectors, self.overwrite_only, self.enckey, self.save_enctlv, self.enctlv_len + ) trailer_addr = (self.base_addr + self.slot_size) - trailer_size - padding = bytearray([self.erased_val] * - (trailer_size - len(boot_magic))) + padding = bytearray([self.erased_val] * (trailer_size - len(boot_magic))) if self.confirm and not self.overwrite_only: padding[-MAX_ALIGN] = 0x01 # image_ok = 0x01 padding += boot_magic h.puts(trailer_addr, bytes(padding)) - h.tofile(path, 'hex') + h.tofile(path, "hex") else: if self.pad: self.pad_to(self.slot_size) - with open(path, 'wb') as f: + with open(path, "wb") as f: f.write(self.payload) def check_header(self): if self.header_size > 0 and not self.pad_header: - if any(v != 0 for v in self.payload[0:self.header_size]): - raise click.UsageError("Header padding was not requested and " - "image does not start with zeros") + if any(v != 0 for v in self.payload[0 : self.header_size]): + raise 
click.UsageError("Header padding was not requested and image does not start with zeros") def check_trailer(self): if self.slot_size > 0: - tsize = self._trailer_size(self.align, self.max_sectors, - self.overwrite_only, self.enckey, - self.save_enctlv, self.enctlv_len) + tsize = self._trailer_size( + self.align, self.max_sectors, self.overwrite_only, self.enckey, self.save_enctlv, self.enctlv_len + ) padding = self.slot_size - (len(self.payload) + tsize) if padding < 0: - msg = "Image size (0x{:x}) + trailer (0x{:x}) exceeds " \ - "requested size 0x{:x}".format( - len(self.payload), tsize, self.slot_size) + msg = "Image size (0x{:x}) + trailer (0x{:x}) exceeds requested size 0x{:x}".format( + len(self.payload), tsize, self.slot_size + ) raise click.UsageError(msg) def ecies_hkdf(self, enckey, plainkey): @@ -268,28 +266,24 @@ def ecies_hkdf(self, enckey, plainkey): newpk = X25519PrivateKey.generate() shared = newpk.exchange(enckey._get_public()) derived_key = HKDF( - algorithm=hashes.SHA256(), length=48, salt=None, - info=b'MCUBoot_ECIES_v1', backend=default_backend()).derive(shared) - encryptor = Cipher(algorithms.AES(derived_key[:16]), - modes.CTR(bytes([0] * 16)), - backend=default_backend()).encryptor() + algorithm=hashes.SHA256(), length=48, salt=None, info=b"MCUBoot_ECIES_v1", backend=default_backend() + ).derive(shared) + encryptor = Cipher( + algorithms.AES(derived_key[:16]), modes.CTR(bytes([0] * 16)), backend=default_backend() + ).encryptor() cipherkey = encryptor.update(plainkey) + encryptor.finalize() - mac = hmac.HMAC(derived_key[16:], hashes.SHA256(), - backend=default_backend()) + mac = hmac.HMAC(derived_key[16:], hashes.SHA256(), backend=default_backend()) mac.update(cipherkey) ciphermac = mac.finalize() if isinstance(enckey, ecdsa.ECDSA256P1Public): - pubk = newpk.public_key().public_bytes( - encoding=Encoding.X962, - format=PublicFormat.UncompressedPoint) + pubk = newpk.public_key().public_bytes(encoding=Encoding.X962, 
format=PublicFormat.UncompressedPoint) else: - pubk = newpk.public_key().public_bytes( - encoding=Encoding.Raw, - format=PublicFormat.Raw) + pubk = newpk.public_key().public_bytes(encoding=Encoding.Raw, format=PublicFormat.Raw) return cipherkey, ciphermac, pubk - def create(self, key, public_key_format, enckey, dependencies=None, - sw_type=None, custom_tlvs=None, encrypt_keylen=128): + def create( + self, key, public_key_format, enckey, dependencies=None, sw_type=None, custom_tlvs=None, encrypt_keylen=128 + ): self.enckey = enckey # Calculate the hash of the public key @@ -310,14 +304,14 @@ def create(self, key, public_key_format, enckey, dependencies=None, if sw_type is not None: if len(sw_type) > MAX_SW_TYPE_LENGTH: - msg = "'{}' is too long ({} characters) for sw_type. Its " \ - "maximum allowed length is 12 characters.".format( - sw_type, len(sw_type)) + msg = ( + "'{}' is too long ({} characters) for sw_type. Its maximum allowed length is 12 characters.".format( + sw_type, len(sw_type) + ) + ) raise click.UsageError(msg) - image_version = (str(self.version.major) + '.' - + str(self.version.minor) + '.' - + str(self.version.revision)) + image_version = str(self.version.major) + "." + str(self.version.minor) + "." + str(self.version.revision) # The image hash is computed over the image header, the image # itself and the protected TLV area. 
However, the boot record TLV @@ -328,9 +322,7 @@ def create(self, key, public_key_format, enckey, dependencies=None, digest = bytes(hashlib.sha256().digest_size) # Create CBOR encoded boot record - boot_record = create_sw_component_data(sw_type, image_version, - "SHA256", digest, - pubbytes) + boot_record = create_sw_component_data(sw_type, image_version, "SHA256", digest, pubbytes) protected_tlv_size += TLV_SIZE + len(boot_record) @@ -338,7 +330,7 @@ def create(self, key, public_key_format, enckey, dependencies=None, # Size of a Dependency TLV = Header ('HH') + Payload('IBBHI') # = 4 + 12 = 16 Bytes dependencies_num = len(dependencies[DEP_IMAGES_KEY]) - protected_tlv_size += (dependencies_num * 16) + protected_tlv_size += dependencies_num * 16 if custom_tlvs is not None: for value in custom_tlvs.values(): @@ -373,27 +365,26 @@ def create(self, key, public_key_format, enckey, dependencies=None, # in the hash calculation protected_tlv_off = None if protected_tlv_size != 0: - e = STRUCT_ENDIAN_DICT[self.endian] if self.security_counter is not None: - payload = struct.pack(e + 'I', self.security_counter) - prot_tlv.add('SEC_CNT', payload) + payload = struct.pack(e + "I", self.security_counter) + prot_tlv.add("SEC_CNT", payload) if sw_type is not None: - prot_tlv.add('BOOT_RECORD', boot_record) + prot_tlv.add("BOOT_RECORD", boot_record) if dependencies is not None: for i in range(dependencies_num): payload = struct.pack( - e + 'B3x'+'BBHI', - int(dependencies[DEP_IMAGES_KEY][i]), - dependencies[DEP_VERSIONS_KEY][i].major, - dependencies[DEP_VERSIONS_KEY][i].minor, - dependencies[DEP_VERSIONS_KEY][i].revision, - dependencies[DEP_VERSIONS_KEY][i].build - ) - prot_tlv.add('DEPENDENCY', payload) + e + "B3x" + "BBHI", + int(dependencies[DEP_IMAGES_KEY][i]), + dependencies[DEP_VERSIONS_KEY][i].major, + dependencies[DEP_VERSIONS_KEY][i].minor, + dependencies[DEP_VERSIONS_KEY][i].revision, + dependencies[DEP_VERSIONS_KEY][i].build, + ) + prot_tlv.add("DEPENDENCY", payload) if 
custom_tlvs is not None: for tag, value in custom_tlvs.items(): @@ -410,18 +401,18 @@ def create(self, key, public_key_format, enckey, dependencies=None, sha.update(self.payload) digest = sha.digest() - tlv.add('SHA256', digest) + tlv.add("SHA256", digest) if key is not None: - if public_key_format == 'hash': - tlv.add('KEYHASH', pubbytes) + if public_key_format == "hash": + tlv.add("KEYHASH", pubbytes) else: - tlv.add('PUBKEY', pub) + tlv.add("PUBKEY", pub) # `sign` expects the full image payload (sha256 done internally), # while `sign_digest` expects only the digest of the payload - if hasattr(key, 'sign'): + if hasattr(key, "sign"): sig = key.sign(bytes(self.payload)) else: sig = key.sign_digest(digest) @@ -440,29 +431,25 @@ def create(self, key, public_key_format, enckey, dependencies=None, if isinstance(enckey, rsa.RSAPublic): cipherkey = enckey._get_public().encrypt( - plainkey, padding.OAEP( - mgf=padding.MGF1(algorithm=hashes.SHA256()), - algorithm=hashes.SHA256(), - label=None)) + plainkey, + padding.OAEP(mgf=padding.MGF1(algorithm=hashes.SHA256()), algorithm=hashes.SHA256(), label=None), + ) self.enctlv_len = len(cipherkey) - tlv.add('ENCRSA2048', cipherkey) - elif isinstance(enckey, (ecdsa.ECDSA256P1Public, - x25519.X25519Public)): + tlv.add("ENCRSA2048", cipherkey) + elif isinstance(enckey, (ecdsa.ECDSA256P1Public, x25519.X25519Public)): cipherkey, mac, pubk = self.ecies_hkdf(enckey, plainkey) enctlv = pubk + mac + cipherkey self.enctlv_len = len(enctlv) if isinstance(enckey, ecdsa.ECDSA256P1Public): - tlv.add('ENCEC256', enctlv) + tlv.add("ENCEC256", enctlv) else: - tlv.add('ENCX25519', enctlv) + tlv.add("ENCX25519", enctlv) nonce = bytes([0] * 16) - cipher = Cipher(algorithms.AES(plainkey), modes.CTR(nonce), - backend=default_backend()) + cipher = Cipher(algorithms.AES(plainkey), modes.CTR(nonce), backend=default_backend()) encryptor = cipher.encryptor() - img = bytes(self.payload[self.header_size:]) - self.payload[self.header_size:] = \ - 
encryptor.update(img) + encryptor.finalize() + img = bytes(self.payload[self.header_size :]) + self.payload[self.header_size :] = encryptor.update(img) + encryptor.finalize() self.payload += prot_tlv.get() self.payload += tlv.get() @@ -475,54 +462,56 @@ def add_header(self, enckey, protected_tlv_size, aes_length=128): flags = 0 if enckey is not None: if aes_length == 128: - flags |= IMAGE_F['ENCRYPTED_AES128'] + flags |= IMAGE_F["ENCRYPTED_AES128"] else: - flags |= IMAGE_F['ENCRYPTED_AES256'] + flags |= IMAGE_F["ENCRYPTED_AES256"] if self.load_addr != 0: # Indicates that this image should be loaded into RAM # instead of run directly from flash. - flags |= IMAGE_F['RAM_LOAD'] + flags |= IMAGE_F["RAM_LOAD"] if self.rom_fixed: - flags |= IMAGE_F['ROM_FIXED'] + flags |= IMAGE_F["ROM_FIXED"] e = STRUCT_ENDIAN_DICT[self.endian] - fmt = (e + - # type ImageHdr struct { - 'I' + # Magic uint32 - 'I' + # LoadAddr uint32 - 'H' + # HdrSz uint16 - 'H' + # PTLVSz uint16 - 'I' + # ImgSz uint32 - 'I' + # Flags uint32 - 'BBHI' + # Vers ImageVersion - 'I' # Pad1 uint32 - ) # } + fmt = ( + e + + + # type ImageHdr struct { + "I" # Magic uint32 + + "I" # LoadAddr uint32 + + "H" # HdrSz uint16 + + "H" # PTLVSz uint16 + + "I" # ImgSz uint32 + + "I" # Flags uint32 + + "BBHI" # Vers ImageVersion + + "I" # Pad1 uint32 + ) # } assert struct.calcsize(fmt) == IMAGE_HEADER_SIZE - header = struct.pack(fmt, - IMAGE_MAGIC, - self.rom_fixed or self.load_addr, - self.header_size, - protected_tlv_size, # TLV Info header + Protected TLVs - len(self.payload) - self.header_size, # ImageSz - flags, - self.version.major, - self.version.minor or 0, - self.version.revision or 0, - self.version.build or 0, - 0) # Pad1 + header = struct.pack( + fmt, + IMAGE_MAGIC, + self.rom_fixed or self.load_addr, + self.header_size, + protected_tlv_size, # TLV Info header + Protected TLVs + len(self.payload) - self.header_size, # ImageSz + flags, + self.version.major, + self.version.minor or 0, + self.version.revision or 0, 
+ self.version.build or 0, + 0, + ) # Pad1 self.payload = bytearray(self.payload) - self.payload[:len(header)] = header + self.payload[: len(header)] = header - def _trailer_size(self, write_size, max_sectors, overwrite_only, enckey, - save_enctlv, enctlv_len): + def _trailer_size(self, write_size, max_sectors, overwrite_only, enckey, save_enctlv, enctlv_len): # NOTE: should already be checked by the argument parser magic_size = 16 if overwrite_only: return MAX_ALIGN * 2 + magic_size else: if write_size not in set([1, 2, 4, 8]): - raise click.BadParameter("Invalid alignment: {}".format( - write_size)) + raise click.BadParameter("Invalid alignment: {}".format(write_size)) m = DEFAULT_MAX_SECTORS if max_sectors is None else max_sectors trailer = m * 3 * write_size # status area if enckey is not None: @@ -538,9 +527,9 @@ def _trailer_size(self, write_size, max_sectors, overwrite_only, enckey, def pad_to(self, size): """Pad the image to the given size, with the given flash alignment.""" - tsize = self._trailer_size(self.align, self.max_sectors, - self.overwrite_only, self.enckey, - self.save_enctlv, self.enctlv_len) + tsize = self._trailer_size( + self.align, self.max_sectors, self.overwrite_only, self.enckey, self.save_enctlv, self.enctlv_len + ) padding = size - (len(self.payload) + tsize) pbytes = bytearray([self.erased_val] * padding) pbytes += bytearray([self.erased_val] * (tsize - len(boot_magic))) @@ -554,19 +543,19 @@ def verify(imgfile, key): with open(imgfile, "rb") as f: b = f.read() - magic, _, header_size, _, img_size = struct.unpack('IIHHI', b[:16]) - version = struct.unpack('BBHI', b[20:28]) + magic, _, header_size, _, img_size = struct.unpack("IIHHI", b[:16]) + version = struct.unpack("BBHI", b[20:28]) if magic != IMAGE_MAGIC: return VerifyResult.INVALID_MAGIC, None, None tlv_off = header_size + img_size - tlv_info = b[tlv_off:tlv_off+TLV_INFO_SIZE] - magic, tlv_tot = struct.unpack('HH', tlv_info) + tlv_info = b[tlv_off : tlv_off + TLV_INFO_SIZE] + 
magic, tlv_tot = struct.unpack("HH", tlv_info) if magic == TLV_PROT_INFO_MAGIC: tlv_off += tlv_tot - tlv_info = b[tlv_off:tlv_off+TLV_INFO_SIZE] - magic, tlv_tot = struct.unpack('HH', tlv_info) + tlv_info = b[tlv_off : tlv_off + TLV_INFO_SIZE] + magic, tlv_tot = struct.unpack("HH", tlv_info) if magic != TLV_INFO_MAGIC: return VerifyResult.INVALID_TLV_INFO_MAGIC, None, None @@ -579,21 +568,21 @@ def verify(imgfile, key): tlv_end = tlv_off + tlv_tot tlv_off += TLV_INFO_SIZE # skip tlv info while tlv_off < tlv_end: - tlv = b[tlv_off:tlv_off+TLV_SIZE] - tlv_type, _, tlv_len = struct.unpack('BBH', tlv) + tlv = b[tlv_off : tlv_off + TLV_SIZE] + tlv_type, _, tlv_len = struct.unpack("BBH", tlv) if tlv_type == TLV_VALUES["SHA256"]: off = tlv_off + TLV_SIZE - if digest == b[off:off+tlv_len]: + if digest == b[off : off + tlv_len]: if key is None: return VerifyResult.OK, version, digest else: return VerifyResult.INVALID_HASH, None, None elif key is not None and tlv_type == TLV_VALUES[key.sig_tlv()]: off = tlv_off + TLV_SIZE - tlv_sig = b[off:off+tlv_len] + tlv_sig = b[off : off + tlv_len] payload = b[:prot_tlv_size] try: - if hasattr(key, 'verify'): + if hasattr(key, "verify"): key.verify(tlv_sig, payload) else: key.verify_digest(tlv_sig, digest) diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/__init__.py b/tools/psa/tfm/bin_utils/imgtool/keys/__init__.py index dfd101dd660..2fad2de2873 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/__init__.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/__init__.py @@ -20,14 +20,10 @@ from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric.rsa import ( - RSAPrivateKey, RSAPublicKey) -from cryptography.hazmat.primitives.asymmetric.ec import ( - EllipticCurvePrivateKey, EllipticCurvePublicKey) -from cryptography.hazmat.primitives.asymmetric.ed25519 import ( - Ed25519PrivateKey, Ed25519PublicKey) -from 
cryptography.hazmat.primitives.asymmetric.x25519 import ( - X25519PrivateKey, X25519PublicKey) +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey, RSAPublicKey +from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey, EllipticCurvePublicKey +from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey, Ed25519PublicKey +from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey, X25519PublicKey from .rsa import RSA, RSAPublic, RSAUsageError, RSA_KEY_SIZES from .ecdsa import ECDSA256P1, ECDSA256P1Public, ECDSAUsageError @@ -38,18 +34,16 @@ class PasswordRequired(Exception): """Raised to indicate that the key is password protected, but a password was not specified.""" + pass def load(path, passwd=None): """Try loading a key from the given path. Returns None if the password wasn't specified.""" - with open(path, 'rb') as f: + with open(path, "rb") as f: raw_pem = f.read() try: - pk = serialization.load_pem_private_key( - raw_pem, - password=passwd, - backend=default_backend()) + pk = serialization.load_pem_private_key(raw_pem, password=passwd, backend=default_backend()) # Unfortunately, the crypto library raises unhelpful exceptions, # so we have to look at the text. except TypeError as e: @@ -60,9 +54,7 @@ def load(path, passwd=None): except ValueError: # This seems to happen if the key is a public key, let's try # loading it as a public key. 
- pk = serialization.load_pem_public_key( - raw_pem, - backend=default_backend()) + pk = serialization.load_pem_public_key(raw_pem, backend=default_backend()) if isinstance(pk, RSAPrivateKey): if pk.key_size not in RSA_KEY_SIZES: @@ -73,13 +65,13 @@ def load(path, passwd=None): raise Exception("Unsupported RSA key size: " + pk.key_size) return RSAPublic(pk) elif isinstance(pk, EllipticCurvePrivateKey): - if pk.curve.name != 'secp256r1': + if pk.curve.name != "secp256r1": raise Exception("Unsupported EC curve: " + pk.curve.name) if pk.key_size != 256: raise Exception("Unsupported EC size: " + pk.key_size) return ECDSA256P1(pk) elif isinstance(pk, EllipticCurvePublicKey): - if pk.curve.name != 'secp256r1': + if pk.curve.name != "secp256r1": raise Exception("Unsupported EC curve: " + pk.curve.name) if pk.key_size != 256: raise Exception("Unsupported EC size: " + pk.key_size) diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa.py b/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa.py index 56222d8f27e..a2e294b6366 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa.py @@ -26,9 +26,11 @@ from .general import KeyClass + class ECDSAUsageError(Exception): pass + class ECDSA256P1Public(KeyClass): def __init__(self, key): self.key = key @@ -45,21 +47,21 @@ def _get_public(self): def get_public_bytes(self): # The key is embedded into MBUboot in "SubjectPublicKeyInfo" format return self._get_public().public_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) def get_private_bytes(self, minimal): - self._unsupported('get_private_bytes') + self._unsupported("get_private_bytes") def export_private(self, path, passwd=None): - self._unsupported('export_private') + self._unsupported("export_private") def export_public(self, path): """Write the public key to the given file.""" pem = 
self._get_public().public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + with open(path, "wb") as f: f.write(pem) def sig_type(self): @@ -84,12 +86,11 @@ def sig_len(self): def verify(self, signature, payload): # strip possible paddings added during sign - signature = signature[:signature[1] + 2] + signature = signature[: signature[1] + 2] k = self.key if isinstance(self.key, ec.EllipticCurvePrivateKey): k = self.key.public_key() - return k.verify(signature=signature, data=payload, - signature_algorithm=ec.ECDSA(SHA256())) + return k.verify(signature=signature, data=payload, signature_algorithm=ec.ECDSA(SHA256())) class ECDSA256P1(ECDSA256P1Public): @@ -104,22 +105,20 @@ def __init__(self, key): @staticmethod def generate(): - pk = ec.generate_private_key( - ec.SECP256R1(), - backend=default_backend()) + pk = ec.generate_private_key(ec.SECP256R1(), backend=default_backend()) return ECDSA256P1(pk) def _get_public(self): return self.key.public_key() def _build_minimal_ecdsa_privkey(self, der): - ''' + """ Builds a new DER that only includes the EC private key, removing the public key that is added as an "optional" BITSTRING. - ''' + """ offset_PUB = 68 EXCEPTION_TEXT = "Error parsing ecdsa key. Please submit an issue!" 
- if der[offset_PUB] != 0xa1: + if der[offset_PUB] != 0xA1: raise ECDSAUsageError(EXCEPTION_TEXT) len_PUB = der[offset_PUB + 1] b = bytearray(der[:-offset_PUB]) @@ -138,9 +137,10 @@ def _build_minimal_ecdsa_privkey(self, der): def get_private_bytes(self, minimal): priv = self.key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption()) + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) if minimal: priv = self._build_minimal_ecdsa_privkey(priv) return priv @@ -152,23 +152,20 @@ def export_private(self, path, passwd=None): else: enc = serialization.BestAvailableEncryption(passwd) pem = self.key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=enc) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=enc + ) + with open(path, "wb") as f: f.write(pem) def raw_sign(self, payload): """Return the actual signature""" - return self.key.sign( - data=payload, - signature_algorithm=ec.ECDSA(SHA256())) + return self.key.sign(data=payload, signature_algorithm=ec.ECDSA(SHA256())) def sign(self, payload): sig = self.raw_sign(payload) if self.pad_sig: # To make fixed length, pad with one or two zeros. 
- sig += b'\000' * (self.sig_len() - len(sig)) + sig += b"\000" * (self.sig_len() - len(sig)) return sig else: return sig diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa_test.py b/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa_test.py index 021f04073f7..1555f1fd05b 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa_test.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/ecdsa_test.py @@ -29,12 +29,12 @@ from cryptography.hazmat.primitives.asymmetric import ec from cryptography.hazmat.primitives.hashes import SHA256 -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))) +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))) from imgtool.keys import load, ECDSA256P1, ECDSAUsageError -class EcKeyGeneration(unittest.TestCase): +class EcKeyGeneration(unittest.TestCase): def setUp(self): self.test_dir = tempfile.TemporaryDirectory() @@ -47,21 +47,20 @@ def tearDown(self): def test_keygen(self): name1 = self.tname("keygen.pem") k = ECDSA256P1.generate() - k.export_private(name1, b'secret') + k.export_private(name1, b"secret") self.assertIsNone(load(name1)) - k2 = load(name1, b'secret') + k2 = load(name1, b"secret") - pubname = self.tname('keygen-pub.pem') + pubname = self.tname("keygen-pub.pem") k2.export_public(pubname) pk2 = load(pubname) # We should be able to export the public key from the loaded # public key, but not the private key. 
- pk2.export_public(self.tname('keygen-pub2.pem')) - self.assertRaises(ECDSAUsageError, - pk2.export_private, self.tname('keygen-priv2.pem')) + pk2.export_public(self.tname("keygen-pub2.pem")) + self.assertRaises(ECDSAUsageError, pk2.export_private, self.tname("keygen-priv2.pem")) def test_emit(self): """Basic sanity check on the code emitters.""" @@ -95,22 +94,22 @@ def test_emit_pub(self): def test_sig(self): k = ECDSA256P1.generate() - buf = b'This is the message' + buf = b"This is the message" sig = k.raw_sign(buf) # The code doesn't have any verification, so verify this # manually. - k.key.public_key().verify( - signature=sig, - data=buf, - signature_algorithm=ec.ECDSA(SHA256())) + k.key.public_key().verify(signature=sig, data=buf, signature_algorithm=ec.ECDSA(SHA256())) # Modify the message to make sure the signature fails. - self.assertRaises(InvalidSignature, - k.key.public_key().verify, - signature=sig, - data=b'This is thE message', - signature_algorithm=ec.ECDSA(SHA256())) + self.assertRaises( + InvalidSignature, + k.key.public_key().verify, + signature=sig, + data=b"This is thE message", + signature_algorithm=ec.ECDSA(SHA256()), + ) + -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/ed25519.py b/tools/psa/tfm/bin_utils/imgtool/keys/ed25519.py index 661b8857a55..9ec4e2f5839 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/ed25519.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/ed25519.py @@ -46,21 +46,21 @@ def _get_public(self): def get_public_bytes(self): # The key is embedded into MBUboot in "SubjectPublicKeyInfo" format return self._get_public().public_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) def get_private_bytes(self, minimal): - self._unsupported('get_private_bytes') + self._unsupported("get_private_bytes") def 
export_private(self, path, passwd=None): - self._unsupported('export_private') + self._unsupported("export_private") def export_public(self, path): """Write the public key to the given file.""" pem = self._get_public().public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + with open(path, "wb") as f: f.write(pem) def sig_type(self): @@ -91,8 +91,7 @@ def _get_public(self): return self.key.public_key() def get_private_bytes(self, minimal): - raise Ed25519UsageError("Operation not supported with {} keys".format( - self.shortname())) + raise Ed25519UsageError("Operation not supported with {} keys".format(self.shortname())) def export_private(self, path, passwd=None): """ @@ -104,10 +103,9 @@ def export_private(self, path, passwd=None): else: enc = serialization.BestAvailableEncryption(passwd) pem = self.key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=enc) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=enc + ) + with open(path, "wb") as f: f.write(pem) def sign_digest(self, digest): diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/ed25519_test.py b/tools/psa/tfm/bin_utils/imgtool/keys/ed25519_test.py index d49f34aa3d2..a036f508eed 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/ed25519_test.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/ed25519_test.py @@ -29,13 +29,12 @@ from cryptography.exceptions import InvalidSignature from cryptography.hazmat.primitives.asymmetric import ed25519 -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))) +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))) from imgtool.keys import load, Ed25519, Ed25519UsageError class 
Ed25519KeyGeneration(unittest.TestCase): - def setUp(self): self.test_dir = tempfile.TemporaryDirectory() @@ -48,21 +47,20 @@ def tearDown(self): def test_keygen(self): name1 = self.tname("keygen.pem") k = Ed25519.generate() - k.export_private(name1, b'secret') + k.export_private(name1, b"secret") self.assertIsNone(load(name1)) - k2 = load(name1, b'secret') + k2 = load(name1, b"secret") - pubname = self.tname('keygen-pub.pem') + pubname = self.tname("keygen-pub.pem") k2.export_public(pubname) pk2 = load(pubname) # We should be able to export the public key from the loaded # public key, but not the private key. - pk2.export_public(self.tname('keygen-pub2.pem')) - self.assertRaises(Ed25519UsageError, - pk2.export_private, self.tname('keygen-priv2.pem')) + pk2.export_public(self.tname("keygen-pub2.pem")) + self.assertRaises(Ed25519UsageError, pk2.export_private, self.tname("keygen-priv2.pem")) def test_emit(self): """Basic sanity check on the code emitters.""" @@ -96,7 +94,7 @@ def test_emit_pub(self): def test_sig(self): k = Ed25519.generate() - buf = b'This is the message' + buf = b"This is the message" sha = hashlib.sha256() sha.update(buf) digest = sha.digest() @@ -108,13 +106,10 @@ def test_sig(self): # Modify the message to make sure the signature fails. 
sha = hashlib.sha256() - sha.update(b'This is thE message') + sha.update(b"This is thE message") new_digest = sha.digest() - self.assertRaises(InvalidSignature, - k.key.public_key().verify, - signature=sig, - data=new_digest) + self.assertRaises(InvalidSignature, k.key.public_key().verify, signature=sig, data=new_digest) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/general.py b/tools/psa/tfm/bin_utils/imgtool/keys/general.py index 3fad3ddc10c..8e07848c3f2 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/general.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/general.py @@ -21,42 +21,46 @@ AUTOGEN_MESSAGE = "/* Autogenerated by imgtool.py, do not edit. */" + class KeyClass(object): def _emit(self, header, trailer, encoded_bytes, indent, file=sys.stdout, len_format=None): print(AUTOGEN_MESSAGE, file=file) - print(header, end='', file=file) + print(header, end="", file=file) for count, b in enumerate(encoded_bytes): if count % 8 == 0: - print("\n" + indent, end='', file=file) + print("\n" + indent, end="", file=file) else: - print(" ", end='', file=file) - print("0x{:02x},".format(b), end='', file=file) + print(" ", end="", file=file) + print("0x{:02x},".format(b), end="", file=file) print("\n" + trailer, file=file) if len_format is not None: print(len_format.format(len(encoded_bytes)), file=file) def emit_c_public(self, file=sys.stdout): self._emit( - header="const unsigned char {}_pub_key[] = {{".format(self.shortname()), - trailer="};", - encoded_bytes=self.get_public_bytes(), - indent=" ", - len_format="const unsigned int {}_pub_key_len = {{}};".format(self.shortname()), - file=file) + header="const unsigned char {}_pub_key[] = {{".format(self.shortname()), + trailer="};", + encoded_bytes=self.get_public_bytes(), + indent=" ", + len_format="const unsigned int {}_pub_key_len = {{}};".format(self.shortname()), + file=file, + ) def emit_rust_public(self, file=sys.stdout): self._emit( - 
header="static {}_PUB_KEY: &[u8] = &[".format(self.shortname().upper()), - trailer="];", - encoded_bytes=self.get_public_bytes(), - indent=" ", - file=file) + header="static {}_PUB_KEY: &[u8] = &[".format(self.shortname().upper()), + trailer="];", + encoded_bytes=self.get_public_bytes(), + indent=" ", + file=file, + ) def emit_private(self, minimal, file=sys.stdout): self._emit( - header="const unsigned char enc_priv_key[] = {", - trailer="};", - encoded_bytes=self.get_private_bytes(minimal), - indent=" ", - len_format="const unsigned int enc_priv_key_len = {};", - file=file) + header="const unsigned char enc_priv_key[] = {", + trailer="};", + encoded_bytes=self.get_private_bytes(minimal), + indent=" ", + len_format="const unsigned int enc_priv_key_len = {};", + file=file, + ) diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/rsa.py b/tools/psa/tfm/bin_utils/imgtool/keys/rsa.py index a7230e30eb3..6f343086fa7 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/rsa.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/rsa.py @@ -38,6 +38,7 @@ class RSAUsageError(Exception): class RSAPublic(KeyClass): """The public key can only do a few operations""" + def __init__(self, key): self.key = key @@ -56,28 +57,28 @@ def _get_public(self): def get_public_bytes(self): # The key embedded into MCUboot is in PKCS1 format. 
return self._get_public().public_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.PKCS1) + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.PKCS1 + ) def get_private_bytes(self, minimal): - self._unsupported('get_private_bytes') + self._unsupported("get_private_bytes") def export_private(self, path, passwd=None): - self._unsupported('export_private') + self._unsupported("export_private") def export_public(self, path): """Write the public key to the given file.""" pem = self._get_public().public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + with open(path, "wb") as f: f.write(pem) def sig_type(self): return "PKCS1_PSS_RSA{}_SHA256".format(self.key_size()) def sig_tlv(self): - return"RSA{}".format(self.key_size()) + return "RSA{}".format(self.key_size()) def sig_len(self): return self.key_size() / 8 @@ -86,9 +87,9 @@ def verify(self, signature, payload): k = self.key if isinstance(self.key, rsa.RSAPrivateKey): k = self.key.public_key() - return k.verify(signature=signature, data=payload, - padding=PSS(mgf=MGF1(SHA256()), salt_length=32), - algorithm=SHA256()) + return k.verify( + signature=signature, data=payload, padding=PSS(mgf=MGF1(SHA256()), salt_length=32), algorithm=SHA256() + ) class RSA(RSAPublic): @@ -103,23 +104,19 @@ def __init__(self, key): @staticmethod def generate(key_size=2048): if key_size not in RSA_KEY_SIZES: - raise RSAUsageError("Key size {} is not supported by MCUboot" - .format(key_size)) - pk = rsa.generate_private_key( - public_exponent=65537, - key_size=key_size, - backend=default_backend()) + raise RSAUsageError("Key size {} is not supported by MCUboot".format(key_size)) + pk = rsa.generate_private_key(public_exponent=65537, key_size=key_size, backend=default_backend()) return RSA(pk) def 
_get_public(self): return self.key.public_key() def _build_minimal_rsa_privkey(self, der): - ''' + """ Builds a new DER that only includes N/E/D/P/Q RSA parameters; standard DER private bytes provided by OpenSSL also includes CRT params (DP/DQ/QP) which can be removed. - ''' + """ OFFSET_N = 7 # N is always located at this offset b = bytearray(der) off = OFFSET_N @@ -145,14 +142,15 @@ def _build_minimal_rsa_privkey(self, der): off += len_Q # adjust DER size for removed elements b[2] = (off - 4) >> 8 - b[3] = (off - 4) & 0xff + b[3] = (off - 4) & 0xFF return b[:off] def get_private_bytes(self, minimal): priv = self.key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.TraditionalOpenSSL, - encryption_algorithm=serialization.NoEncryption()) + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) if minimal: priv = self._build_minimal_rsa_privkey(priv) return priv @@ -165,16 +163,12 @@ def export_private(self, path, passwd=None): else: enc = serialization.BestAvailableEncryption(passwd) pem = self.key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=enc) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=enc + ) + with open(path, "wb") as f: f.write(pem) def sign(self, payload): # The verification code only allows the salt length to be the # same as the hash length, 32. 
- return self.key.sign( - data=payload, - padding=PSS(mgf=MGF1(SHA256()), salt_length=32), - algorithm=SHA256()) + return self.key.sign(data=payload, padding=PSS(mgf=MGF1(SHA256()), salt_length=32), algorithm=SHA256()) diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/rsa_test.py b/tools/psa/tfm/bin_utils/imgtool/keys/rsa_test.py index 722a4446035..9e9f80e5783 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/rsa_test.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/rsa_test.py @@ -30,15 +30,13 @@ from cryptography.hazmat.primitives.hashes import SHA256 # Setup sys path so 'imgtool' is in it. -sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), - '../..'))) +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))) from imgtool.keys import load, RSA, RSAUsageError from imgtool.keys.rsa import RSA_KEY_SIZES class KeyGeneration(unittest.TestCase): - def setUp(self): self.test_dir = tempfile.TemporaryDirectory() @@ -56,22 +54,21 @@ def test_keygen(self): for key_size in RSA_KEY_SIZES: name1 = self.tname("keygen.pem") k = RSA.generate(key_size=key_size) - k.export_private(name1, b'secret') + k.export_private(name1, b"secret") # Try loading the key without a password. self.assertIsNone(load(name1)) - k2 = load(name1, b'secret') + k2 = load(name1, b"secret") - pubname = self.tname('keygen-pub.pem') + pubname = self.tname("keygen-pub.pem") k2.export_public(pubname) pk2 = load(pubname) # We should be able to export the public key from the loaded # public key, but not the private key. 
- pk2.export_public(self.tname('keygen-pub2.pem')) - self.assertRaises(RSAUsageError, pk2.export_private, - self.tname('keygen-priv2.pem')) + pk2.export_public(self.tname("keygen-pub2.pem")) + self.assertRaises(RSAUsageError, pk2.export_private, self.tname("keygen-priv2.pem")) def test_emit(self): """Basic sanity check on the code emitters.""" @@ -108,25 +105,25 @@ def test_emit_pub(self): def test_sig(self): for key_size in RSA_KEY_SIZES: k = RSA.generate(key_size=key_size) - buf = b'This is the message' + buf = b"This is the message" sig = k.sign(buf) # The code doesn't have any verification, so verify this # manually. k.key.public_key().verify( - signature=sig, - data=buf, - padding=PSS(mgf=MGF1(SHA256()), salt_length=32), - algorithm=SHA256()) + signature=sig, data=buf, padding=PSS(mgf=MGF1(SHA256()), salt_length=32), algorithm=SHA256() + ) # Modify the message to make sure the signature fails. - self.assertRaises(InvalidSignature, - k.key.public_key().verify, - signature=sig, - data=b'This is thE message', - padding=PSS(mgf=MGF1(SHA256()), salt_length=32), - algorithm=SHA256()) + self.assertRaises( + InvalidSignature, + k.key.public_key().verify, + signature=sig, + data=b"This is thE message", + padding=PSS(mgf=MGF1(SHA256()), salt_length=32), + algorithm=SHA256(), + ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/psa/tfm/bin_utils/imgtool/keys/x25519.py b/tools/psa/tfm/bin_utils/imgtool/keys/x25519.py index 32751163d3f..0479271d38d 100644 --- a/tools/psa/tfm/bin_utils/imgtool/keys/x25519.py +++ b/tools/psa/tfm/bin_utils/imgtool/keys/x25519.py @@ -46,21 +46,21 @@ def _get_public(self): def get_public_bytes(self): # The key is embedded into MBUboot in "SubjectPublicKeyInfo" format return self._get_public().public_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PublicFormat.SubjectPublicKeyInfo) + encoding=serialization.Encoding.DER, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) def 
get_private_bytes(self, minimal): - self._unsupported('get_private_bytes') + self._unsupported("get_private_bytes") def export_private(self, path, passwd=None): - self._unsupported('export_private') + self._unsupported("export_private") def export_public(self, path): """Write the public key to the given file.""" pem = self._get_public().public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PublicFormat.SubjectPublicKeyInfo + ) + with open(path, "wb") as f: f.write(pem) def sig_type(self): @@ -94,7 +94,8 @@ def get_private_bytes(self, minimal): return self.key.private_bytes( encoding=serialization.Encoding.DER, format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption()) + encryption_algorithm=serialization.NoEncryption(), + ) def export_private(self, path, passwd=None): """ @@ -106,10 +107,9 @@ def export_private(self, path, passwd=None): else: enc = serialization.BestAvailableEncryption(passwd) pem = self.key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=enc) - with open(path, 'wb') as f: + encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.PKCS8, encryption_algorithm=enc + ) + with open(path, "wb") as f: f.write(pem) def sign_digest(self, digest): diff --git a/tools/psa/tfm/bin_utils/imgtool/main.py b/tools/psa/tfm/bin_utils/imgtool/main.py index dd6c0447b04..2ee898e2c7b 100755 --- a/tools/psa/tfm/bin_utils/imgtool/main.py +++ b/tools/psa/tfm/bin_utils/imgtool/main.py @@ -24,13 +24,11 @@ import sys from imgtool import image, imgtool_version from imgtool.version import decode_version -from .keys import ( - RSAUsageError, ECDSAUsageError, Ed25519UsageError, X25519UsageError) +from .keys import RSAUsageError, ECDSAUsageError, Ed25519UsageError, X25519UsageError MIN_PYTHON_VERSION = (3, 6) if 
sys.version_info < MIN_PYTHON_VERSION: - sys.exit("Python %s.%s or newer is required by imgtool." - % MIN_PYTHON_VERSION) + sys.exit("Python %s.%s or newer is required by imgtool." % MIN_PYTHON_VERSION) def gen_rsa2048(keyfile, passwd): @@ -38,8 +36,7 @@ def gen_rsa2048(keyfile, passwd): def gen_rsa3072(keyfile, passwd): - keys.RSA.generate(key_size=3072).export_private(path=keyfile, - passwd=passwd) + keys.RSA.generate(key_size=3072).export_private(path=keyfile, passwd=passwd) def gen_ecdsa_p256(keyfile, passwd): @@ -58,14 +55,14 @@ def gen_x25519(keyfile, passwd): keys.X25519.generate().export_private(path=keyfile, passwd=passwd) -valid_langs = ['c', 'rust'] +valid_langs = ["c", "rust"] keygens = { - 'rsa-2048': gen_rsa2048, - 'rsa-3072': gen_rsa3072, - 'ecdsa-p256': gen_ecdsa_p256, - 'ecdsa-p224': gen_ecdsa_p224, - 'ed25519': gen_ed25519, - 'x25519': gen_x25519, + "rsa-2048": gen_rsa2048, + "rsa-3072": gen_rsa3072, + "ecdsa-p256": gen_ecdsa_p256, + "ecdsa-p224": gen_ecdsa_p224, + "ed25519": gen_ed25519, + "x25519": gen_x25519, } @@ -74,7 +71,7 @@ def load_key(keyfile): key = keys.load(keyfile) if key is not None: return key - passwd = getpass.getpass("Enter key passphrase: ").encode('utf-8') + passwd = getpass.getpass("Enter key passphrase: ").encode("utf-8") return keys.load(keyfile, passwd) @@ -88,57 +85,63 @@ def get_password(): # Password must be bytes, always use UTF-8 for consistent # encoding. 
- return passwd.encode('utf-8') - - -@click.option('-p', '--password', is_flag=True, - help='Prompt for password to protect key') -@click.option('-t', '--type', metavar='type', required=True, - type=click.Choice(keygens.keys()), prompt=True, - help='{}'.format('One of: {}'.format(', '.join(keygens.keys())))) -@click.option('-k', '--key', metavar='filename', required=True) -@click.command(help='Generate pub/private keypair') + return passwd.encode("utf-8") + + +@click.option("-p", "--password", is_flag=True, help="Prompt for password to protect key") +@click.option( + "-t", + "--type", + metavar="type", + required=True, + type=click.Choice(keygens.keys()), + prompt=True, + help="One of: {}".format(", ".join(keygens.keys())), +) +@click.option("-k", "--key", metavar="filename", required=True) +@click.command(help="Generate pub/private keypair") def keygen(type, key, password): password = get_password() if password else None keygens[type](key, password) -@click.option('-l', '--lang', metavar='lang', default=valid_langs[0], - type=click.Choice(valid_langs)) -@click.option('-k', '--key', metavar='filename', required=True) -@click.command(help='Dump public key from keypair') +@click.option("-l", "--lang", metavar="lang", default=valid_langs[0], type=click.Choice(valid_langs)) +@click.option("-k", "--key", metavar="filename", required=True) +@click.command(help="Dump public key from keypair") def getpub(key, lang): key = load_key(key) if key is None: print("Invalid passphrase") - elif lang == 'c': + elif lang == "c": key.emit_c_public() - elif lang == 'rust': + elif lang == "rust": key.emit_rust_public() else: raise ValueError("BUG: should never get here!") -@click.option('--minimal', default=False, is_flag=True, - help='Reduce the size of the dumped private key to include only ' - 'the minimum amount of data required to decrypt. This ' - 'might require changes to the build config. Check the docs!' 
- ) -@click.option('-k', '--key', metavar='filename', required=True) -@click.command(help='Dump private key from keypair') +@click.option( + "--minimal", + default=False, + is_flag=True, + help="Reduce the size of the dumped private key to include only " + "the minimum amount of data required to decrypt. This " + "might require changes to the build config. Check the docs!", +) +@click.option("-k", "--key", metavar="filename", required=True) +@click.command(help="Dump private key from keypair") def getpriv(key, minimal): key = load_key(key) if key is None: print("Invalid passphrase") try: key.emit_private(minimal) - except (RSAUsageError, ECDSAUsageError, Ed25519UsageError, - X25519UsageError) as e: + except (RSAUsageError, ECDSAUsageError, Ed25519UsageError, X25519UsageError) as e: raise click.UsageError(e) -@click.argument('imgfile') -@click.option('-k', '--key', metavar='filename') +@click.argument("imgfile") +@click.option("-k", "--key", metavar="filename") @click.command(help="Check that signed image can be verified by given key") def verify(key, imgfile): key = load_key(key) if key else None @@ -171,23 +174,22 @@ def validate_version(ctx, param, value): def validate_security_counter(ctx, param, value): if value is not None: - if value.lower() == 'auto': - return 'auto' + if value.lower() == "auto": + return "auto" else: try: return int(value, 0) except ValueError: raise click.BadParameter( "{} is not a valid integer. Please use code literals " - "prefixed with 0b/0B, 0o/0O, or 0x/0X as necessary." 
- .format(value)) + "prefixed with 0b/0B, 0o/0O, or 0x/0X as necessary.".format(value) + ) def validate_header_size(ctx, param, value): min_hdr_size = image.IMAGE_HEADER_SIZE if value < min_hdr_size: - raise click.BadParameter( - "Minimum value for -H/--header-size is {}".format(min_hdr_size)) + raise click.BadParameter("Minimum value for -H/--header-size is {}".format(min_hdr_size)) return value @@ -196,13 +198,13 @@ def get_dependencies(ctx, param, value): versions = [] images = re.findall(r"\((\d+)", value) if len(images) == 0: - raise click.BadParameter( - "Image dependency format is invalid: {}".format(value)) + raise click.BadParameter("Image dependency format is invalid: {}".format(value)) raw_versions = re.findall(r",\s*([0-9.+]+)\)", value) if len(images) != len(raw_versions): raise click.BadParameter( - '''There's a mismatch between the number of dependency images - and versions in: {}'''.format(value)) + """There's a mismatch between the number of dependency images + and versions in: {}""".format(value) + ) for raw_version in raw_versions: try: versions.append(decode_version(raw_version)) @@ -215,118 +217,192 @@ def get_dependencies(ctx, param, value): class BasedIntParamType(click.ParamType): - name = 'integer' + name = "integer" def convert(self, value, param, ctx): try: return int(value, 0) except ValueError: - self.fail('%s is not a valid integer. Please use code literals ' - 'prefixed with 0b/0B, 0o/0O, or 0x/0X as necessary.' - % value, param, ctx) - - -@click.argument('outfile') -@click.argument('infile') -@click.option('--custom-tlv', required=False, nargs=2, default=[], - multiple=True, metavar='[tag] [value]', - help='Custom TLV that will be placed into protected area. ' - 'Add "0x" prefix if the value should be interpreted as an ' - 'integer, otherwise it will be interpreted as a string. 
' - 'Specify the option multiple times to add multiple TLVs.') -@click.option('-R', '--erased-val', type=click.Choice(['0', '0xff']), - required=False, - help='The value that is read back from erased flash.') -@click.option('-x', '--hex-addr', type=BasedIntParamType(), required=False, - help='Adjust address in hex output file.') -@click.option('-L', '--load-addr', type=BasedIntParamType(), required=False, - help='Load address for image when it should run from RAM.') -@click.option('-F', '--rom-fixed', type=BasedIntParamType(), required=False, - help='Set flash address the image is built for.') -@click.option('--save-enctlv', default=False, is_flag=True, - help='When upgrading, save encrypted key TLVs instead of plain ' - 'keys. Enable when BOOT_SWAP_SAVE_ENCTLV config option ' - 'was set.') -@click.option('-E', '--encrypt', metavar='filename', - help='Encrypt image using the provided public key. ' - '(Not supported in direct-xip or ram-load mode.)') -@click.option('--encrypt-keylen', default='128', - type=click.Choice(['128','256']), - help='When encrypting the image using AES, select a 128 bit or ' - '256 bit key len.') -@click.option('-e', '--endian', type=click.Choice(['little', 'big']), - default='little', help="Select little or big endian") -@click.option('--overwrite-only', default=False, is_flag=True, - help='Use overwrite-only instead of swap upgrades') -@click.option('--boot-record', metavar='sw_type', help='Create CBOR encoded ' - 'boot record TLV. The sw_type represents the role of the ' - 'software component (e.g. CoFM for coprocessor firmware). ' - '[max. 
12 characters]') -@click.option('-M', '--max-sectors', type=int, - help='When padding allow for this amount of sectors (defaults ' - 'to 128)') -@click.option('--confirm', default=False, is_flag=True, - help='When padding the image, mark it as confirmed (implies ' - '--pad)') -@click.option('--pad', default=False, is_flag=True, - help='Pad image to --slot-size bytes, adding trailer magic') -@click.option('-S', '--slot-size', type=BasedIntParamType(), required=True, - help='Size of the slot. If the slots have different sizes, use ' - 'the size of the secondary slot.') -@click.option('--pad-header', default=False, is_flag=True, - help='Add --header-size zeroed bytes at the beginning of the ' - 'image') -@click.option('-H', '--header-size', callback=validate_header_size, - type=BasedIntParamType(), required=True) -@click.option('--pad-sig', default=False, is_flag=True, - help='Add 0-2 bytes of padding to ECDSA signature ' - '(for mcuboot <1.5)') -@click.option('-d', '--dependencies', callback=get_dependencies, - required=False, help='''Add dependence on another image, format: - "(,), ... "''') -@click.option('-s', '--security-counter', callback=validate_security_counter, - help='Specify the value of security counter. Use the `auto` ' - 'keyword to automatically generate it from the image version.') -@click.option('-v', '--version', callback=validate_version, required=True) -@click.option('--align', type=click.Choice(['1', '2', '4', '8']), - required=True) -@click.option('--public-key-format', type=click.Choice(['hash', 'full']), - default='hash', help='In what format to add the public key to ' - 'the image manifest: full key or hash of the key.') -@click.option('-k', '--key', metavar='filename') -@click.command(help='''Create a signed or unsigned image\n + self.fail( + "%s is not a valid integer. Please use code literals " + "prefixed with 0b/0B, 0o/0O, or 0x/0X as necessary." 
% value, + param, + ctx, + ) + + +@click.argument("outfile") +@click.argument("infile") +@click.option( + "--custom-tlv", + required=False, + nargs=2, + default=[], + multiple=True, + metavar="[tag] [value]", + help="Custom TLV that will be placed into protected area. " + 'Add "0x" prefix if the value should be interpreted as an ' + "integer, otherwise it will be interpreted as a string. " + "Specify the option multiple times to add multiple TLVs.", +) +@click.option( + "-R", + "--erased-val", + type=click.Choice(["0", "0xff"]), + required=False, + help="The value that is read back from erased flash.", +) +@click.option("-x", "--hex-addr", type=BasedIntParamType(), required=False, help="Adjust address in hex output file.") +@click.option( + "-L", + "--load-addr", + type=BasedIntParamType(), + required=False, + help="Load address for image when it should run from RAM.", +) +@click.option( + "-F", "--rom-fixed", type=BasedIntParamType(), required=False, help="Set flash address the image is built for." +) +@click.option( + "--save-enctlv", + default=False, + is_flag=True, + help="When upgrading, save encrypted key TLVs instead of plain " + "keys. Enable when BOOT_SWAP_SAVE_ENCTLV config option " + "was set.", +) +@click.option( + "-E", + "--encrypt", + metavar="filename", + help="Encrypt image using the provided public key. (Not supported in direct-xip or ram-load mode.)", +) +@click.option( + "--encrypt-keylen", + default="128", + type=click.Choice(["128", "256"]), + help="When encrypting the image using AES, select a 128 bit or 256 bit key len.", +) +@click.option( + "-e", "--endian", type=click.Choice(["little", "big"]), default="little", help="Select little or big endian" +) +@click.option("--overwrite-only", default=False, is_flag=True, help="Use overwrite-only instead of swap upgrades") +@click.option( + "--boot-record", + metavar="sw_type", + help="Create CBOR encoded " + "boot record TLV. The sw_type represents the role of the " + "software component (e.g. 
CoFM for coprocessor firmware). " + "[max. 12 characters]", +) +@click.option("-M", "--max-sectors", type=int, help="When padding allow for this amount of sectors (defaults to 128)") +@click.option( + "--confirm", default=False, is_flag=True, help="When padding the image, mark it as confirmed (implies --pad)" +) +@click.option("--pad", default=False, is_flag=True, help="Pad image to --slot-size bytes, adding trailer magic") +@click.option( + "-S", + "--slot-size", + type=BasedIntParamType(), + required=True, + help="Size of the slot. If the slots have different sizes, use the size of the secondary slot.", +) +@click.option( + "--pad-header", default=False, is_flag=True, help="Add --header-size zeroed bytes at the beginning of the image" +) +@click.option("-H", "--header-size", callback=validate_header_size, type=BasedIntParamType(), required=True) +@click.option( + "--pad-sig", default=False, is_flag=True, help="Add 0-2 bytes of padding to ECDSA signature (for mcuboot <1.5)" +) +@click.option( + "-d", + "--dependencies", + callback=get_dependencies, + required=False, + help='''Add dependence on another image, format: + "(,), ... "''', +) +@click.option( + "-s", + "--security-counter", + callback=validate_security_counter, + help="Specify the value of security counter. 
Use the `auto` " + "keyword to automatically generate it from the image version.", +) +@click.option("-v", "--version", callback=validate_version, required=True) +@click.option("--align", type=click.Choice(["1", "2", "4", "8"]), required=True) +@click.option( + "--public-key-format", + type=click.Choice(["hash", "full"]), + default="hash", + help="In what format to add the public key to the image manifest: full key or hash of the key.", +) +@click.option("-k", "--key", metavar="filename") +@click.command( + help="""Create a signed or unsigned image\n INFILE and OUTFILE are parsed as Intel HEX if the params have - .hex extension, otherwise binary format is used''') -def sign(key, public_key_format, align, version, pad_sig, header_size, - pad_header, slot_size, pad, confirm, max_sectors, overwrite_only, - endian, encrypt_keylen, encrypt, infile, outfile, dependencies, - load_addr, hex_addr, erased_val, save_enctlv, security_counter, - boot_record, custom_tlv, rom_fixed): - + .hex extension, otherwise binary format is used""" +) +def sign( + key, + public_key_format, + align, + version, + pad_sig, + header_size, + pad_header, + slot_size, + pad, + confirm, + max_sectors, + overwrite_only, + endian, + encrypt_keylen, + encrypt, + infile, + outfile, + dependencies, + load_addr, + hex_addr, + erased_val, + save_enctlv, + security_counter, + boot_record, + custom_tlv, + rom_fixed, +): if confirm: # Confirmed but non-padded images don't make much sense, because # otherwise there's no trailer area for writing the confirmed status. 
pad = True - img = image.Image(version=decode_version(version), header_size=header_size, - pad_header=pad_header, pad=pad, confirm=confirm, - align=int(align), slot_size=slot_size, - max_sectors=max_sectors, overwrite_only=overwrite_only, - endian=endian, load_addr=load_addr, rom_fixed=rom_fixed, - erased_val=erased_val, save_enctlv=save_enctlv, - security_counter=security_counter) + img = image.Image( + version=decode_version(version), + header_size=header_size, + pad_header=pad_header, + pad=pad, + confirm=confirm, + align=int(align), + slot_size=slot_size, + max_sectors=max_sectors, + overwrite_only=overwrite_only, + endian=endian, + load_addr=load_addr, + rom_fixed=rom_fixed, + erased_val=erased_val, + save_enctlv=save_enctlv, + security_counter=security_counter, + ) img.load(infile) key = load_key(key) if key else None enckey = load_key(encrypt) if encrypt else None if enckey and key: - if ((isinstance(key, keys.ECDSA256P1) and - not isinstance(enckey, keys.ECDSA256P1Public)) - or (isinstance(key, keys.RSA) and - not isinstance(enckey, keys.RSAPublic))): + if (isinstance(key, keys.ECDSA256P1) and not isinstance(enckey, keys.ECDSA256P1Public)) or ( + isinstance(key, keys.RSA) and not isinstance(enckey, keys.RSAPublic) + ): # FIXME - raise click.UsageError("Signing and encryption must use the same " - "type of key") + raise click.UsageError("Signing and encryption must use the same type of key") - if pad_sig and hasattr(key, 'pad_sig'): + if pad_sig and hasattr(key, "pad_sig"): key.pad_sig = True # Get list of custom protected TLVs from the command-line @@ -334,29 +410,24 @@ def sign(key, public_key_format, align, version, pad_sig, header_size, for tlv in custom_tlv: tag = int(tlv[0], 0) if tag in custom_tlvs: - raise click.UsageError('Custom TLV %s already exists.' % hex(tag)) + raise click.UsageError("Custom TLV %s already exists." % hex(tag)) if tag in image.TLV_VALUES.values(): - raise click.UsageError( - 'Custom TLV %s conflicts with predefined TLV.' 
% hex(tag)) + raise click.UsageError("Custom TLV %s conflicts with predefined TLV." % hex(tag)) value = tlv[1] - if value.startswith('0x'): + if value.startswith("0x"): if len(value[2:]) % 2: - raise click.UsageError('Custom TLV length is odd.') + raise click.UsageError("Custom TLV length is odd.") custom_tlvs[tag] = bytes.fromhex(value[2:]) else: - custom_tlvs[tag] = value.encode('utf-8') + custom_tlvs[tag] = value.encode("utf-8") - img.create(key, public_key_format, enckey, dependencies, boot_record, - custom_tlvs, int(encrypt_keylen)) + img.create(key, public_key_format, enckey, dependencies, boot_record, custom_tlvs, int(encrypt_keylen)) img.save(outfile, hex_addr) class AliasesGroup(click.Group): - - _aliases = { - "create": "sign", - } + _aliases = {"create": "sign"} def list_commands(self, ctx): cmds = [k for k in self.commands] @@ -372,13 +443,12 @@ def get_command(self, ctx, cmd_name): return None -@click.command(help='Print imgtool version information') +@click.command(help="Print imgtool version information") def version(): print(imgtool_version) -@click.command(cls=AliasesGroup, - context_settings=dict(help_option_names=['-h', '--help'])) +@click.command(cls=AliasesGroup, context_settings=dict(help_option_names=["-h", "--help"])) def imgtool(): pass @@ -391,5 +461,5 @@ def imgtool(): imgtool.add_command(version) -if __name__ == '__main__': +if __name__ == "__main__": imgtool() diff --git a/tools/psa/tfm/bin_utils/imgtool/version.py b/tools/psa/tfm/bin_utils/imgtool/version.py index 6e38f445bb5..debfe9c8ada 100644 --- a/tools/psa/tfm/bin_utils/imgtool/version.py +++ b/tools/psa/tfm/bin_utils/imgtool/version.py @@ -24,23 +24,21 @@ from collections import namedtuple import re -SemiSemVersion = namedtuple('SemiSemVersion', ['major', 'minor', 'revision', - 'build']) +SemiSemVersion = namedtuple("SemiSemVersion", ["major", "minor", "revision", "build"]) -version_re = re.compile( - r"""^([1-9]\d*|0)(\.([1-9]\d*|0)(\.([1-9]\d*|0)(\+([1-9]\d*|0))?)?)?$""") 
+version_re = re.compile(r"""^([1-9]\d*|0)(\.([1-9]\d*|0)(\.([1-9]\d*|0)(\+([1-9]\d*|0))?)?)?$""") def decode_version(text): - """Decode the version string, which should be of the form maj.min.rev+build - """ + """Decode the version string, which should be of the form maj.min.rev+build""" m = version_re.match(text) if m: result = SemiSemVersion( - int(m.group(1)) if m.group(1) else 0, - int(m.group(3)) if m.group(3) else 0, - int(m.group(5)) if m.group(5) else 0, - int(m.group(7)) if m.group(7) else 0) + int(m.group(1)) if m.group(1) else 0, + int(m.group(3)) if m.group(3) else 0, + int(m.group(5)) if m.group(5) else 0, + int(m.group(7)) if m.group(7) else 0, + ) return result else: msg = "Invalid version number, should be maj.min.rev+build with later " @@ -48,7 +46,7 @@ def decode_version(text): raise ValueError(msg) -if __name__ == '__main__': +if __name__ == "__main__": print(decode_version("1.2")) print(decode_version("1.0")) print(decode_version("0.0.2+75")) diff --git a/tools/psa/tfm/bin_utils/macro_parser.py b/tools/psa/tfm/bin_utils/macro_parser.py index 12e8a92f1af..e03e5879df2 100644 --- a/tools/psa/tfm/bin_utils/macro_parser.py +++ b/tools/psa/tfm/bin_utils/macro_parser.py @@ -12,7 +12,10 @@ import os # Match (((x) + (y))) mode and ((x) + (y)) mode. x, y can be HEX or DEC value. 
-expression_re = re.compile(r"([(]?[(]?[(]?(([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*([\+\-]\s*([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*)*)[)]?\s*([\+\-])\s*[(]?(([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*([\+\-]\s*([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*)*)[)]?[)]?[)]?)|([(]?[(]?[(]?(([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*([\+\-]\s*([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*)*)[)]?[)]?[)]?)") +expression_re = re.compile( + r"([(]?[(]?[(]?(([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*([\+\-]\s*([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*)*)[)]?\s*([\+\-])\s*[(]?(([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*([\+\-]\s*([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*)*)[)]?[)]?[)]?)|([(]?[(]?[(]?(([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*([\+\-]\s*([(]?(((0x)[0-9a-fA-F]+)|([0-9]+))[)]?)\s*)*)[)]?[)]?[)]?)" +) + # Simple parser that takes a string and evaluates an expression from it. # The expression might contain additions and subtractions amongst numbers that @@ -32,16 +35,16 @@ def parse_and_sum(text): msg += " (NON-)SECURE_IMAGE_MAX_SIZE macros" raise Exception(msg) - nums = re.findall(r'(0x[A-Fa-f0-9]+)|[\d]+', m.group(0)) + nums = re.findall(r"(0x[A-Fa-f0-9]+)|[\d]+", m.group(0)) for i in range(len(nums)): nums[i] = int(nums[i], 0) - ops = re.findall(r'\+|\-', m.group(0)) + ops = re.findall(r"\+|\-", m.group(0)) sum = nums[0] for i in range(len(ops)): - if ops[i] == '+': - sum += nums[i+1] + if ops[i] == "+": + sum += nums[i + 1] else: - sum -= nums[i+1] + sum -= nums[i + 1] return sum @@ -58,17 +61,16 @@ def evaluate_macro(file, regexp, matchGroupKey, matchGroupData, bracketless=Fals configFile = os.path.join(scriptsDir, file) macroValue = {} - with open(configFile, 'r') as macros_preprocessed_file: + with open(configFile, "r") as macros_preprocessed_file: for line in macros_preprocessed_file: if bracketless: - line=line.replace("(","") - line=line.replace(")","") + line = line.replace("(", "") + line = line.replace(")", "") m = regexp_compiled.match(line) if m is not 
None: - macroValue[m.group(matchGroupKey)] = \ - parse_and_sum(m.group(matchGroupData)) + macroValue[m.group(matchGroupKey)] = parse_and_sum(m.group(matchGroupData)) - if (matchGroupKey == 0 and not macroValue): + if matchGroupKey == 0 and not macroValue: macroValue["None"] = None return list(macroValue.values())[0] if (matchGroupKey == 0) else macroValue diff --git a/tools/psa/tfm/bin_utils/wrapper.py b/tools/psa/tfm/bin_utils/wrapper.py index 247cb1042fc..704ae92233f 100755 --- a/tools/psa/tfm/bin_utils/wrapper.py +++ b/tools/psa/tfm/bin_utils/wrapper.py @@ -20,7 +20,7 @@ import imgtool import imgtool.main -parser_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../')) +parser_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../")) sys.path.append(parser_path) import macro_parser @@ -28,68 +28,108 @@ load_addr_re = re.compile(r"^\s*RE_IMAGE_LOAD_ADDRESS\s*=\s*(.*)") rom_fixed_re = re.compile(r"^\s*RE_IMAGE_ROM_FIXED\s*=\s*(.*)") -#This works around Python 2 and Python 3 handling character encodings -#differently. More information about this issue at -#https://click.palletsprojects.com/en/5.x/python3 -os.environ['LC_ALL'] = 'C.UTF-8' -os.environ['LANG'] = 'C.UTF-8' +# This works around Python 2 and Python 3 handling character encodings +# differently. More information about this issue at +# https://click.palletsprojects.com/en/5.x/python3 +os.environ["LC_ALL"] = "C.UTF-8" +os.environ["LANG"] = "C.UTF-8" -@click.argument('outfile') -@click.argument('infile') -@click.option('-R', '--erased-val', type=click.Choice(['0', '0xff']), - required=False, help='The value that is read back from erased ' - 'flash.') -@click.option('-x', '--hex-addr', type=imgtool.main.BasedIntParamType(), - required=False, help='Adjust address in hex output file.') -@click.option('--save-enctlv', default=False, is_flag=True, - help='When upgrading, save encrypted key TLVs instead of plain ' - 'keys. 
Enable when BOOT_SWAP_SAVE_ENCTLV config option ' - 'was set.') -@click.option('-E', '--encrypt', metavar='filename', - help='Encrypt image using the provided public key') -@click.option('-e', '--endian', type=click.Choice(['little', 'big']), - default='little', help="Select little or big endian") -@click.option('--overwrite-only', default=False, is_flag=True, - help='Use overwrite-only instead of swap upgrades') -@click.option('-M', '--max-sectors', type=int, - help='When padding allow for this amount of sectors (defaults ' - 'to 128)') -@click.option('--confirm', default=False, is_flag=True, - help='When padding the image, mark it as confirmed') -@click.option('--pad', default=False, is_flag=True, - help='Pad image to the size determined by --layout, adding ' - 'trailer magic') -@click.option('-l', '--layout', help='The file containing the macros of the ' - 'slot sizes') -@click.option('--pad-header', default=False, is_flag=True, - help='Adds --erased-val (defaults to 0xff) --header-size times ' - 'at the beginning of the image') -@click.option('-H', '--header-size', - callback=imgtool.main.validate_header_size, - type=imgtool.main.BasedIntParamType(), required=True) -@click.option('-d', '--dependencies', callback=imgtool.main.get_dependencies, - required=False, help='''Add dependence on another image, format: - "(,), ... "''') -@click.option('-s', '--security-counter', - callback=imgtool.main.validate_security_counter, - help='Specify the value of security counter. 
Use the `auto` ' - 'keyword to automatically generate it from the image version.') -@click.option('-v', '--version', callback=imgtool.main.validate_version, - required=True) -@click.option('--align', type=click.Choice(['1', '2', '4', '8']), - required=True) -@click.option('--public-key-format', type=click.Choice(['hash', 'full']), - default='hash', help='In what format to add the public key to ' - 'the image manifest: full key or hash of the key.') -@click.option('-k', '--key', metavar='filename') -@click.command(help='''Create a signed or unsigned image\n - INFILE and OUTFILE are parsed as Intel HEX if the params have - .hex extension, otherwise binary format is used''') -def wrap(key, align, version, header_size, pad_header, layout, pad, confirm, - max_sectors, overwrite_only, endian, encrypt, infile, outfile, - dependencies, hex_addr, erased_val, save_enctlv, public_key_format, - security_counter): +@click.argument("outfile") +@click.argument("infile") +@click.option( + "-R", + "--erased-val", + type=click.Choice(["0", "0xff"]), + required=False, + help="The value that is read back from erased flash.", +) +@click.option( + "-x", "--hex-addr", type=imgtool.main.BasedIntParamType(), required=False, help="Adjust address in hex output file." +) +@click.option( + "--save-enctlv", + default=False, + is_flag=True, + help="When upgrading, save encrypted key TLVs instead of plain " + "keys. 
Enable when BOOT_SWAP_SAVE_ENCTLV config option " + "was set.", +) +@click.option("-E", "--encrypt", metavar="filename", help="Encrypt image using the provided public key") +@click.option( + "-e", "--endian", type=click.Choice(["little", "big"]), default="little", help="Select little or big endian" +) +@click.option("--overwrite-only", default=False, is_flag=True, help="Use overwrite-only instead of swap upgrades") +@click.option("-M", "--max-sectors", type=int, help="When padding allow for this amount of sectors (defaults to 128)") +@click.option("--confirm", default=False, is_flag=True, help="When padding the image, mark it as confirmed") +@click.option( + "--pad", default=False, is_flag=True, help="Pad image to the size determined by --layout, adding trailer magic" +) +@click.option("-l", "--layout", help="The file containing the macros of the slot sizes") +@click.option( + "--pad-header", + default=False, + is_flag=True, + help="Adds --erased-val (defaults to 0xff) --header-size times at the beginning of the image", +) +@click.option( + "-H", + "--header-size", + callback=imgtool.main.validate_header_size, + type=imgtool.main.BasedIntParamType(), + required=True, +) +@click.option( + "-d", + "--dependencies", + callback=imgtool.main.get_dependencies, + required=False, + help='''Add dependence on another image, format: + "(,), ... "''', +) +@click.option( + "-s", + "--security-counter", + callback=imgtool.main.validate_security_counter, + help="Specify the value of security counter. 
Use the `auto` " + "keyword to automatically generate it from the image version.", +) +@click.option("-v", "--version", callback=imgtool.main.validate_version, required=True) +@click.option("--align", type=click.Choice(["1", "2", "4", "8"]), required=True) +@click.option( + "--public-key-format", + type=click.Choice(["hash", "full"]), + default="hash", + help="In what format to add the public key to the image manifest: full key or hash of the key.", +) +@click.option("-k", "--key", metavar="filename") +@click.command( + help="""Create a signed or unsigned image\n + INFILE and OUTFILE are parsed as Intel HEX if the params have + .hex extension, otherwise binary format is used""" +) +def wrap( + key, + align, + version, + header_size, + pad_header, + layout, + pad, + confirm, + max_sectors, + overwrite_only, + endian, + encrypt, + infile, + outfile, + dependencies, + hex_addr, + erased_val, + save_enctlv, + public_key_format, + security_counter, +): slot_size = macro_parser.evaluate_macro(layout, sign_bin_size_re, 0, 1) load_addr = macro_parser.evaluate_macro(layout, load_addr_re, 0, 1) rom_fixed = macro_parser.evaluate_macro(layout, rom_fixed_re, 0, 1) @@ -100,29 +140,35 @@ def wrap(key, align, version, header_size, pad_header, layout, pad, confirm, else: boot_record = "NSPE_SPE" - img = imgtool.image.Image(version=imgtool.version.decode_version(version), - header_size=header_size, pad_header=pad_header, - pad=pad, confirm=confirm, align=int(align), - slot_size=slot_size, max_sectors=max_sectors, - overwrite_only=overwrite_only, endian=endian, - load_addr=load_addr, rom_fixed=rom_fixed, - erased_val=erased_val, - save_enctlv=save_enctlv, - security_counter=security_counter) + img = imgtool.image.Image( + version=imgtool.version.decode_version(version), + header_size=header_size, + pad_header=pad_header, + pad=pad, + confirm=confirm, + align=int(align), + slot_size=slot_size, + max_sectors=max_sectors, + overwrite_only=overwrite_only, + endian=endian, + 
load_addr=load_addr, + rom_fixed=rom_fixed, + erased_val=erased_val, + save_enctlv=save_enctlv, + security_counter=security_counter, + ) img.load(infile) key = imgtool.main.load_key(key) if key else None enckey = imgtool.main.load_key(encrypt) if encrypt else None if enckey and key: - if (isinstance(key, imgtool.keys.RSA) and - not isinstance(enckey, imgtool.keys.RSAPublic)): + if isinstance(key, imgtool.keys.RSA) and not isinstance(enckey, imgtool.keys.RSAPublic): # FIXME - raise click.UsageError("Signing and encryption must use the same " - "type of key") + raise click.UsageError("Signing and encryption must use the same type of key") img.create(key, public_key_format, enckey, dependencies, boot_record) img.save(outfile, hex_addr) -if __name__ == '__main__': +if __name__ == "__main__": wrap() diff --git a/tools/pyproject.toml b/tools/pyproject.toml index 4ee46c230d1..6e1b0ec7c97 100644 --- a/tools/pyproject.toml +++ b/tools/pyproject.toml @@ -71,6 +71,9 @@ unit-tests = [ "beautifulsoup4", "lxml" ] +linters = [ + "ruff" +] greentea = [ ## Additional requirements to install into the Mbed environment when running Greentea tests # For USB Device host tests @@ -103,3 +106,7 @@ mbedls = "mbed_lstools.main:mbedls_main" mbed-tools = "mbed_tools.cli.main:cli" memap = "memap.memap:main" ambiq_svl = "ambiq_svl.svl:cli" + +[tool.ruff] +line-length = 120 +src = ['python'] \ No newline at end of file diff --git a/tools/python/__init__.py b/tools/python/__init__.py index 2bae17afc88..04d33f049d1 100644 --- a/tools/python/__init__.py +++ b/tools/python/__init__.py @@ -1,4 +1,4 @@ # # Copyright (c) 2020-2023 Arm Limited and Contributors. All rights reserved. 
# SPDX-License-Identifier: Apache-2.0 -# \ No newline at end of file +# diff --git a/tools/python/ambiq_svl/__init__.py b/tools/python/ambiq_svl/__init__.py index 4b5043782c9..bd70daa8253 100644 --- a/tools/python/ambiq_svl/__init__.py +++ b/tools/python/ambiq_svl/__init__.py @@ -2,4 +2,4 @@ SparkFun Variable Loader Variable baud rate bootloader for Artemis Apollo3 modules SPDX-License-Identifier: MIT -""" \ No newline at end of file +""" diff --git a/tools/python/ambiq_svl/svl.py b/tools/python/ambiq_svl/svl.py index 551c694164b..eef71992689 100644 --- a/tools/python/ambiq_svl/svl.py +++ b/tools/python/ambiq_svl/svl.py @@ -64,38 +64,263 @@ barWidthInCharacters = 50 # Width of progress bar, ie [###### % complete crcTable = ( - 0x0000, 0x8005, 0x800F, 0x000A, 0x801B, 0x001E, 0x0014, 0x8011, - 0x8033, 0x0036, 0x003C, 0x8039, 0x0028, 0x802D, 0x8027, 0x0022, - 0x8063, 0x0066, 0x006C, 0x8069, 0x0078, 0x807D, 0x8077, 0x0072, - 0x0050, 0x8055, 0x805F, 0x005A, 0x804B, 0x004E, 0x0044, 0x8041, - 0x80C3, 0x00C6, 0x00CC, 0x80C9, 0x00D8, 0x80DD, 0x80D7, 0x00D2, - 0x00F0, 0x80F5, 0x80FF, 0x00FA, 0x80EB, 0x00EE, 0x00E4, 0x80E1, - 0x00A0, 0x80A5, 0x80AF, 0x00AA, 0x80BB, 0x00BE, 0x00B4, 0x80B1, - 0x8093, 0x0096, 0x009C, 0x8099, 0x0088, 0x808D, 0x8087, 0x0082, - 0x8183, 0x0186, 0x018C, 0x8189, 0x0198, 0x819D, 0x8197, 0x0192, - 0x01B0, 0x81B5, 0x81BF, 0x01BA, 0x81AB, 0x01AE, 0x01A4, 0x81A1, - 0x01E0, 0x81E5, 0x81EF, 0x01EA, 0x81FB, 0x01FE, 0x01F4, 0x81F1, - 0x81D3, 0x01D6, 0x01DC, 0x81D9, 0x01C8, 0x81CD, 0x81C7, 0x01C2, - 0x0140, 0x8145, 0x814F, 0x014A, 0x815B, 0x015E, 0x0154, 0x8151, - 0x8173, 0x0176, 0x017C, 0x8179, 0x0168, 0x816D, 0x8167, 0x0162, - 0x8123, 0x0126, 0x012C, 0x8129, 0x0138, 0x813D, 0x8137, 0x0132, - 0x0110, 0x8115, 0x811F, 0x011A, 0x810B, 0x010E, 0x0104, 0x8101, - 0x8303, 0x0306, 0x030C, 0x8309, 0x0318, 0x831D, 0x8317, 0x0312, - 0x0330, 0x8335, 0x833F, 0x033A, 0x832B, 0x032E, 0x0324, 0x8321, - 0x0360, 0x8365, 0x836F, 0x036A, 0x837B, 0x037E, 0x0374, 0x8371, - 0x8353, 
0x0356, 0x035C, 0x8359, 0x0348, 0x834D, 0x8347, 0x0342, - 0x03C0, 0x83C5, 0x83CF, 0x03CA, 0x83DB, 0x03DE, 0x03D4, 0x83D1, - 0x83F3, 0x03F6, 0x03FC, 0x83F9, 0x03E8, 0x83ED, 0x83E7, 0x03E2, - 0x83A3, 0x03A6, 0x03AC, 0x83A9, 0x03B8, 0x83BD, 0x83B7, 0x03B2, - 0x0390, 0x8395, 0x839F, 0x039A, 0x838B, 0x038E, 0x0384, 0x8381, - 0x0280, 0x8285, 0x828F, 0x028A, 0x829B, 0x029E, 0x0294, 0x8291, - 0x82B3, 0x02B6, 0x02BC, 0x82B9, 0x02A8, 0x82AD, 0x82A7, 0x02A2, - 0x82E3, 0x02E6, 0x02EC, 0x82E9, 0x02F8, 0x82FD, 0x82F7, 0x02F2, - 0x02D0, 0x82D5, 0x82DF, 0x02DA, 0x82CB, 0x02CE, 0x02C4, 0x82C1, - 0x8243, 0x0246, 0x024C, 0x8249, 0x0258, 0x825D, 0x8257, 0x0252, - 0x0270, 0x8275, 0x827F, 0x027A, 0x826B, 0x026E, 0x0264, 0x8261, - 0x0220, 0x8225, 0x822F, 0x022A, 0x823B, 0x023E, 0x0234, 0x8231, - 0x8213, 0x0216, 0x021C, 0x8219, 0x0208, 0x820D, 0x8207, 0x0202) + 0x0000, + 0x8005, + 0x800F, + 0x000A, + 0x801B, + 0x001E, + 0x0014, + 0x8011, + 0x8033, + 0x0036, + 0x003C, + 0x8039, + 0x0028, + 0x802D, + 0x8027, + 0x0022, + 0x8063, + 0x0066, + 0x006C, + 0x8069, + 0x0078, + 0x807D, + 0x8077, + 0x0072, + 0x0050, + 0x8055, + 0x805F, + 0x005A, + 0x804B, + 0x004E, + 0x0044, + 0x8041, + 0x80C3, + 0x00C6, + 0x00CC, + 0x80C9, + 0x00D8, + 0x80DD, + 0x80D7, + 0x00D2, + 0x00F0, + 0x80F5, + 0x80FF, + 0x00FA, + 0x80EB, + 0x00EE, + 0x00E4, + 0x80E1, + 0x00A0, + 0x80A5, + 0x80AF, + 0x00AA, + 0x80BB, + 0x00BE, + 0x00B4, + 0x80B1, + 0x8093, + 0x0096, + 0x009C, + 0x8099, + 0x0088, + 0x808D, + 0x8087, + 0x0082, + 0x8183, + 0x0186, + 0x018C, + 0x8189, + 0x0198, + 0x819D, + 0x8197, + 0x0192, + 0x01B0, + 0x81B5, + 0x81BF, + 0x01BA, + 0x81AB, + 0x01AE, + 0x01A4, + 0x81A1, + 0x01E0, + 0x81E5, + 0x81EF, + 0x01EA, + 0x81FB, + 0x01FE, + 0x01F4, + 0x81F1, + 0x81D3, + 0x01D6, + 0x01DC, + 0x81D9, + 0x01C8, + 0x81CD, + 0x81C7, + 0x01C2, + 0x0140, + 0x8145, + 0x814F, + 0x014A, + 0x815B, + 0x015E, + 0x0154, + 0x8151, + 0x8173, + 0x0176, + 0x017C, + 0x8179, + 0x0168, + 0x816D, + 0x8167, + 0x0162, + 0x8123, + 0x0126, + 0x012C, + 
0x8129, + 0x0138, + 0x813D, + 0x8137, + 0x0132, + 0x0110, + 0x8115, + 0x811F, + 0x011A, + 0x810B, + 0x010E, + 0x0104, + 0x8101, + 0x8303, + 0x0306, + 0x030C, + 0x8309, + 0x0318, + 0x831D, + 0x8317, + 0x0312, + 0x0330, + 0x8335, + 0x833F, + 0x033A, + 0x832B, + 0x032E, + 0x0324, + 0x8321, + 0x0360, + 0x8365, + 0x836F, + 0x036A, + 0x837B, + 0x037E, + 0x0374, + 0x8371, + 0x8353, + 0x0356, + 0x035C, + 0x8359, + 0x0348, + 0x834D, + 0x8347, + 0x0342, + 0x03C0, + 0x83C5, + 0x83CF, + 0x03CA, + 0x83DB, + 0x03DE, + 0x03D4, + 0x83D1, + 0x83F3, + 0x03F6, + 0x03FC, + 0x83F9, + 0x03E8, + 0x83ED, + 0x83E7, + 0x03E2, + 0x83A3, + 0x03A6, + 0x03AC, + 0x83A9, + 0x03B8, + 0x83BD, + 0x83B7, + 0x03B2, + 0x0390, + 0x8395, + 0x839F, + 0x039A, + 0x838B, + 0x038E, + 0x0384, + 0x8381, + 0x0280, + 0x8285, + 0x828F, + 0x028A, + 0x829B, + 0x029E, + 0x0294, + 0x8291, + 0x82B3, + 0x02B6, + 0x02BC, + 0x82B9, + 0x02A8, + 0x82AD, + 0x82A7, + 0x02A2, + 0x82E3, + 0x02E6, + 0x02EC, + 0x82E9, + 0x02F8, + 0x82FD, + 0x82F7, + 0x02F2, + 0x02D0, + 0x82D5, + 0x82DF, + 0x02DA, + 0x82CB, + 0x02CE, + 0x02C4, + 0x82C1, + 0x8243, + 0x0246, + 0x024C, + 0x8249, + 0x0258, + 0x825D, + 0x8257, + 0x0252, + 0x0270, + 0x8275, + 0x827F, + 0x027A, + 0x826B, + 0x026E, + 0x0264, + 0x8261, + 0x0220, + 0x8225, + 0x822F, + 0x022A, + 0x823B, + 0x023E, + 0x0234, + 0x8231, + 0x8213, + 0x0216, + 0x021C, + 0x8219, + 0x0208, + 0x820D, + 0x8207, + 0x0202, +) # *********************************************************************************** # # Compute CRC on a byte array @@ -104,7 +329,6 @@ def get_crc16(data): - # Table and code ported from Artemis SVL bootloader crc = 0x0000 data = bytearray(data) @@ -122,30 +346,30 @@ def get_crc16(data): # # *********************************************************************************** def wait_for_packet(ser): - - packet = {'len': 0, 'cmd': 0, 'data': 0, 'crc': 1, 'timeout': 1} + packet = {"len": 0, "cmd": 0, "data": 0, "crc": 1, "timeout": 1} n = ser.read(2) # get the number of bytes - 
if(len(n) < 2): + if len(n) < 2: return packet - packet['len'] = int.from_bytes(n, byteorder='big', signed=False) # - payload = ser.read(packet['len']) + packet["len"] = int.from_bytes(n, byteorder="big", signed=False) # + payload = ser.read(packet["len"]) - if(len(payload) != packet['len']): + if len(payload) != packet["len"]: return packet # all bytes received, so timeout is not true - packet['timeout'] = 0 + packet["timeout"] = 0 # cmd is the first byte of the payload - packet['cmd'] = payload[0] + packet["cmd"] = payload[0] # the data is the part of the payload that is not cmd or crc - packet['data'] = payload[1:packet['len']-2] + packet["data"] = payload[1 : packet["len"] - 2] # performing the crc on the whole payload should return 0 - packet['crc'] = get_crc16(payload) + packet["crc"] = get_crc16(payload) return packet + # *********************************************************************************** # # Send a packet @@ -156,12 +380,12 @@ def wait_for_packet(ser): def send_packet(ser, cmd, data): data = bytearray(data) num_bytes = 3 + len(data) - payload = bytearray(cmd.to_bytes(1, 'big')) + payload = bytearray(cmd.to_bytes(1, "big")) payload.extend(data) crc = get_crc16(payload) - payload.extend(bytearray(crc.to_bytes(2, 'big'))) + payload.extend(bytearray(crc.to_bytes(2, "big"))) - ser.write(num_bytes.to_bytes(2, 'big')) + ser.write(num_bytes.to_bytes(2, "big")) ser.write(bytes(payload)) @@ -171,26 +395,24 @@ def send_packet(ser, cmd, data): # # *********************************************************************************** def phase_setup(ser): + baud_detect_byte = b"U" - baud_detect_byte = b'U' - - verboseprint('\nPhase:\tSetup') + verboseprint("\nPhase:\tSetup") # Handle the serial startup blip ser.reset_input_buffer() - verboseprint('\tCleared startup blip') + verboseprint("\tCleared startup blip") - ser.write(baud_detect_byte) # send the baud detection character + ser.write(baud_detect_byte) # send the baud detection character packet = 
wait_for_packet(ser) - if(packet['timeout'] or packet['crc']): + if packet["timeout"] or packet["crc"]: return False # failed to enter bootloader - twopartprint('\t', 'Got SVL Bootloader Version: ' + - str(int.from_bytes(packet['data'], 'big'))) - verboseprint('\tSending \'enter bootloader\' command') + twopartprint("\t", "Got SVL Bootloader Version: " + str(int.from_bytes(packet["data"], "big"))) + verboseprint("\tSending 'enter bootloader' command") - send_packet(ser, SVL_CMD_BL, b'') + send_packet(ser, SVL_CMD_BL, b"") return True @@ -203,86 +425,80 @@ def phase_setup(ser): # # *********************************************************************************** def phase_bootload(ser): - startTime = time.time() - frame_size = 512*4 + frame_size = 512 * 4 resend_max = 4 resend_count = 0 - verboseprint('\nPhase:\tBootload') + verboseprint("\nPhase:\tBootload") - with open(args.binfile, mode='rb') as binfile: + with open(args.binfile, mode="rb") as binfile: application = binfile.read() total_len = len(application) - total_frames = math.ceil(total_len/frame_size) + total_frames = math.ceil(total_len / frame_size) curr_frame = 0 progressChars = 0 - if (not args.verbose): - print("[", end='') + if not args.verbose: + print("[", end="") - verboseprint('\thave ' + str(total_len) + - ' bytes to send in ' + str(total_frames) + ' frames') + verboseprint("\thave " + str(total_len) + " bytes to send in " + str(total_frames) + " frames") bl_done = False bl_succeeded = True - while((bl_done == False) and (bl_succeeded == True)): - + while (bl_done == False) and (bl_succeeded == True): # wait for indication by Artemis packet = wait_for_packet(ser) - if(packet['timeout'] or packet['crc']): - verboseprint('\n\tError receiving packet') + if packet["timeout"] or packet["crc"]: + verboseprint("\n\tError receiving packet") verboseprint(packet) - verboseprint('\n') + verboseprint("\n") bl_succeeded = False bl_done = True - if(packet['cmd'] == SVL_CMD_NEXT): + if packet["cmd"] == 
SVL_CMD_NEXT: # verboseprint('\tgot frame request') curr_frame += 1 resend_count = 0 - elif(packet['cmd'] == SVL_CMD_RETRY): - verboseprint('\t\tRetrying...') + elif packet["cmd"] == SVL_CMD_RETRY: + verboseprint("\t\tRetrying...") resend_count += 1 - if(resend_count >= resend_max): + if resend_count >= resend_max: bl_succeeded = False bl_done = True else: - print('Timeout or unknown error') + print("Timeout or unknown error") bl_succeeded = False bl_done = True - if(curr_frame <= total_frames): - frame_data = application[( - (curr_frame-1)*frame_size):((curr_frame-1+1)*frame_size)] - if(args.verbose): - verboseprint('\tSending frame #'+str(curr_frame) + - ', length: '+str(len(frame_data))) + if curr_frame <= total_frames: + frame_data = application[((curr_frame - 1) * frame_size) : ((curr_frame - 1 + 1) * frame_size)] + if args.verbose: + verboseprint("\tSending frame #" + str(curr_frame) + ", length: " + str(len(frame_data))) else: percentComplete = curr_frame * 100 / total_frames - percentCompleteInChars = math.ceil( - percentComplete / 100 * barWidthInCharacters) - while(progressChars < percentCompleteInChars): + percentCompleteInChars = math.ceil(percentComplete / 100 * barWidthInCharacters) + while progressChars < percentCompleteInChars: progressChars = progressChars + 1 - print('#', end='', flush=True) - if (percentComplete == 100): - print("]", end='') + print("#", end="", flush=True) + if percentComplete == 100: + print("]", end="") send_packet(ser, SVL_CMD_FRAME, frame_data) else: - send_packet(ser, SVL_CMD_DONE, b'') + send_packet(ser, SVL_CMD_DONE, b"") bl_done = True - if(bl_succeeded == True): - twopartprint('\n\t', 'Upload complete') + if bl_succeeded == True: + twopartprint("\n\t", "Upload complete") endTime = time.time() bps = total_len / (endTime - startTime) - verboseprint('\n\tNominal bootload bps: ' + str(round(bps, 2))) + verboseprint("\n\tNominal bootload bps: " + str(round(bps, 2))) else: - twopartprint('\n\t', 'Upload failed') + 
twopartprint("\n\t", "Upload failed") return bl_succeeded @@ -297,24 +513,24 @@ def phase_serial_port_help(): # First check to see if user has the given port open for dev in devices: - if(dev.device.upper() == args.port.upper()): - print(dev.device + " is currently open. Please close any other terminal programs that may be using " + - dev.device + " and try again.") + if dev.device.upper() == args.port.upper(): + print( + dev.device + + " is currently open. Please close any other terminal programs that may be using " + + dev.device + + " and try again." + ) exit() # otherwise, give user a list of possible com ports - print(args.port.upper() + - " not found but we detected the following serial ports:") + print(args.port.upper() + " not found but we detected the following serial ports:") for dev in devices: - if 'CH340' in dev.description: - print( - dev.description + ": Likely an Arduino or derivative. Try " + dev.device + ".") - elif 'FTDI' in dev.description: - print( - dev.description + ": Likely an Arduino or derivative. Try " + dev.device + ".") - elif 'USB Serial Device' in dev.description: - print( - dev.description + ": Possibly an Arduino or derivative.") + if "CH340" in dev.description: + print(dev.description + ": Likely an Arduino or derivative. Try " + dev.device + ".") + elif "FTDI" in dev.description: + print(dev.description + ": Likely an Arduino or derivative. 
Try " + dev.device + ".") + elif "USB Serial Device" in dev.description: + print(dev.description + ": Possibly an Arduino or derivative.") else: print(dev.description) @@ -325,17 +541,19 @@ def phase_serial_port_help(): # # *********************************************************************************** + def verboseprint(*pargs): if args.verbose: # Print each argument separately so caller doesn't need to # stuff everything to be printed into a single string for arg in pargs: - print(arg, end='', flush=True), + (print(arg, end="", flush=True),) print() + def twopartprint(verbosestr, printstr): if args.verbose: - print(verbosestr, end='') + print(verbosestr, end="") print(printstr) @@ -349,10 +567,9 @@ def main(): try: num_tries = 3 - print('\n\nArtemis SVL Bootloader') + print("\n\nArtemis SVL Bootloader") - verboseprint("Script version " + SCRIPT_VERSION_MAJOR + - "." + SCRIPT_VERSION_MINOR) + verboseprint("Script version " + SCRIPT_VERSION_MAJOR + "." + SCRIPT_VERSION_MINOR) if not os.path.exists(args.binfile): print("Bin file {} does not exist.".format(args.binfile)) @@ -362,31 +579,30 @@ def main(): entered_bootloader = False for _ in range(num_tries): - with serial.Serial(args.port, args.baud, timeout=args.timeout) as ser: - # startup time for Artemis bootloader (experimentally determined - 0.095 sec min delay) t_su = 0.15 - time.sleep(t_su) # Allow Artemis to come out of reset + time.sleep(t_su) # Allow Artemis to come out of reset # Perform baud rate negotiation entered_bootloader = phase_setup(ser) - if(entered_bootloader == True): + if entered_bootloader == True: bl_success = phase_bootload(ser) - if(bl_success == True): # Bootload - #print("Bootload complete!") + if bl_success == True: # Bootload + # print("Bootload complete!") break else: verboseprint("Failed to enter bootload phase") - if(bl_success == True): + if bl_success == True: break - if(entered_bootloader == False): + if entered_bootloader == False: print( - "Target failed to enter bootload 
mode. Verify the right COM port is selected and that your board has the SVL bootloader.") + "Target failed to enter bootload mode. Verify the right COM port is selected and that your board has the SVL bootloader." + ) except serial.SerialException: phase_serial_port_help() @@ -400,23 +616,19 @@ def main(): # # ****************************************************************************** def cli(): + parser = argparse.ArgumentParser(description="SparkFun Serial Bootloader for Artemis") - parser = argparse.ArgumentParser( - description='SparkFun Serial Bootloader for Artemis') + parser.add_argument("port", help="Serial COMx Port") - parser.add_argument('port', help='Serial COMx Port') + parser.add_argument("-b", dest="baud", default=115200, type=int, help="Baud Rate (default is 115200)") - parser.add_argument('-b', dest='baud', default=115200, type=int, - help='Baud Rate (default is 115200)') + parser.add_argument("-f", dest="binfile", default="", help="Binary file to program into the target device") - parser.add_argument('-f', dest='binfile', default='', - help='Binary file to program into the target device') + parser.add_argument("-v", "--verbose", default=0, help="Enable verbose output", action="store_true") - parser.add_argument("-v", "--verbose", default=0, help="Enable verbose output", - action="store_true") - - parser.add_argument("-t", "--timeout", default=0.50, help="Communication timeout in seconds (default 0.5)", - type=float) + parser.add_argument( + "-t", "--timeout", default=0.50, help="Communication timeout in seconds (default 0.5)", type=float + ) if len(sys.argv) < 2: print("No port selected. 
Detected Serial Ports:") @@ -429,5 +641,6 @@ def cli(): main() -if __name__ == '__main__': - cli() \ No newline at end of file + +if __name__ == "__main__": + cli() diff --git a/tools/python/install_bin_file.py b/tools/python/install_bin_file.py index 136550bcbfe..b37602570fa 100644 --- a/tools/python/install_bin_file.py +++ b/tools/python/install_bin_file.py @@ -18,22 +18,29 @@ def get_detected_targets(): targets = [] oldError = None - if os.name == 'nt': - oldError = ctypes.windll.kernel32.SetErrorMode(1) # Disable Windows error box temporarily. note that SEM_FAILCRITICALERRORS = 1 + if os.name == "nt": + oldError = ctypes.windll.kernel32.SetErrorMode( + 1 + ) # Disable Windows error box temporarily. note that SEM_FAILCRITICALERRORS = 1 mbeds = mbed_os_tools.detect.create() detect_muts_list = mbeds.list_mbeds() - if os.name == 'nt': + if os.name == "nt": ctypes.windll.kernel32.SetErrorMode(oldError) for mut in detect_muts_list: - targets.append({ - 'id': mut['target_id'], 'name': mut['platform_name'], - 'mount': mut['mount_point'], 'serial': mut['serial_port'], - 'uid': mut['target_id_usb_id'] - }) + targets.append( + { + "id": mut["target_id"], + "name": mut["platform_name"], + "mount": mut["mount_point"], + "serial": mut["serial_port"], + "uid": mut["target_id_usb_id"], + } + ) return targets + def error(lines, code=-1): sys.stderr.write("[install-bin-file] ERROR: %s\n" % (lines.pop(0),)) for line in lines: @@ -41,8 +48,13 @@ def error(lines, code=-1): sys.stderr.write("---\n") sys.exit(code) + if len(sys.argv) < 3: - print("Error: Usage: " + sys.argv[0] + " [Target UID for distinguishing multiple targets]") + print( + "Error: Usage: " + + sys.argv[0] + + " [Target UID for distinguishing multiple targets]" + ) sys.exit(1) bin_file = sys.argv[1] @@ -56,32 +68,33 @@ def error(lines, code=-1): all_connected = [] # Convert Dual Core target name to mbedls target name -if target_name.upper().endswith('_CM4') or target_name.upper().endswith('_CM7'): +if 
target_name.upper().endswith("_CM4") or target_name.upper().endswith("_CM7"): target_name = target_name[:-4] - print('Target to detect: %s' % (target_name,)) + print("Target to detect: %s" % (target_name,)) targets = get_detected_targets() if targets: for _target in targets: - - if _target['name'] is None: - if target_uid is not None and _target['uid'] == target_uid: + if _target["name"] is None: + if target_uid is not None and _target["uid"] == target_uid: # If we have an exact UID match and we don't know the name, then assume that # the UID is correct. all_connected.append(_target) else: - if _target['name'].upper() == target_name.upper(): - if target_uid is None or _target['uid'] == target_uid: + if _target["name"].upper() == target_name.upper(): + if target_uid is None or _target["uid"] == target_uid: # Name matches, UID either matches or was not specified all_connected.append(_target) -if len(all_connected) == 0 and len(targets) == 1 and targets[0]['name'] is None and target_uid is None: +if len(all_connected) == 0 and len(targets) == 1 and targets[0]["name"] is None and target_uid is None: # Special case: if we only have one board connected to the system and we aren't filtering by UID, then # assume it's the one we want even if we could not detect its name. all_connected.append(targets[0]) if len(all_connected) == 0: - error_lines = ["The target board you compiled for is not connected to your system.", - "Please reconnect it and retry the last command."] + error_lines = [ + "The target board you compiled for is not connected to your system.", + "Please reconnect it and retry the last command.", + ] if target_uid is None: error_lines.append("(Searched for any %s board.)" % (target_name,)) else: @@ -91,16 +104,16 @@ def error(lines, code=-1): elif len(all_connected) > 1: error_lines = ["There are multiple of the targeted board connected to the system. 
Which do you wish to flash?"] for target in all_connected: - error_lines.append("Board: %s, Mount Point: %s, UID: %s" % (target['name'], target['mount'], target['uid'])) + error_lines.append("Board: %s, Mount Point: %s, UID: %s" % (target["name"], target["mount"], target["uid"])) error_lines.append("Please set the CMake variable MBED_TARGET_UID to the UID of the board you wish to flash.") error(error_lines, 5) connected = all_connected[0] # apply new firmware if not os.path.exists(bin_file): - error("Build program file (firmware) not found \"%s\"" % bin_file, 1) -if not flash_dev(connected['mount'], bin_file, program_cycle_s=4): + error('Build program file (firmware) not found "%s"' % bin_file, 1) +if not flash_dev(connected["mount"], bin_file, program_cycle_s=4): error("Unable to flash the target board connected to your system.", 1) # reset board -reset_dev(port=connected["serial"], disk=connected['mount'], baudrate=serial_baud) +reset_dev(port=connected["serial"], disk=connected["mount"], baudrate=serial_baud) diff --git a/tools/python/mbed_host_tests/__init__.py b/tools/python/mbed_host_tests/__init__.py index a118cadc617..f24e01af029 100644 --- a/tools/python/mbed_host_tests/__init__.py +++ b/tools/python/mbed_host_tests/__init__.py @@ -17,7 +17,6 @@ Author: Przemyslaw Wirkus """ - """! @package mbed-host-tests Flash, reset and perform host supervised tests on mbed platforms. diff --git a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive.py b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive.py index e5885202865..6f7224b91f5 100644 --- a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive.py +++ b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive.py @@ -15,7 +15,4 @@ limitations under the License. 
""" -from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive import ( - ConnectorPrimitiveException, - ConnectorPrimitive, -) +from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive import ConnectorPrimitiveException, ConnectorPrimitive diff --git a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_fastmodel.py b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_fastmodel.py index fb23808c844..7f33be7aac7 100644 --- a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_fastmodel.py +++ b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_fastmodel.py @@ -15,6 +15,4 @@ limitations under the License. """ -from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_fastmodel import ( - FastmodelConnectorPrimitive, -) +from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_fastmodel import FastmodelConnectorPrimitive diff --git a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_remote.py b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_remote.py index 729bd602967..9d97f85d055 100644 --- a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_remote.py +++ b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_remote.py @@ -15,6 +15,4 @@ limitations under the License. """ -from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_remote import ( - RemoteConnectorPrimitive, -) +from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_remote import RemoteConnectorPrimitive diff --git a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_serial.py b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_serial.py index a11cc00e08e..cb065391994 100644 --- a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_serial.py +++ b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_primitive_serial.py @@ -15,7 +15,4 @@ limitations under the License. 
""" - -from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_serial import ( - SerialConnectorPrimitive, -) +from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_serial import SerialConnectorPrimitive diff --git a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_proxy.py b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_proxy.py index e2ea5c33269..0c9b3648f5c 100644 --- a/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_proxy.py +++ b/tools/python/mbed_host_tests/host_tests_conn_proxy/conn_proxy.py @@ -15,8 +15,4 @@ limitations under the License. """ -from mbed_os_tools.test.host_tests_conn_proxy.conn_proxy import ( - KiViBufferWalker, - conn_primitive_factory, - conn_process, -) +from mbed_os_tools.test.host_tests_conn_proxy.conn_proxy import KiViBufferWalker, conn_primitive_factory, conn_process diff --git a/tools/python/mbed_host_tests/host_tests_plugins/host_test_plugins.py b/tools/python/mbed_host_tests/host_tests_plugins/host_test_plugins.py index d364dd681e0..de42a80e6b3 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/host_test_plugins.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/host_test_plugins.py @@ -17,6 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.host_test_plugins import ( - HostTestPluginBase, -) +from mbed_os_tools.test.host_tests_plugins.host_test_plugins import HostTestPluginBase diff --git a/tools/python/mbed_host_tests/host_tests_plugins/host_test_registry.py b/tools/python/mbed_host_tests/host_tests_plugins/host_test_registry.py index 454cd151c2c..715496fe045 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/host_test_registry.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/host_test_registry.py @@ -17,6 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.host_test_registry import ( - HostTestRegistry, -) +from mbed_os_tools.test.host_tests_plugins.host_test_registry import HostTestRegistry diff 
--git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_jn51xx.py b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_jn51xx.py index 729250da63b..9197b1a672a 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_jn51xx.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_jn51xx.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_copy_jn51xx import ( - HostTestPluginCopyMethod_JN51xx, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_jn51xx import HostTestPluginCopyMethod_JN51xx, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mbed.py b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mbed.py index 103092532f1..75c25dd0f7e 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mbed.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mbed.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_copy_mbed import ( - HostTestPluginCopyMethod_Mbed, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_mbed import HostTestPluginCopyMethod_Mbed, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mps2.py b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mps2.py index 436644f521a..4d84c03f017 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mps2.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_mps2.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_copy_mps2 import ( - HostTestPluginCopyMethod_MPS2, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_mps2 import HostTestPluginCopyMethod_MPS2, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_pyocd.py 
b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_pyocd.py index 498ee371f86..316bf0b2e1d 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_pyocd.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_pyocd.py @@ -17,7 +17,4 @@ Author: Russ Butler """ -from mbed_os_tools.test.host_tests_plugins.module_copy_pyocd import ( - HostTestPluginCopyMethod_pyOCD, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_pyocd import HostTestPluginCopyMethod_pyOCD, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_shell.py b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_shell.py index 02644e2a83d..a41467cf82a 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_shell.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_shell.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_copy_shell import ( - HostTestPluginCopyMethod_Shell, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_shell import HostTestPluginCopyMethod_Shell, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_silabs.py b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_silabs.py index 7917926cdac..ae886de9f81 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_silabs.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_silabs.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_copy_silabs import ( - HostTestPluginCopyMethod_Silabs, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_silabs import HostTestPluginCopyMethod_Silabs, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_stlink.py b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_stlink.py index 696594f3449..604c604d730 100644 --- 
a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_stlink.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_stlink.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_copy_stlink import ( - HostTestPluginCopyMethod_Stlink, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_stlink import HostTestPluginCopyMethod_Stlink, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_ublox.py b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_ublox.py index d9d7e94e1ba..d51a022c0e4 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_copy_ublox.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_copy_ublox.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_copy_ublox import ( - HostTestPluginCopyMethod_ublox, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_copy_ublox import HostTestPluginCopyMethod_ublox, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_jn51xx.py b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_jn51xx.py index a296c961a9b..80152fe86b3 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_jn51xx.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_jn51xx.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_reset_jn51xx import ( - HostTestPluginResetMethod_JN51xx, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_reset_jn51xx import HostTestPluginResetMethod_JN51xx, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mbed.py b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mbed.py index 13024110fe0..f5811a2f013 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mbed.py +++ 
b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mbed.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_reset_mbed import ( - HostTestPluginResetMethod_Mbed, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_reset_mbed import HostTestPluginResetMethod_Mbed, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mps2.py b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mps2.py index 1708c757570..2573b3a6958 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mps2.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_mps2.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_reset_mps2 import ( - HostTestPluginResetMethod_MPS2, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_reset_mps2 import HostTestPluginResetMethod_MPS2, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_pyocd.py b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_pyocd.py index 189140d5d5f..eed22c86d81 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_pyocd.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_pyocd.py @@ -17,7 +17,4 @@ Author: Russ Butler """ -from mbed_os_tools.test.host_tests_plugins.module_reset_pyocd import ( - HostTestPluginResetMethod_pyOCD, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_reset_pyocd import HostTestPluginResetMethod_pyOCD, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_silabs.py b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_silabs.py index e33c7a6bad7..dbb1ae828a1 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_silabs.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_silabs.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus 
""" -from mbed_os_tools.test.host_tests_plugins.module_reset_silabs import ( - HostTestPluginResetMethod_SiLabs, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_reset_silabs import HostTestPluginResetMethod_SiLabs, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_stlink.py b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_stlink.py index a87ed479f19..88724e8136c 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_stlink.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_stlink.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_reset_stlink import ( - HostTestPluginResetMethod_Stlink, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_reset_stlink import HostTestPluginResetMethod_Stlink, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_ublox.py b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_ublox.py index 2d36c251a16..424b7ad5788 100644 --- a/tools/python/mbed_host_tests/host_tests_plugins/module_reset_ublox.py +++ b/tools/python/mbed_host_tests/host_tests_plugins/module_reset_ublox.py @@ -17,7 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_plugins.module_reset_ublox import ( - HostTestPluginResetMethod_ublox, - load_plugin, -) +from mbed_os_tools.test.host_tests_plugins.module_reset_ublox import HostTestPluginResetMethod_ublox, load_plugin diff --git a/tools/python/mbed_host_tests/host_tests_runner/host_test.py b/tools/python/mbed_host_tests/host_tests_runner/host_test.py index ca9dcf75571..258dc03bf4e 100644 --- a/tools/python/mbed_host_tests/host_tests_runner/host_test.py +++ b/tools/python/mbed_host_tests/host_tests_runner/host_test.py @@ -17,8 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_runner.host_test import ( - HostTestResults, - Test, - DefaultTestSelectorBase, -) +from 
mbed_os_tools.test.host_tests_runner.host_test import HostTestResults, Test, DefaultTestSelectorBase diff --git a/tools/python/mbed_host_tests/host_tests_runner/host_test_default.py b/tools/python/mbed_host_tests/host_tests_runner/host_test_default.py index 9fe28f511bb..f56b47905ec 100644 --- a/tools/python/mbed_host_tests/host_tests_runner/host_test_default.py +++ b/tools/python/mbed_host_tests/host_tests_runner/host_test_default.py @@ -17,6 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_runner.host_test_default import ( - DefaultTestSelector, -) +from mbed_os_tools.test.host_tests_runner.host_test_default import DefaultTestSelector diff --git a/tools/python/mbed_host_tests/host_tests_toolbox/host_functional.py b/tools/python/mbed_host_tests/host_tests_toolbox/host_functional.py index ab700444ad8..77c05269b3f 100644 --- a/tools/python/mbed_host_tests/host_tests_toolbox/host_functional.py +++ b/tools/python/mbed_host_tests/host_tests_toolbox/host_functional.py @@ -17,8 +17,4 @@ Author: Przemyslaw Wirkus """ -from mbed_os_tools.test.host_tests_toolbox.host_functional import ( - flash_dev, - reset_dev, - handle_send_break_cmd, -) +from mbed_os_tools.test.host_tests_toolbox.host_functional import flash_dev, reset_dev, handle_send_break_cmd diff --git a/tools/python/mbed_host_tests/mbedflsh.py b/tools/python/mbed_host_tests/mbedflsh.py index a9c8792c32a..6be2b0ac840 100644 --- a/tools/python/mbed_host_tests/mbedflsh.py +++ b/tools/python/mbed_host_tests/mbedflsh.py @@ -25,41 +25,43 @@ def cmd_parser_setup(): - """! Creates simple command line parser - """ + """! Creates simple command line parser""" parser = optparse.OptionParser() - parser.add_option('-f', '--file', - dest='filename', - help='File to flash onto mbed device') - - parser.add_option("-d", "--disk", - dest="disk", - help="Target disk (mount point) path. 
Example: F:, /mnt/MBED", - metavar="DISK_PATH") - - copy_methods_str = "Plugin support: " + ', '.join(host_tests_plugins.get_plugin_caps('CopyMethod')) - - parser.add_option("-c", "--copy", - dest="copy_method", - default='shell', - help="Copy (flash the target) method selector. " + copy_methods_str, - metavar="COPY_METHOD") - - parser.add_option('', '--plugins', - dest='list_plugins', - default=False, - action="store_true", - help='Prints registered plugins and exits') - - parser.add_option('', '--version', - dest='version', - default=False, - action="store_true", - help='Prints package version and exits') - - parser.description = """Flash mbed devices from command line.""" \ + parser.add_option("-f", "--file", dest="filename", help="File to flash onto mbed device") + + parser.add_option( + "-d", "--disk", dest="disk", help="Target disk (mount point) path. Example: F:, /mnt/MBED", metavar="DISK_PATH" + ) + + copy_methods_str = "Plugin support: " + ", ".join(host_tests_plugins.get_plugin_caps("CopyMethod")) + + parser.add_option( + "-c", + "--copy", + dest="copy_method", + default="shell", + help="Copy (flash the target) method selector. 
" + copy_methods_str, + metavar="COPY_METHOD", + ) + + parser.add_option( + "", + "--plugins", + dest="list_plugins", + default=False, + action="store_true", + help="Prints registered plugins and exits", + ) + + parser.add_option( + "", "--version", dest="version", default=False, action="store_true", help="Prints package version and exits" + ) + + parser.description = ( + """Flash mbed devices from command line.""" """This module is using build in to mbed-host-tests plugins used for flashing mbed devices""" + ) parser.epilog = """Example: mbedflsh -d E: -f /path/to/file.bin""" (opts, args) = parser.parse_args() @@ -75,25 +77,25 @@ def main(): if opts.version: import pkg_resources # part of setuptools + version = pkg_resources.require("mbed-host-tests")[0].version print(version) sys.exit(0) - elif opts.list_plugins: # --plugins option + elif opts.list_plugins: # --plugins option host_tests_plugins.print_plugin_info() sys.exit(0) else: pass if opts.filename: - print("mbedflsh: opening file %s..."% opts.filename) - result = host_tests_plugins.call_plugin('CopyMethod', - opts.copy_method, - image_path=opts.filename, - destination_disk=opts.disk) + print("mbedflsh: opening file %s..." 
% opts.filename) + result = host_tests_plugins.call_plugin( + "CopyMethod", opts.copy_method, image_path=opts.filename, destination_disk=opts.disk + ) errorlevel_flag = result == True return errorlevel_flag -if __name__ == '__main__': +if __name__ == "__main__": exit(main()) diff --git a/tools/python/mbed_host_tests/mbedhtrun.py b/tools/python/mbed_host_tests/mbedhtrun.py index dd1f2e38564..a8576aa0fcd 100644 --- a/tools/python/mbed_host_tests/mbedhtrun.py +++ b/tools/python/mbed_host_tests/mbedhtrun.py @@ -33,8 +33,9 @@ def main(): result = 0 cli_params = init_host_test_cli_params() - if cli_params.version: # --version - import pkg_resources # part of setuptools + if cli_params.version: # --version + import pkg_resources # part of setuptools + version = pkg_resources.require("mbed-host-tests")[0].version print(version) elif cli_params.send_break_cmd: # -b with -p PORT (and optional -r RESET_TYPE) @@ -43,7 +44,7 @@ def main(): disk=cli_params.disk, reset_type=cli_params.forced_reset_type, baudrate=cli_params.baud_rate, - verbose=cli_params.verbose + verbose=cli_params.verbose, ) else: test_selector = DefaultTestSelector(cli_params) @@ -62,5 +63,5 @@ def main(): return result -if __name__ == '__main__': - exit(main()) \ No newline at end of file +if __name__ == "__main__": + exit(main()) diff --git a/tools/python/mbed_lstools/lstools_base.py b/tools/python/mbed_lstools/lstools_base.py index 04bd2e8917d..89f3c69ed29 100644 --- a/tools/python/mbed_lstools/lstools_base.py +++ b/tools/python/mbed_lstools/lstools_base.py @@ -19,7 +19,4 @@ import json import logging -from mbed_os_tools.detect.lstools_base import ( - FSInteraction, - MbedLsToolsBase, -) +from mbed_os_tools.detect.lstools_base import FSInteraction, MbedLsToolsBase diff --git a/tools/python/mbed_lstools/main.py b/tools/python/mbed_lstools/main.py index f25f98383b1..07ab9348bfc 100644 --- a/tools/python/mbed_lstools/main.py +++ b/tools/python/mbed_lstools/main.py @@ -1,4 +1,3 @@ - """ mbed SDK Copyright (c) 
2011-2018 ARM Limited @@ -23,73 +22,94 @@ # Make sure that any global generic setup is run from . import lstools_base -from mbed_os_tools.detect.main import ( - create, - mbed_os_support, - mbed_lstools_os_info, - mock_platform -) +from mbed_os_tools.detect.main import create, mbed_os_support, mbed_lstools_os_info, mock_platform import logging + logger = logging.getLogger("mbedls.main") logger.addHandler(logging.NullHandler()) del logging + def get_version(): - """! Get mbed-ls Python module version string """ + """! Get mbed-ls Python module version string""" import mbed_os_tools + return mbed_os_tools.VERSION + def print_version(mbeds, args): print(get_version()) + def print_mbeds(mbeds, args, simple): devices = mbeds.list_mbeds(unique_names=True, read_details_txt=True) if devices: from prettytable import PrettyTable, HEADER - columns = ['platform_name', 'platform_name_unique', 'mount_point', - 'serial_port', 'target_id', 'daplink_version'] - columns_header = ['platform_name', 'platform_name_unique', 'mount_point', - 'serial_port', 'target_id', 'interface_version'] + + columns = [ + "platform_name", + "platform_name_unique", + "mount_point", + "serial_port", + "target_id", + "daplink_version", + ] + columns_header = [ + "platform_name", + "platform_name_unique", + "mount_point", + "serial_port", + "target_id", + "interface_version", + ] pt = PrettyTable(columns_header, junction_char="|", hrules=HEADER) - pt.align = 'l' + pt.align = "l" for d in devices: - pt.add_row([d.get(col, None) or 'unknown' for col in columns]) - print(pt.get_string(border=not simple, header=not simple, - padding_width=1, sortby='platform_name_unique')) + pt.add_row([d.get(col, None) or "unknown" for col in columns]) + print(pt.get_string(border=not simple, header=not simple, padding_width=1, sortby="platform_name_unique")) + def print_table(mbeds, args): return print_mbeds(mbeds, args, False) + def print_simple(mbeds, args): return print_mbeds(mbeds, args, True) + def list_platforms(mbeds, 
args): print(mbeds.list_manufacture_ids()) + def mbeds_as_json(mbeds, args): - print(json.dumps(mbeds.list_mbeds(unique_names=True, - read_details_txt=True), - indent=4, sort_keys=True)) + print(json.dumps(mbeds.list_mbeds(unique_names=True, read_details_txt=True), indent=4, sort_keys=True)) + def json_by_target_id(mbeds, args): - print(json.dumps({m['target_id']: m for m - in mbeds.list_mbeds(unique_names=True, - read_details_txt=True)}, - indent=4, sort_keys=True)) + print( + json.dumps( + {m["target_id"]: m for m in mbeds.list_mbeds(unique_names=True, read_details_txt=True)}, + indent=4, + sort_keys=True, + ) + ) + def json_platforms(mbeds, args): platforms = set() for d in mbeds.list_mbeds(): - platforms |= set([d['platform_name']]) + platforms |= set([d["platform_name"]]) print(json.dumps(list(platforms), indent=4, sort_keys=True)) + def json_platforms_ext(mbeds, args): platforms = defaultdict(lambda: 0) for d in mbeds.list_mbeds(): - platforms[d['platform_name']] += 1 + platforms[d["platform_name"]] += 1 print(json.dumps(platforms, indent=4, sort_keys=True)) + def parse_cli(to_parse): """! 
Parse the command line @@ -102,71 +122,108 @@ def parse_cli(to_parse): parser = argparse.ArgumentParser() parser.set_defaults(command=print_table) - commands = parser.add_argument_group('sub commands')\ - .add_mutually_exclusive_group() + commands = parser.add_argument_group("sub commands").add_mutually_exclusive_group() commands.add_argument( - '-s', '--simple', dest='command', action='store_const', + "-s", + "--simple", + dest="command", + action="store_const", const=print_simple, - help='list attached targets without column headers and borders') + help="list attached targets without column headers and borders", + ) commands.add_argument( - '-j', '--json', dest='command', action='store_const', + "-j", + "--json", + dest="command", + action="store_const", const=mbeds_as_json, - help='list attached targets with detailed information in JSON format') + help="list attached targets with detailed information in JSON format", + ) commands.add_argument( - '-J', '--json-by-target-id', dest='command', action='store_const', + "-J", + "--json-by-target-id", + dest="command", + action="store_const", const=json_by_target_id, - help='map attached targets from their target ID to their detailed ' - 'information in JSON format') + help="map attached targets from their target ID to their detailed information in JSON format", + ) commands.add_argument( - '-p', '--json-platforms', dest='command', action='store_const', + "-p", + "--json-platforms", + dest="command", + action="store_const", const=json_platforms, - help='list attached platform names in JSON format.') + help="list attached platform names in JSON format.", + ) commands.add_argument( - '-P', '--json-platforms-ext', dest='command', action='store_const', + "-P", + "--json-platforms-ext", + dest="command", + action="store_const", const=json_platforms_ext, - help='map attached platform names to the number of attached boards in ' - 'JSON format') + help="map attached platform names to the number of attached boards in JSON 
format", + ) commands.add_argument( - '-l', '--list', dest='command', action='store_const', + "-l", + "--list", + dest="command", + action="store_const", const=list_platforms, - help='list all target IDs and their corresponding platform names ' - 'understood by mbed-ls') + help="list all target IDs and their corresponding platform names understood by mbed-ls", + ) commands.add_argument( - '--version', dest='command', action='store_const', const=print_version, - help='print package version and exit') + "--version", dest="command", action="store_const", const=print_version, help="print package version and exit" + ) commands.add_argument( - '-m', '--mock', metavar='ID:NAME', - help='substitute or create a target ID to platform name mapping used' - 'when invoking mbedls in the current directory') + "-m", + "--mock", + metavar="ID:NAME", + help="substitute or create a target ID to platform name mapping used" + "when invoking mbedls in the current directory", + ) parser.add_argument( - '--skip-retarget', dest='skip_retarget', default=False, + "--skip-retarget", + dest="skip_retarget", + default=False, action="store_true", - help='skip parsing and interpretation of the re-target file,' - ' `./mbedls.json`') + help="skip parsing and interpretation of the re-target file, `./mbedls.json`", + ) parser.add_argument( - '-u', '--list-unmounted', dest='list_unmounted', default=False, - action='store_true', - help='list mbeds, regardless of whether they are mounted or not') + "-u", + "--list-unmounted", + dest="list_unmounted", + default=False, + action="store_true", + help="list mbeds, regardless of whether they are mounted or not", + ) parser.add_argument( - '-d', '--debug', dest='debug', default=False, action="store_true", - help='outputs extra debug information useful when creating issues!') + "-d", + "--debug", + dest="debug", + default=False, + action="store_true", + help="outputs extra debug information useful when creating issues!", + ) args = parser.parse_args(to_parse) 
if args.mock: args.command = mock_platform return args + def start_logging(): try: import colorlog - colorlog.basicConfig( - format='%(log_color)s%(levelname)s%(reset)s:%(name)s:%(message)s') + + colorlog.basicConfig(format="%(log_color)s%(levelname)s%(reset)s:%(name)s:%(message)s") except ImportError: import logging + logging.basicConfig() del logging + def mbedls_main(): """! Function used to drive CLI (command line interface) application @return Function exits with success code @@ -176,6 +233,7 @@ def mbedls_main(): args = parse_cli(sys.argv[1:]) import logging + root_logger = logging.getLogger("mbedls") if args.debug: root_logger.setLevel(logging.DEBUG) @@ -183,14 +241,14 @@ def mbedls_main(): root_logger.setLevel(logging.INFO) del logging logger.debug("mbed-ls ver. %s", get_version()) - logger.debug("host: %s", str(mbed_lstools_os_info())) + logger.debug("host: %s", str(mbed_lstools_os_info())) - mbeds = create(skip_retarget=args.skip_retarget, - list_unmounted=args.list_unmounted, - force_mock=args.command is mock_platform) + mbeds = create( + skip_retarget=args.skip_retarget, list_unmounted=args.list_unmounted, force_mock=args.command is mock_platform + ) if mbeds is None: - logger.critical('This platform is not supported! Pull requests welcome at github.com/ARMmbed/mbed-ls') + logger.critical("This platform is not supported! 
Pull requests welcome at github.com/ARMmbed/mbed-ls") sys.exit(1) ret_code = args.command(mbeds, args) diff --git a/tools/python/mbed_lstools/platform_database.py b/tools/python/mbed_lstools/platform_database.py index 335dfb9d46f..e34439c4c74 100644 --- a/tools/python/mbed_lstools/platform_database.py +++ b/tools/python/mbed_lstools/platform_database.py @@ -21,7 +21,7 @@ LOCAL_PLATFORM_DATABASE, LOCAL_MOCKS_DATABASE, DEFAULT_PLATFORM_DB, - PlatformDatabase + PlatformDatabase, ) """ diff --git a/tools/python/mbed_lstools/windows.py b/tools/python/mbed_lstools/windows.py index 521ed09a0bf..3e4028de1aa 100644 --- a/tools/python/mbed_lstools/windows.py +++ b/tools/python/mbed_lstools/windows.py @@ -15,7 +15,4 @@ limitations under the License. """ -from mbed_os_tools.detect.windows import ( - MbedLsToolsWin7, - CompatibleIDsNotFoundException -) +from mbed_os_tools.detect.windows import MbedLsToolsWin7, CompatibleIDsNotFoundException diff --git a/tools/python/mbed_os_tools/detect/darwin.py b/tools/python/mbed_os_tools/detect/darwin.py index 701ac5cbf8b..b0d9b766111 100644 --- a/tools/python/mbed_os_tools/detect/darwin.py +++ b/tools/python/mbed_os_tools/detect/darwin.py @@ -49,19 +49,19 @@ def _plist_from_popen(popen): except ExpatError: # Beautiful soup ensures the XML is properly formed after it is parsed # so that it can be used by other less lenient commands without problems - xml_representation = BeautifulSoup(out.decode('utf8'), 'xml') + xml_representation = BeautifulSoup(out.decode("utf8"), "xml") if not xml_representation.get_text(): # The output is not in the XML format return loads(out) - return loads(xml_representation.decode().encode('utf8')) + return loads(xml_representation.decode().encode("utf8")) except ExpatError: return [] def _find_TTY(obj): - """ Find the first tty (AKA IODialinDevice) that we can find in the - children of the specified object, or None if no tty is present. 
+ """Find the first tty (AKA IODialinDevice) that we can find in the + children of the specified object, or None if no tty is present. """ try: return obj["IODialinDevice"] @@ -74,16 +74,12 @@ def _find_TTY(obj): def _prune(current, keys): - """ Reduce the amount of data we have to sift through to only - include the specified keys, and children that contain the - specified keys + """Reduce the amount of data we have to sift through to only + include the specified keys, and children that contain the + specified keys """ pruned_current = {k: current[k] for k in keys if k in current} - pruned_children = list( - filter( - None, [_prune(c, keys) for c in current.get("IORegistryEntryChildren", [])] - ) - ) + pruned_children = list(filter(None, [_prune(c, keys) for c in current.get("IORegistryEntryChildren", [])])) keep_current = any(k in current for k in keys) or pruned_children if keep_current: if pruned_children: @@ -94,14 +90,14 @@ def _prune(current, keys): def _dfs_usb_info(obj, parents): - """ Find all of the usb info that we can from this particular IORegistry - tree with depth first search (and searching the parent stack....) + """Find all of the usb info that we can from this particular IORegistry + tree with depth first search (and searching the parent stack....) 
""" output = {} if ( - "BSD Name" in obj - and obj["BSD Name"].startswith("disk") - and mbed_volume_name_match.search(obj["IORegistryEntryName"]) + "BSD Name" in obj + and obj["BSD Name"].startswith("disk") + and mbed_volume_name_match.search(obj["IORegistryEntryName"]) ): disk_id = obj["BSD Name"] usb_info = {"serial": None, "vendor_id": None, "product_id": None, "tty": None} @@ -123,8 +119,7 @@ def _dfs_usb_info(obj, parents): class MbedLsToolsDarwin(MbedLsToolsBase): - """ mbed-enabled platform detection on Mac OS X - """ + """mbed-enabled platform detection on Mac OS X""" def __init__(self, **kwargs): MbedLsToolsBase.__init__(self, **kwargs) @@ -149,25 +144,18 @@ def find_candidates(self): ] def _mount_points(self): - """ Returns map {volume_id: mount_point} """ - diskutil_ls = subprocess.Popen( - ["diskutil", "list", "-plist"], stdout=subprocess.PIPE - ) + """Returns map {volume_id: mount_point}""" + diskutil_ls = subprocess.Popen(["diskutil", "list", "-plist"], stdout=subprocess.PIPE) disks = _plist_from_popen(diskutil_ls) if logger.isEnabledFor(DEBUG): import pprint - logger.debug( - "disks dict \n%s", pprint.PrettyPrinter(indent=2).pformat(disks) - ) - return { - disk["DeviceIdentifier"]: disk.get("MountPoint", None) - for disk in disks["AllDisksAndPartitions"] - } + logger.debug("disks dict \n%s", pprint.PrettyPrinter(indent=2).pformat(disks)) + return {disk["DeviceIdentifier"]: disk.get("MountPoint", None) for disk in disks["AllDisksAndPartitions"]} def _volumes(self): - """ returns a map {volume_id: {serial:, vendor_id:, product_id:, tty:}""" + """returns a map {volume_id: {serial:, vendor_id:, product_id:, tty:}""" # to find all the possible mbed volumes, we look for registry entries # under all possible USB tree which have a "BSD Name" that starts with @@ -177,13 +165,7 @@ def _volumes(self): # serial number, and then search down again to find a tty that's part # of the same composite device # ioreg -a -r -n -l - usb_controllers = [ - "AppleUSBXHCI", - 
"AppleUSBUHCI", - "AppleUSBEHCI", - "AppleUSBOHCI", - "IOUSBHostDevice", - ] + usb_controllers = ["AppleUSBXHCI", "AppleUSBUHCI", "AppleUSBEHCI", "AppleUSBOHCI", "IOUSBHostDevice"] cmp_par = "-n" # For El Captain we need to list all the instances of (-c) rather than @@ -193,33 +175,19 @@ def _volumes(self): usb_tree = [] for usb_controller in usb_controllers: - ioreg_usb = subprocess.Popen( - ["ioreg", "-a", "-r", cmp_par, usb_controller, "-l"], - stdout=subprocess.PIPE, - ) + ioreg_usb = subprocess.Popen(["ioreg", "-a", "-r", cmp_par, usb_controller, "-l"], stdout=subprocess.PIPE) usb_tree.extend(_plist_from_popen(ioreg_usb)) r = {} for name, obj in enumerate(usb_tree): pruned_obj = _prune( - obj, - [ - "USB Serial Number", - "idVendor", - "BSD Name", - "IORegistryEntryName", - "idProduct", - "IODialinDevice", - ], + obj, ["USB Serial Number", "idVendor", "BSD Name", "IORegistryEntryName", "idProduct", "IODialinDevice"] ) if logger.isEnabledFor(DEBUG): import pprint - logger.debug( - "finding in \n%s", - pprint.PrettyPrinter(indent=2).pformat(pruned_obj), - ) + logger.debug("finding in \n%s", pprint.PrettyPrinter(indent=2).pformat(pruned_obj)) r.update(_dfs_usb_info(pruned_obj, [])) logger.debug("_volumes return %r", r) diff --git a/tools/python/mbed_os_tools/detect/linux.py b/tools/python/mbed_os_tools/detect/linux.py index 2800c4501c7..aee97298c1c 100644 --- a/tools/python/mbed_os_tools/detect/linux.py +++ b/tools/python/mbed_os_tools/detect/linux.py @@ -36,12 +36,10 @@ def _readlink(link): class MbedLsToolsLinuxGeneric(MbedLsToolsBase): - """ mbed-enabled platform for Linux with udev - """ + """mbed-enabled platform for Linux with udev""" def __init__(self, **kwargs): - """! ctor - """ + """! 
ctor""" MbedLsToolsBase.__init__(self, **kwargs) self.nlp = re.compile(r"(pci|usb)-[0-9a-zA-Z:_-]*_(?P[0-9a-zA-Z]*)-.*$") self.mmp = re.compile(r"(?P(/[^/ ]*)+) on (?P(/[^/ ]*)+) ") @@ -73,9 +71,7 @@ def _dev_by_id(self, device_type): """ dir = os.path.join("/dev", device_type, "by-id") if os.path.isdir(dir): - to_ret = dict( - self._hex_ids([os.path.join(dir, f) for f in os.listdir(dir)]) - ) + to_ret = dict(self._hex_ids([os.path.join(dir, f) for f in os.listdir(dir)])) logger.debug("Found %s devices by id %r", device_type, to_ret) return to_ret else: @@ -133,15 +129,11 @@ def _sysfs_block_devices(self, block_devices): end_index = index if end_index is None: - logger.debug( - "Did not find suitable usb folder for usb info: %s", full_sysfs_path - ) + logger.debug("Did not find suitable usb folder for usb info: %s", full_sysfs_path) continue usb_info_rel_path = path_parts[: end_index + 1] - usb_info_path = os.path.join( - SYSFS_BLOCK_DEVICE_PATH, os.sep.join(usb_info_rel_path) - ) + usb_info_path = os.path.join(SYSFS_BLOCK_DEVICE_PATH, os.sep.join(usb_info_rel_path)) vendor_id = None product_id = None @@ -153,25 +145,14 @@ def _sysfs_block_devices(self, block_devices): with open(vendor_id_file_paths, "r") as vendor_file: vendor_id = vendor_file.read().strip() except OSError as e: - logger.debug( - "Failed to read vendor id file %s weith error:", - vendor_id_file_paths, - e, - ) + logger.debug("Failed to read vendor id file %s weith error:", vendor_id_file_paths, e) try: with open(product_id_file_paths, "r") as product_file: product_id = product_file.read().strip() except OSError as e: - logger.debug( - "Failed to read product id file %s weith error:", - product_id_file_paths, - e, - ) - - result[device_names[common_device_name]] = { - "vendor_id": vendor_id, - "product_id": product_id, - } + logger.debug("Failed to read product id file %s weith error:", product_id_file_paths, e) + + result[device_names[common_device_name]] = {"vendor_id": vendor_id, "product_id": 
product_id} return result diff --git a/tools/python/mbed_os_tools/detect/lstools_base.py b/tools/python/mbed_os_tools/detect/lstools_base.py index 6fdb6a0995a..6681e9b6e3e 100644 --- a/tools/python/mbed_os_tools/detect/lstools_base.py +++ b/tools/python/mbed_os_tools/detect/lstools_base.py @@ -23,11 +23,7 @@ import functools import json -from .platform_database import ( - PlatformDatabase, - LOCAL_PLATFORM_DATABASE, - LOCAL_MOCKS_DATABASE, -) +from .platform_database import PlatformDatabase, LOCAL_PLATFORM_DATABASE, LOCAL_MOCKS_DATABASE mbedls_root_logger = logging.getLogger("mbedls") mbedls_root_logger.setLevel(logging.WARNING) @@ -57,7 +53,7 @@ class FSInteraction(object): class MbedLsToolsBase(ABC): - """ Base class for mbed-lstools, defines mbed-ls tools interface for + """Base class for mbed-lstools, defines mbed-ls tools interface for mbed-enabled devices detection for various hosts """ @@ -72,22 +68,14 @@ class MbedLsToolsBase(ABC): DETAILS_TXT_NAME = "DETAILS.TXT" MBED_HTM_NAME = "mbed.htm" - VENDOR_ID_DEVICE_TYPE_MAP = { - "0483": "stlink", - "0d28": "daplink", - "1366": "jlink", - "03eb": "atmel", - } + VENDOR_ID_DEVICE_TYPE_MAP = {"0483": "stlink", "0d28": "daplink", "1366": "jlink", "03eb": "atmel"} def __init__(self, list_unmounted=False, **kwargs): - """ ctor - """ + """ctor""" self.retarget_data = {} # Used to retarget mbed-enabled platform properties platform_dbs = [] - if isfile(self.MOCK_FILE_NAME) or ( - "force_mock" in kwargs and kwargs["force_mock"] - ): + if isfile(self.MOCK_FILE_NAME) or ("force_mock" in kwargs and kwargs["force_mock"]): platform_dbs.append(self.MOCK_FILE_NAME) elif isfile(LOCAL_MOCKS_DATABASE): platform_dbs.append(LOCAL_MOCKS_DATABASE) @@ -115,7 +103,7 @@ def list_mbeds( unique_names=False, read_details_txt=False, ): - """ List details of connected devices + """List details of connected devices @return Returns list of structures with detailed info about each mbed @param fs_interaction A member of the FSInteraction class that 
picks the trade of between quality of service and speed @@ -137,8 +125,7 @@ def list_mbeds( for device in candidates: device["device_type"] = self._detect_device_type(device) if ( - not device["mount_point"] - or not self.mount_point_ready(device["mount_point"]) + not device["mount_point"] or not self.mount_point_ready(device["mount_point"]) ) and not self.list_unmounted: if device["target_id_usb_id"] and device["serial_port"]: logger.warning( @@ -148,9 +135,7 @@ def list_mbeds( ) else: platform_data = self.plat_db.get( - device["target_id_usb_id"][0:4], - device_type=device["device_type"] or "daplink", - verbose_data=True, + device["target_id_usb_id"][0:4], device_type=device["device_type"] or "daplink", verbose_data=True ) device.update(platform_data or {"platform_name": None}) maybe_device = { @@ -158,32 +143,23 @@ def list_mbeds( FSInteraction.AfterFilter: self._fs_after_id_check, FSInteraction.Never: self._fs_never, }[fs_interaction](device, filter_function, read_details_txt) - if maybe_device and ( - maybe_device["mount_point"] or self.list_unmounted - ): + if maybe_device and (maybe_device["mount_point"] or self.list_unmounted): if unique_names: name = device["platform_name"] platform_count.setdefault(name, -1) platform_count[name] += 1 - device["platform_name_unique"] = "%s[%d]" % ( - name, - platform_count[name], - ) + device["platform_name_unique"] = "%s[%d]" % (name, platform_count[name]) try: device.update(self.retarget_data[device["target_id"]]) logger.debug( - "retargeting %s with %r", - device["target_id"], - self.retarget_data[device["target_id"]], + "retargeting %s with %r", device["target_id"], self.retarget_data[device["target_id"]] ) except KeyError: pass # This is done for API compatibility, would prefer for this to # just be None - device["device_type"] = ( - device["device_type"] if device["device_type"] else "unknown" - ) + device["device_type"] = device["device_type"] if device["device_type"] else "unknown" result.append(maybe_device) 
return result @@ -222,10 +198,10 @@ def _fs_after_id_check(self, device, filter_function, read_details_txt): return None def _update_device_from_fs(self, device, read_details_txt): - """ Updates the device information based on files from its 'mount_point' - @param device Dictionary containing device information - @param read_details_txt A boolean controlling the presense of the - output dict attributes read from other files present on the 'mount_point' + """Updates the device information based on files from its 'mount_point' + @param device Dictionary containing device information + @param read_details_txt A boolean controlling the presense of the + output dict attributes read from other files present on the 'mount_point' """ if not device.get("mount_point", None): return @@ -247,23 +223,22 @@ def _update_device_from_fs(self, device, read_details_txt): except (OSError, IOError) as e: logger.warning( - 'Marking device with mount point "%s" as unmounted due to the ' - "following error: %s", + 'Marking device with mount point "%s" as unmounted due to the following error: %s', device["mount_point"], e, ) device["mount_point"] = None def _detect_device_type(self, device): - """ Returns a string of the device type - @param device Dictionary containing device information - @return Device type located in VENDOR_ID_DEVICE_TYPE_MAP or None if unknown + """Returns a string of the device type + @param device Dictionary containing device information + @return Device type located in VENDOR_ID_DEVICE_TYPE_MAP or None if unknown """ return self.VENDOR_ID_DEVICE_TYPE_MAP.get(device.get("vendor_id")) def _update_device_details_daplink_compatible(self, device, read_details_txt): - """ Updates the daplink-specific device information based on files from its + """Updates the daplink-specific device information based on files from its 'mount_point' @param device Dictionary containing device information @param read_details_txt A boolean controlling the presense of the @@ -273,23 +248,12 
@@ def _update_device_details_daplink_compatible(self, device, read_details_txt): if self.MBED_HTM_NAME.lower() in lowercase_directory_entries: self._update_device_from_htm(device) elif not read_details_txt: - logger.debug( - "Since mbed.htm is not present, attempting to use " - "details.txt for the target id" - ) + logger.debug("Since mbed.htm is not present, attempting to use details.txt for the target id") read_details_txt = True - if ( - read_details_txt - and self.DETAILS_TXT_NAME.lower() in lowercase_directory_entries - ): + if read_details_txt and self.DETAILS_TXT_NAME.lower() in lowercase_directory_entries: details_txt = self._details_txt(device["mount_point"]) or {} - device.update( - { - "daplink_%s" % f.lower().replace(" ", "_"): v - for f, v in details_txt.items() - } - ) + device.update({"daplink_%s" % f.lower().replace(" ", "_"): v for f, v in details_txt.items()}) # If details.txt contains the target id, this is the most trusted source if device.get("daplink_unique_id", None): @@ -297,21 +261,17 @@ def _update_device_details_daplink_compatible(self, device, read_details_txt): if device["target_id"]: identifier = device["target_id"][0:4] - platform_data = self.plat_db.get( - identifier, device_type="daplink", verbose_data=True - ) + platform_data = self.plat_db.get(identifier, device_type="daplink", verbose_data=True) if not platform_data: - logger.warning( - 'daplink entry: "%s" not found in platform database', identifier - ) + logger.warning('daplink entry: "%s" not found in platform database', identifier) else: device.update(platform_data) else: device["platform_name"] = None def _update_device_details_jlink(self, device, _): - """ Updates the jlink-specific device information based on files from its 'mount_point' - @param device Dictionary containing device information + """Updates the jlink-specific device information based on files from its 'mount_point' + @param device Dictionary containing device information """ lower_case_map = {e.lower(): e 
for e in device["directory_entries"]} @@ -332,13 +292,9 @@ def _update_device_details_jlink(self, device, _): if m: device["url"] = m.group(1).strip() identifier = device["url"].split("/")[-1] - platform_data = self.plat_db.get( - identifier, device_type="jlink", verbose_data=True - ) + platform_data = self.plat_db.get(identifier, device_type="jlink", verbose_data=True) if not platform_data: - logger.warning( - 'jlink entry: "%s", not found in platform database', identifier - ) + logger.warning('jlink entry: "%s", not found in platform database', identifier) else: device.update(platform_data) break @@ -349,32 +305,20 @@ def _update_device_from_htm(self, device): """ htm_target_id, daplink_info = self._read_htm_ids(device["mount_point"]) if daplink_info: - device.update( - { - "daplink_%s" % f.lower().replace(" ", "_"): v - for f, v in daplink_info.items() - } - ) + device.update({"daplink_%s" % f.lower().replace(" ", "_"): v for f, v in daplink_info.items()}) if htm_target_id: - logger.debug( - "Found htm target id, %s, for usb target id %s", - htm_target_id, - device["target_id_usb_id"], - ) + logger.debug("Found htm target id, %s, for usb target id %s", htm_target_id, device["target_id_usb_id"]) device["target_id"] = htm_target_id else: - logger.debug( - "Could not read htm on from usb id %s. Falling back to usb id", - device["target_id_usb_id"], - ) + logger.debug("Could not read htm on from usb id %s. 
Falling back to usb id", device["target_id_usb_id"]) device["target_id"] = device["target_id_usb_id"] device["target_id_mbed_htm"] = htm_target_id def _update_device_details_atmel(self, device, _): - """ Updates the Atmel device information based on files from its 'mount_point' - @param device Dictionary containing device information - @param read_details_txt A boolean controlling the presense of the - output dict attributes read from other files present on the 'mount_point' + """Updates the Atmel device information based on files from its 'mount_point' + @param device Dictionary containing device information + @param read_details_txt A boolean controlling the presense of the + output dict attributes read from other files present on the 'mount_point' """ # Atmel uses a system similar to DAPLink, but there's no details.txt with a @@ -384,9 +328,7 @@ def _update_device_details_atmel(self, device, _): # to redirect to platform page device["target_id"] = device["target_id_usb_id"][4:8] - platform_data = self.plat_db.get( - device["target_id"], device_type="atmel", verbose_data=True - ) + platform_data = self.plat_db.get(device["target_id"], device_type="atmel", verbose_data=True) device.update(platform_data or {"platform_name": None}) @@ -427,7 +369,7 @@ def retarget(self): return self.retarget_data def get_dummy_platform(self, platform_name): - """! Returns simple dummy platform """ + """! Returns simple dummy platform""" if not hasattr(self, "dummy_counter"): self.dummy_counter = {} # platform: counter @@ -436,8 +378,7 @@ def get_dummy_platform(self, platform_name): platform = { "platform_name": platform_name, - "platform_name_unique": "%s[%d]" - % (platform_name, self.dummy_counter[platform_name]), + "platform_name_unique": "%s[%d]" % (platform_name, self.dummy_counter[platform_name]), "mount_point": "DUMMY", "serial_port": "DUMMY", "target_id": "DUMMY", @@ -514,25 +455,25 @@ def _htm_lines(self, mount_point): def _details_txt(self, mount_point): """! 
Load DETAILS.TXT to dictionary: - DETAILS.TXT example: - Version: 0226 - Build: Aug 24 2015 17:06:30 - Git Commit SHA: 27a236b9fe39c674a703c5c89655fbd26b8e27e1 - Git Local mods: Yes - - or: - - # DAPLink Firmware - see https://mbed.com/daplink - Unique ID: 0240000029164e45002f0012706e0006f301000097969900 - HIF ID: 97969900 - Auto Reset: 0 - Automation allowed: 0 - Daplink Mode: Interface - Interface Version: 0240 - Git SHA: c765cbb590f57598756683254ca38b211693ae5e - Local Mods: 0 - USB Interfaces: MSD, CDC, HID - Interface CRC: 0x26764ebf + DETAILS.TXT example: + Version: 0226 + Build: Aug 24 2015 17:06:30 + Git Commit SHA: 27a236b9fe39c674a703c5c89655fbd26b8e27e1 + Git Local mods: Yes + + or: + + # DAPLink Firmware - see https://mbed.com/daplink + Unique ID: 0240000029164e45002f0012706e0006f301000097969900 + HIF ID: 97969900 + Auto Reset: 0 + Automation allowed: 0 + Daplink Mode: Interface + Interface Version: 0240 + Git SHA: c765cbb590f57598756683254ca38b211693ae5e + Local Mods: 0 + USB Interfaces: MSD, CDC, HID + Interface CRC: 0x26764ebf """ if mount_point: @@ -572,8 +513,7 @@ def _target_id_from_htm(self, line): return None def mount_point_ready(self, path): - """! Check if a mount point is ready for file operations - """ + """! Check if a mount point is ready for file operations""" return exists(path) and isdir(path) @staticmethod @@ -602,10 +542,7 @@ def list_mbeds_ext(self): return self.list_mbeds(unique_names=True, read_details_txt=True) - @deprecated( - "List formatting methods are deprecated for a simpler API. " - "Please use 'list_mbeds' instead." - ) + @deprecated("List formatting methods are deprecated for a simpler API. Please use 'list_mbeds' instead.") def list_manufacture_ids(self): """! Creates list of all available mappings for target_id -> Platform @return String with table formatted output @@ -622,10 +559,7 @@ def list_manufacture_ids(self): return pt.get_string() - @deprecated( - "List formatting methods are deprecated to simplify the API. 
" - "Please use 'list_mbeds' instead." - ) + @deprecated("List formatting methods are deprecated to simplify the API. Please use 'list_mbeds' instead.") def list_platforms(self): """! Useful if you just want to know which platforms are currently available on the system @@ -639,10 +573,7 @@ def list_platforms(self): result.append(platform_name) return result - @deprecated( - "List formatting methods are deprecated to simplify the API. " - "Please use 'list_mbeds' instead." - ) + @deprecated("List formatting methods are deprecated to simplify the API. Please use 'list_mbeds' instead.") def list_platforms_ext(self): """! Useful if you just want to know how many platforms of each type are currently available on the system @@ -658,10 +589,7 @@ def list_platforms_ext(self): result[platform_name] += 1 return result - @deprecated( - "List formatting methods are deprecated to simplify the API. " - "Please use 'list_mbeds' instead." - ) + @deprecated("List formatting methods are deprecated to simplify the API. Please use 'list_mbeds' instead.") def list_mbeds_by_targetid(self): """! Get information about mbeds with extended parameters/info included @return Returns dictionary where keys are TargetIDs and values are mbed @@ -675,13 +603,8 @@ def list_mbeds_by_targetid(self): result[target_id] = mbed return result - @deprecated( - "List formatting methods are deprecated to simplify the API. " - "Please use 'list_mbeds' instead." - ) - def get_string( - self, border=False, header=True, padding_width=1, sortby="platform_name" - ): + @deprecated("List formatting methods are deprecated to simplify the API. Please use 'list_mbeds' instead.") + def get_string(self, border=False, header=True, padding_width=1, sortby="platform_name"): """! 
Printing with some sql table like decorators @param border Table border visibility @param header Table header visibility @@ -716,17 +639,12 @@ def get_string( for col in columns: row.append(mbed[col] if col in mbed and mbed[col] else "unknown") pt.add_row(row) - result = pt.get_string( - border=border, header=header, padding_width=padding_width, sortby=sortby - ) + result = pt.get_string(border=border, header=header, padding_width=padding_width, sortby=sortby) return result # Private functions supporting API - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def get_json_data_from_file(self, json_spec_filename, verbose=False): """! Loads from file JSON formatted string to data structure @return None if JSON can be loaded @@ -736,69 +654,43 @@ def get_json_data_from_file(self, json_spec_filename, verbose=False): try: return json.load(data_file) except ValueError as json_error_msg: - logger.error( - "Parsing file(%s): %s", json_spec_filename, json_error_msg - ) + logger.error("Parsing file(%s): %s", json_spec_filename, json_error_msg) return None except IOError as fileopen_error_msg: logger.warning(fileopen_error_msg) return None - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def get_htm_target_id(self, mount_point): target_id, _ = self._read_htm_ids(mount_point) return target_id - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def get_mbed_htm(self, mount_point): _, build_info = self._read_htm_ids(mount_point) return build_info - @deprecated( - "This method will be removed from the public API. 
" - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def get_mbed_htm_comment_section_ver_build(self, line): return self._mbed_htm_comment_section_ver_build(line) - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def get_mbed_htm_lines(self, mount_point): return self._htm_lines(mount_point) - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def get_details_txt(self, mount_point): return self._details_txt(mount_point) - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def parse_details_txt(self, lines): return self._parse_details(lines) - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. Please use 'list_mbeds' instead") def scan_html_line_for_target_id(self, line): return self._target_id_from_htm(line) @staticmethod - @deprecated( - "This method will be removed from the public API. " - "Please use 'list_mbeds' instead" - ) + @deprecated("This method will be removed from the public API. 
Please use 'list_mbeds' instead") def run_cli_process(cmd, shell=True): return MbedLsToolsBase._run_cli_process(cmd, shell) diff --git a/tools/python/mbed_os_tools/detect/main.py b/tools/python/mbed_os_tools/detect/main.py index 031205198f3..4133c56e3c8 100644 --- a/tools/python/mbed_os_tools/detect/main.py +++ b/tools/python/mbed_os_tools/detect/main.py @@ -1,4 +1,3 @@ - # Copyright (c) 2018, Arm Limited and affiliates. # SPDX-License-Identifier: Apache-2.0 # @@ -76,13 +75,7 @@ def mbed_lstools_os_info(): @return Returns tuple with information about OS and host platform """ - result = ( - os.name, - platform.system(), - platform.release(), - platform.version(), - sys.platform, - ) + result = (os.name, platform.system(), platform.release(), platform.version(), sys.platform) return result diff --git a/tools/python/mbed_os_tools/detect/platform_database.py b/tools/python/mbed_os_tools/detect/platform_database.py index c2170ca7f7f..7040723cf91 100644 --- a/tools/python/mbed_os_tools/detect/platform_database.py +++ b/tools/python/mbed_os_tools/detect/platform_database.py @@ -41,353 +41,338 @@ LOCAL_MOCKS_DATABASE = join(user_data_dir("mbedls"), "mock.json") DEFAULT_PLATFORM_DB = { - u"daplink": { - u"0200": u"KL25Z", - u"0201": u"KW41Z", - u"0210": u"KL05Z", - u"0214": u"HEXIWEAR", - u"0217": u"K82F", - u"0218": u"KL82Z", - u"0220": u"KL46Z", - u"0227": u"MIMXRT1050_EVK", - u"0228": u"RAPIDIOT_K64F", - u"0230": u"K20D50M", - u"0231": u"K22F", - u"0234": u"RAPIDIOT_KW41Z", - u"0236": u"LPC55S69", - u"0240": u"K64F", - u"0245": u"K64F", - u"0250": u"KW24D", - u"0261": u"KL27Z", - u"0262": u"KL43Z", - u"0300": u"MTS_GAMBIT", - u"0305": u"MTS_MDOT_F405RG", - u"0310": u"MTS_DRAGONFLY_F411RE", - u"0311": u"K66F", - u"0312": u"MTS_DRAGONFLY_L471QG", - u"0313": u"MTS_DRAGONFLY_L496VG", - u"0315": u"MTS_MDOT_F411RE", - u"0316": u"MTS_DRAGONFLY_F413RH", - u"0350": u"XDOT_L151CC", - u"0360": u"HANI_IOT", - u"0400": u"MAXWSNENV", - u"0405": u"MAX32600MBED", - u"0407": 
u"MAX32620HSP", - u"0408": u"MAX32625NEXPAQ", - u"0409": u"MAX32630FTHR", - u"0410": u"ETTEPLAN_LORA", - u"0415": u"MAX32625MBED", - u"0416": u"MAX32625PICO", - u"0418": u"MAX32620FTHR", - u"0419": u"MAX35103EVKIT2", - u"0421": u"MAX32660EVSYS", - u"0424": u"MAX32670EVKIT", - u"0450": u"MTB_UBLOX_ODIN_W2", - u"0451": u"MTB_MXCHIP_EMW3166", - u"0452": u"MTB_LAIRD_BL600", - u"0453": u"MTB_MTS_XDOT", - u"0454": u"MTB_MTS_DRAGONFLY", - u"0455": u"MTB_UBLOX_NINA_B1", - u"0456": u"MTB_MURATA_ABZ", - u"0457": u"MTB_RAK811", - u"0458": u"ADV_WISE_1510", - u"0459": u"ADV_WISE_1530", - u"0460": u"ADV_WISE_1570", - u"0461": u"MTB_LAIRD_BL652", - u"0462": u"MTB_USI_WM_BN_BM_22", - u"0465": u"MTB_LAIRD_BL654", - u"0466": u"MTB_MURATA_WSM_BL241", - u"0467": u"MTB_STM_S2LP", - u"0468": u"MTB_STM_L475", - u"0469": u"MTB_STM32_F439", - u"0472": u"MTB_ACONNO_ACN52832", - u"0602": u"EV_COG_AD3029LZ", - u"0603": u"EV_COG_AD4050LZ", - u"0604": u"SDP_K1", - u"0700": u"NUCLEO_F103RB", - u"0705": u"NUCLEO_F302R8", - u"0710": u"NUCLEO_L152RE", - u"0715": u"NUCLEO_L053R8", - u"0720": u"NUCLEO_F401RE", - u"0725": u"NUCLEO_F030R8", - u"0729": u"NUCLEO_G071RB", - u"0730": u"NUCLEO_F072RB", - u"0735": u"NUCLEO_F334R8", - u"0740": u"NUCLEO_F411RE", - u"0742": u"NUCLEO_F413ZH", - u"0743": u"DISCO_F413ZH", - u"0744": u"NUCLEO_F410RB", - u"0745": u"NUCLEO_F303RE", - u"0746": u"DISCO_F303VC", - u"0747": u"NUCLEO_F303ZE", - u"0750": u"NUCLEO_F091RC", - u"0755": u"NUCLEO_F070RB", - u"0760": u"NUCLEO_L073RZ", - u"0764": u"DISCO_L475VG_IOT01A", - u"0765": u"NUCLEO_L476RG", - u"0766": u"SILICA_SENSOR_NODE", - u"0770": u"NUCLEO_L432KC", - u"0774": u"DISCO_L4R9I", - u"0775": u"NUCLEO_F303K8", - u"0776": u"NUCLEO_L4R5ZI", - u"0777": u"NUCLEO_F446RE", - u"0778": u"NUCLEO_F446ZE", - u"0779": u"NUCLEO_L433RC_P", - u"0780": u"NUCLEO_L011K4", - u"0781": u"NUCLEO_L4R5ZI_P", - u"0783": u"NUCLEO_L010RB", - u"0785": u"NUCLEO_F042K6", - u"0788": u"DISCO_F469NI", - u"0790": u"NUCLEO_L031K6", - u"0791": 
u"NUCLEO_F031K6", - u"0795": u"DISCO_F429ZI", - u"0796": u"NUCLEO_F429ZI", - u"0797": u"NUCLEO_F439ZI", - u"0805": u"DISCO_L053C8", - u"0810": u"DISCO_F334C8", - u"0812": u"NUCLEO_F722ZE", - u"0813": u"NUCLEO_H743ZI", - u"0814": u"DISCO_H747I", - u"0815": u"DISCO_F746NG", - u"0816": u"NUCLEO_F746ZG", - u"0817": u"DISCO_F769NI", - u"0818": u"NUCLEO_F767ZI", - u"0820": u"DISCO_L476VG", - u"0821": u"NUCLEO_L452RE", - u"0822": u"DISCO_L496AG", - u"0823": u"NUCLEO_L496ZG", - u"0824": u"LPC824", - u"0825": u"DISCO_F412ZG", - u"0826": u"NUCLEO_F412ZG", - u"0827": u"NUCLEO_L486RG", - u"0828": u"NUCLEO_L496ZG_P", - u"0829": u"NUCLEO_L452RE_P", - u"0830": u"DISCO_F407VG", - u"0832": u"NUCLEO_H503RB", - u"0833": u"DISCO_L072CZ_LRWAN1", - u"0835": u"NUCLEO_F207ZG", - u"0836": u"NUCLEO_H743ZI2", - u"0839": u"NUCLEO_WB55RG", - u"0840": u"B96B_F446VE", - u"0841": u"NUCLEO_G474RE", - u"0842": u"NUCLEO_H753ZI", - u"0843": u"NUCLEO_H745ZI_Q", - u"0844": u"NUCLEO_H755ZI_Q", - u"0847": u"DISCO_H745I", - u"0849": u"NUCLEO_G070RB", - u"0850": u"NUCLEO_G431RB", - u"0851": u"NUCLEO_G431KB", - u"0852": u"NUCLEO_G031K8", - u"0853": u"NUCLEO_F301K8", - u"0854": u"NUCLEO_L552ZE_Q", - u"0855": u"DISCO_L562QE", - u"0858": u"DISCO_H750B", - u"0859": u"DISCO_H7B3I", - u"0860": u"NUCLEO_H7A3ZI_Q", - u"0863": u"DISCO_L4P5G", - u"0865": u"NUCLEO_L4P5ZG", - u"0866": u"NUCLEO_WL55JC", - u"0871": u"NUCLEO_H723ZG", - u"0872": u"NUCLEO_G0B1RE", - u"0875": u"DISCO_H735G", - u"0878": u"NUCLEO_H563ZI", - u"0879": u"NUCLEO_F756ZG", - u"0882": u"NUCLEO_G491RE", - u"0883": u"NUCLEO_WB15CC", - u"0884": u"DISCO_WB5MMG", - u"0885": u"B_L4S5I_IOT01A", - u"0886": u"NUCLEO_U575ZI_Q", - u"0887": u"B_U585I_IOT02A", - u"0888": u"NUCLEO_U083RC", - u"0900": u"SAMR21G18A", - u"0905": u"SAMD21G18A", - u"0910": u"SAML21J18A", - u"0915": u"SAMD21J18A", - u"1000": u"LPC2368", - u"1010": u"LPC1768", - u"1017": u"HRM1017", - u"1018": u"SSCI824", - u"1019": u"TY51822R3", - u"1022": u"RO359B", - u"1034": u"LPC11U34", - u"1040": 
u"LPC11U24", - u"1045": u"LPC11U24", - u"1050": u"LPC812", - u"1054": u"LPC54114", - u"1056": u"LPC546XX", - u"1060": u"LPC4088", - u"1061": u"LPC11U35_401", - u"1062": u"LPC4088_DM", - u"1070": u"NRF51822", - u"1075": u"NRF51822_OTA", - u"1080": u"OC_MBUINO", - u"1090": u"RBLAB_NRF51822", - u"1093": u"RBLAB_BLENANO2", - u"1095": u"RBLAB_BLENANO", - u"1100": u"NRF51_DK", - u"1101": u"NRF52_DK", - u"1102": u"NRF52840_DK", - u"1105": u"NRF51_DK_OTA", - u"1114": u"LPC1114", - u"1120": u"NRF51_DONGLE", - u"1130": u"NRF51822_SBK", - u"1140": u"WALLBOT_BLE", - u"1168": u"LPC11U68", - u"1200": u"NCS36510", - u"1234": u"UBLOX_C027", - u"1236": u"UBLOX_EVK_ODIN_W2", - u"1237": u"UBLOX_EVK_NINA_B1", - u"1280": u"OKDO_ODIN_W2", - u"1300": u"NUC472-NUTINY", - u"1301": u"NUMBED", - u"1302": u"NUMAKER_PFM_NUC472", - u"1303": u"NUMAKER_PFM_M453", - u"1304": u"NUMAKER_PFM_M487", - u"1305": u"NU_PFM_M2351", - u"1306": u"NUMAKER_PFM_NANO130", - u"1307": u"NUMAKER_PFM_NUC240", - u"1308": u"NUMAKER_IOT_M487", - u"1309": u"NUMAKER_IOT_M252", - u"1310": u"NUMAKER_IOT_M263A", - u"1312": u"NU_M2354", - u"1313": u"NUMAKER_IOT_M467", - u"1500": u"RHOMBIO_L476DMW1K", - u"1549": u"LPC1549", - u"1600": u"LPC4330_M4", - u"1605": u"LPC4330_M4", - u'1900': u'CY8CKIT_062_WIFI_BT', - u'1901': u'CY8CPROTO_062_4343W', - u'1902': u'CY8CKIT_062_BLE', - u'1903': u'CYW9P62S1_43012EVB_01', - u'1904': u'CY8CPROTO_063_BLE', - u'1905': u'CY8CKIT_062S2_4343W', - u'1906': u'CYW943012P6EVB_01', - u'1907': u'CY8CPROTO_064_SB', - u'1908': u'CYW9P62S1_43438EVB_01', - u'1909': u'CY8CPROTO_062S2_43012', - u'190A': u'CY8CKIT_064S2_4343W', - u'190B': u'CY8CKIT_062S2_43012', - u'190C': u'CY8CPROTO_064B0S3', - u'190E': u'CY8CPROTO_062S3_4343W', - u'190F': u'CY8CPROTO_064B0S1_BLE', - u'1910': u'CY8CKIT064B0S2_4343W', - u'1911': u'CY8CKIT064S0S2_4343W', - u'1912': u'CYSBSYSKIT_01', - u"2000": u"EFM32_G8XX_STK", - u"2005": u"EFM32HG_STK3400", - u"2010": u"EFM32WG_STK3800", - u"2015": u"EFM32GG_STK3700", - u"2020": 
u"EFM32LG_STK3600", - u"2025": u"EFM32TG_STK3300", - u"2030": u"EFM32ZG_STK3200", - u"2035": u"EFM32PG_STK3401", - u"2040": u"EFM32PG12_STK3402", - u"2041": u"TB_SENSE_12", - u"2042": u"EFM32GG11_STK3701", - u"2043": u"EFM32TG11_STK3301", - u"2045": u"TB_SENSE_1", - u"2100": u"XBED_LPC1768", - u"2201": u"WIZWIKI_W7500", - u"2202": u"WIZWIKI_W7500ECO", - u"2203": u"WIZWIKI_W7500P", - u"2600": u"EP_AGORA", - u"3001": u"LPC11U24", - u"3101": u"SDT32620B", - u"3102": u"SDT32625B", - u"3103": u"SDT51822B", - u"3104": u"SDT52832B", - u"3105": u"SDT64B", - u"3701": u"S5JS100", - u"3702": u"S3JT100", - u"3703": u"S1SBP6A", - u"4000": u"LPC11U35_Y5_MBUG", - u"4005": u"NRF51822_Y5_MBUG", - u"4100": u"MOTE_L152RC", - u"4337": u"LPC4337", - u"4500": u"DELTA_DFCM_NNN40", - u"4501": u"DELTA_DFBM_NQ620", - u"4502": u"DELTA_DFCM_NNN50", - u"4600": u"REALTEK_RTL8195AM", - u"5000": u"ARM_MPS2", - u"5001": u"ARM_IOTSS_BEID", - u"5002": u"ARM_BEETLE_SOC", - u"5003": u"ARM_MPS2_M0P", - u"5004": u"ARM_CM3DS_MPS2", - u"5005": u"ARM_MPS2_M0DS", - u"5006": u"ARM_MUSCA_A1", - u"5007": u"ARM_MUSCA_B1", - u"5009": u"ARM_MUSCA_S1", - u"5020": u"HOME_GATEWAY_6LOWPAN", - u"5500": u"RZ_A1H", - u"5501": u"GR_LYCHEE", - u"5502": u"GR_MANGO", - u"6000": u"FUTURE_SEQUANA", - u"6660": u"NZ32_SC151", - u"7011": u"TMPM066", - u"7012": u"TMPM3H6", - u"7013": u"TMPM46B", - u"7014": u"TMPM3HQ", - u"7015": u"TMPM4G9", - u"7020": u"TMPM4KN", - u"7402": u"MBED_BR_HAT", - u"7778": u"TEENSY3_1", - u"8001": u"UNO_91H", - u"8012": u"TT_M3HQ", - u"8013": u"TT_M4G9", - u"8080": u"FF1705_L151CC", - u"8081": u"FF_LPC546XX", - u"9001": u"LPC1347", - u"9002": u"LPC11U24", - u"9003": u"LPC1347", - u"9004": u"ARCH_PRO", - u"9006": u"LPC11U24", - u"9007": u"LPC11U35_501", - u"9008": u"XADOW_M0", - u"9009": u"ARCH_BLE", - u"9010": u"ARCH_GPRS", - u"9011": u"ARCH_MAX", - u"9012": u"SEEED_TINY_BLE", - u"9014": u"WIO_3G", - u"9015": u"WIO_BG96", - u"9017": u"WIO_EMW3166", - u"9020": u"UHURU_RAVEN", - u"9900": 
u"NRF51_MICROBIT", - u"C002": u"VK_RZ_A1H", - u"C005": u"MTM_MTCONNECT04S", - u"C006": u"VBLUNO51", - u"C008": u"SAKURAIO_EVB_01", - u"C030": u"UBLOX_C030_U201", - u"C031": u"UBLOX_C030_N211", - u"C032": u"UBLOX_C030_R404M", - u"C033": u"UBLOX_C030_R410M", - u"C034": u"UBLOX_C030_S200", - u"C035": u"UBLOX_C030_R3121", - u"C036": u"UBLOX_C030_R412M", - u"RIOT": u"RIOT", + "daplink": { + "0200": "KL25Z", + "0201": "KW41Z", + "0210": "KL05Z", + "0214": "HEXIWEAR", + "0217": "K82F", + "0218": "KL82Z", + "0220": "KL46Z", + "0227": "MIMXRT1050_EVK", + "0228": "RAPIDIOT_K64F", + "0230": "K20D50M", + "0231": "K22F", + "0234": "RAPIDIOT_KW41Z", + "0236": "LPC55S69", + "0240": "K64F", + "0245": "K64F", + "0250": "KW24D", + "0261": "KL27Z", + "0262": "KL43Z", + "0300": "MTS_GAMBIT", + "0305": "MTS_MDOT_F405RG", + "0310": "MTS_DRAGONFLY_F411RE", + "0311": "K66F", + "0312": "MTS_DRAGONFLY_L471QG", + "0313": "MTS_DRAGONFLY_L496VG", + "0315": "MTS_MDOT_F411RE", + "0316": "MTS_DRAGONFLY_F413RH", + "0350": "XDOT_L151CC", + "0360": "HANI_IOT", + "0400": "MAXWSNENV", + "0405": "MAX32600MBED", + "0407": "MAX32620HSP", + "0408": "MAX32625NEXPAQ", + "0409": "MAX32630FTHR", + "0410": "ETTEPLAN_LORA", + "0415": "MAX32625MBED", + "0416": "MAX32625PICO", + "0418": "MAX32620FTHR", + "0419": "MAX35103EVKIT2", + "0421": "MAX32660EVSYS", + "0424": "MAX32670EVKIT", + "0450": "MTB_UBLOX_ODIN_W2", + "0451": "MTB_MXCHIP_EMW3166", + "0452": "MTB_LAIRD_BL600", + "0453": "MTB_MTS_XDOT", + "0454": "MTB_MTS_DRAGONFLY", + "0455": "MTB_UBLOX_NINA_B1", + "0456": "MTB_MURATA_ABZ", + "0457": "MTB_RAK811", + "0458": "ADV_WISE_1510", + "0459": "ADV_WISE_1530", + "0460": "ADV_WISE_1570", + "0461": "MTB_LAIRD_BL652", + "0462": "MTB_USI_WM_BN_BM_22", + "0465": "MTB_LAIRD_BL654", + "0466": "MTB_MURATA_WSM_BL241", + "0467": "MTB_STM_S2LP", + "0468": "MTB_STM_L475", + "0469": "MTB_STM32_F439", + "0472": "MTB_ACONNO_ACN52832", + "0602": "EV_COG_AD3029LZ", + "0603": "EV_COG_AD4050LZ", + "0604": "SDP_K1", + "0700": 
"NUCLEO_F103RB", + "0705": "NUCLEO_F302R8", + "0710": "NUCLEO_L152RE", + "0715": "NUCLEO_L053R8", + "0720": "NUCLEO_F401RE", + "0725": "NUCLEO_F030R8", + "0729": "NUCLEO_G071RB", + "0730": "NUCLEO_F072RB", + "0735": "NUCLEO_F334R8", + "0740": "NUCLEO_F411RE", + "0742": "NUCLEO_F413ZH", + "0743": "DISCO_F413ZH", + "0744": "NUCLEO_F410RB", + "0745": "NUCLEO_F303RE", + "0746": "DISCO_F303VC", + "0747": "NUCLEO_F303ZE", + "0750": "NUCLEO_F091RC", + "0755": "NUCLEO_F070RB", + "0760": "NUCLEO_L073RZ", + "0764": "DISCO_L475VG_IOT01A", + "0765": "NUCLEO_L476RG", + "0766": "SILICA_SENSOR_NODE", + "0770": "NUCLEO_L432KC", + "0774": "DISCO_L4R9I", + "0775": "NUCLEO_F303K8", + "0776": "NUCLEO_L4R5ZI", + "0777": "NUCLEO_F446RE", + "0778": "NUCLEO_F446ZE", + "0779": "NUCLEO_L433RC_P", + "0780": "NUCLEO_L011K4", + "0781": "NUCLEO_L4R5ZI_P", + "0783": "NUCLEO_L010RB", + "0785": "NUCLEO_F042K6", + "0788": "DISCO_F469NI", + "0790": "NUCLEO_L031K6", + "0791": "NUCLEO_F031K6", + "0795": "DISCO_F429ZI", + "0796": "NUCLEO_F429ZI", + "0797": "NUCLEO_F439ZI", + "0805": "DISCO_L053C8", + "0810": "DISCO_F334C8", + "0812": "NUCLEO_F722ZE", + "0813": "NUCLEO_H743ZI", + "0814": "DISCO_H747I", + "0815": "DISCO_F746NG", + "0816": "NUCLEO_F746ZG", + "0817": "DISCO_F769NI", + "0818": "NUCLEO_F767ZI", + "0820": "DISCO_L476VG", + "0821": "NUCLEO_L452RE", + "0822": "DISCO_L496AG", + "0823": "NUCLEO_L496ZG", + "0824": "LPC824", + "0825": "DISCO_F412ZG", + "0826": "NUCLEO_F412ZG", + "0827": "NUCLEO_L486RG", + "0828": "NUCLEO_L496ZG_P", + "0829": "NUCLEO_L452RE_P", + "0830": "DISCO_F407VG", + "0832": "NUCLEO_H503RB", + "0833": "DISCO_L072CZ_LRWAN1", + "0835": "NUCLEO_F207ZG", + "0836": "NUCLEO_H743ZI2", + "0839": "NUCLEO_WB55RG", + "0840": "B96B_F446VE", + "0841": "NUCLEO_G474RE", + "0842": "NUCLEO_H753ZI", + "0843": "NUCLEO_H745ZI_Q", + "0844": "NUCLEO_H755ZI_Q", + "0847": "DISCO_H745I", + "0849": "NUCLEO_G070RB", + "0850": "NUCLEO_G431RB", + "0851": "NUCLEO_G431KB", + "0852": "NUCLEO_G031K8", + 
"0853": "NUCLEO_F301K8", + "0854": "NUCLEO_L552ZE_Q", + "0855": "DISCO_L562QE", + "0858": "DISCO_H750B", + "0859": "DISCO_H7B3I", + "0860": "NUCLEO_H7A3ZI_Q", + "0863": "DISCO_L4P5G", + "0865": "NUCLEO_L4P5ZG", + "0866": "NUCLEO_WL55JC", + "0871": "NUCLEO_H723ZG", + "0872": "NUCLEO_G0B1RE", + "0875": "DISCO_H735G", + "0878": "NUCLEO_H563ZI", + "0879": "NUCLEO_F756ZG", + "0882": "NUCLEO_G491RE", + "0883": "NUCLEO_WB15CC", + "0884": "DISCO_WB5MMG", + "0885": "B_L4S5I_IOT01A", + "0886": "NUCLEO_U575ZI_Q", + "0887": "B_U585I_IOT02A", + "0888": "NUCLEO_U083RC", + "0900": "SAMR21G18A", + "0905": "SAMD21G18A", + "0910": "SAML21J18A", + "0915": "SAMD21J18A", + "1000": "LPC2368", + "1010": "LPC1768", + "1017": "HRM1017", + "1018": "SSCI824", + "1019": "TY51822R3", + "1022": "RO359B", + "1034": "LPC11U34", + "1040": "LPC11U24", + "1045": "LPC11U24", + "1050": "LPC812", + "1054": "LPC54114", + "1056": "LPC546XX", + "1060": "LPC4088", + "1061": "LPC11U35_401", + "1062": "LPC4088_DM", + "1070": "NRF51822", + "1075": "NRF51822_OTA", + "1080": "OC_MBUINO", + "1090": "RBLAB_NRF51822", + "1093": "RBLAB_BLENANO2", + "1095": "RBLAB_BLENANO", + "1100": "NRF51_DK", + "1101": "NRF52_DK", + "1102": "NRF52840_DK", + "1105": "NRF51_DK_OTA", + "1114": "LPC1114", + "1120": "NRF51_DONGLE", + "1130": "NRF51822_SBK", + "1140": "WALLBOT_BLE", + "1168": "LPC11U68", + "1200": "NCS36510", + "1234": "UBLOX_C027", + "1236": "UBLOX_EVK_ODIN_W2", + "1237": "UBLOX_EVK_NINA_B1", + "1280": "OKDO_ODIN_W2", + "1300": "NUC472-NUTINY", + "1301": "NUMBED", + "1302": "NUMAKER_PFM_NUC472", + "1303": "NUMAKER_PFM_M453", + "1304": "NUMAKER_PFM_M487", + "1305": "NU_PFM_M2351", + "1306": "NUMAKER_PFM_NANO130", + "1307": "NUMAKER_PFM_NUC240", + "1308": "NUMAKER_IOT_M487", + "1309": "NUMAKER_IOT_M252", + "1310": "NUMAKER_IOT_M263A", + "1312": "NU_M2354", + "1313": "NUMAKER_IOT_M467", + "1500": "RHOMBIO_L476DMW1K", + "1549": "LPC1549", + "1600": "LPC4330_M4", + "1605": "LPC4330_M4", + "1900": "CY8CKIT_062_WIFI_BT", + 
"1901": "CY8CPROTO_062_4343W", + "1902": "CY8CKIT_062_BLE", + "1903": "CYW9P62S1_43012EVB_01", + "1904": "CY8CPROTO_063_BLE", + "1905": "CY8CKIT_062S2_4343W", + "1906": "CYW943012P6EVB_01", + "1907": "CY8CPROTO_064_SB", + "1908": "CYW9P62S1_43438EVB_01", + "1909": "CY8CPROTO_062S2_43012", + "190A": "CY8CKIT_064S2_4343W", + "190B": "CY8CKIT_062S2_43012", + "190C": "CY8CPROTO_064B0S3", + "190E": "CY8CPROTO_062S3_4343W", + "190F": "CY8CPROTO_064B0S1_BLE", + "1910": "CY8CKIT064B0S2_4343W", + "1911": "CY8CKIT064S0S2_4343W", + "1912": "CYSBSYSKIT_01", + "2000": "EFM32_G8XX_STK", + "2005": "EFM32HG_STK3400", + "2010": "EFM32WG_STK3800", + "2015": "EFM32GG_STK3700", + "2020": "EFM32LG_STK3600", + "2025": "EFM32TG_STK3300", + "2030": "EFM32ZG_STK3200", + "2035": "EFM32PG_STK3401", + "2040": "EFM32PG12_STK3402", + "2041": "TB_SENSE_12", + "2042": "EFM32GG11_STK3701", + "2043": "EFM32TG11_STK3301", + "2045": "TB_SENSE_1", + "2100": "XBED_LPC1768", + "2201": "WIZWIKI_W7500", + "2202": "WIZWIKI_W7500ECO", + "2203": "WIZWIKI_W7500P", + "2600": "EP_AGORA", + "3001": "LPC11U24", + "3101": "SDT32620B", + "3102": "SDT32625B", + "3103": "SDT51822B", + "3104": "SDT52832B", + "3105": "SDT64B", + "3701": "S5JS100", + "3702": "S3JT100", + "3703": "S1SBP6A", + "4000": "LPC11U35_Y5_MBUG", + "4005": "NRF51822_Y5_MBUG", + "4100": "MOTE_L152RC", + "4337": "LPC4337", + "4500": "DELTA_DFCM_NNN40", + "4501": "DELTA_DFBM_NQ620", + "4502": "DELTA_DFCM_NNN50", + "4600": "REALTEK_RTL8195AM", + "5000": "ARM_MPS2", + "5001": "ARM_IOTSS_BEID", + "5002": "ARM_BEETLE_SOC", + "5003": "ARM_MPS2_M0P", + "5004": "ARM_CM3DS_MPS2", + "5005": "ARM_MPS2_M0DS", + "5006": "ARM_MUSCA_A1", + "5007": "ARM_MUSCA_B1", + "5009": "ARM_MUSCA_S1", + "5020": "HOME_GATEWAY_6LOWPAN", + "5500": "RZ_A1H", + "5501": "GR_LYCHEE", + "5502": "GR_MANGO", + "6000": "FUTURE_SEQUANA", + "6660": "NZ32_SC151", + "7011": "TMPM066", + "7012": "TMPM3H6", + "7013": "TMPM46B", + "7014": "TMPM3HQ", + "7015": "TMPM4G9", + "7020": "TMPM4KN", + 
"7402": "MBED_BR_HAT", + "7778": "TEENSY3_1", + "8001": "UNO_91H", + "8012": "TT_M3HQ", + "8013": "TT_M4G9", + "8080": "FF1705_L151CC", + "8081": "FF_LPC546XX", + "9001": "LPC1347", + "9002": "LPC11U24", + "9003": "LPC1347", + "9004": "ARCH_PRO", + "9006": "LPC11U24", + "9007": "LPC11U35_501", + "9008": "XADOW_M0", + "9009": "ARCH_BLE", + "9010": "ARCH_GPRS", + "9011": "ARCH_MAX", + "9012": "SEEED_TINY_BLE", + "9014": "WIO_3G", + "9015": "WIO_BG96", + "9017": "WIO_EMW3166", + "9020": "UHURU_RAVEN", + "9900": "NRF51_MICROBIT", + "C002": "VK_RZ_A1H", + "C005": "MTM_MTCONNECT04S", + "C006": "VBLUNO51", + "C008": "SAKURAIO_EVB_01", + "C030": "UBLOX_C030_U201", + "C031": "UBLOX_C030_N211", + "C032": "UBLOX_C030_R404M", + "C033": "UBLOX_C030_R410M", + "C034": "UBLOX_C030_S200", + "C035": "UBLOX_C030_R3121", + "C036": "UBLOX_C030_R412M", + "RIOT": "RIOT", }, - u"jlink": { - u"X729475D28G": { - u"platform_name": u"NRF51_DK", - u"jlink_device_name": u"nRF51422_xxAC", - }, - u"X349858SLYN": { - u"platform_name": u"NRF52_DK", - u"jlink_device_name": u"nRF52832_xxaa", - }, - u"FRDM-KL25Z": { - u"platform_name": u"KL25Z", - u"jlink_device_name": u"MKL25Z128xxx4", - }, - u"FRDM-KL27Z": { - u"platform_name": u"KL27Z", - u"jlink_device_name": u"MKL27Z64xxx4", - }, - u"FRDM-KL43Z": { - u"platform_name": u"KL43Z", - u"jlink_device_name": u"MKL43Z256xxx4", - }, + "jlink": { + "X729475D28G": {"platform_name": "NRF51_DK", "jlink_device_name": "nRF51422_xxAC"}, + "X349858SLYN": {"platform_name": "NRF52_DK", "jlink_device_name": "nRF52832_xxaa"}, + "FRDM-KL25Z": {"platform_name": "KL25Z", "jlink_device_name": "MKL25Z128xxx4"}, + "FRDM-KL27Z": {"platform_name": "KL27Z", "jlink_device_name": "MKL27Z64xxx4"}, + "FRDM-KL43Z": {"platform_name": "KL43Z", "jlink_device_name": "MKL43Z256xxx4"}, }, - u"atmel": {u"2241": "SAML21J18A"}, + "atmel": {"2241": "SAML21J18A"}, } @@ -465,21 +450,16 @@ def __init__(self, database_files, primary_database=None): if new_db: for device_type in new_db: - 
duplicates = self._keys[device_type].intersection( - set(new_db[device_type].keys()) - ) + duplicates = self._keys[device_type].intersection(set(new_db[device_type].keys())) duplicates = set(["%s.%s" % (device_type, k) for k in duplicates]) if duplicates: logger.warning( - "Duplicate platform ids found: %s," - " ignoring the definitions from %s", + "Duplicate platform ids found: %s, ignoring the definitions from %s", " ".join(duplicates), db, ) self._dbs[db] = new_db - self._keys[device_type] = self._keys[device_type].union( - new_db[device_type].keys() - ) + self._keys[device_type] = self._keys[device_type].union(new_db[device_type].keys()) else: self._dbs[db] = new_db @@ -517,15 +497,10 @@ def _update_db(self): finally: lock.release() else: - logger.error( - "Could not update platform database: " - "Lock acquire failed after 60 seconds" - ) + logger.error("Could not update platform database: Lock acquire failed after 60 seconds") return False else: - logger.error( - "Can't update platform database: destination database is ambiguous" - ) + logger.error("Can't update platform database: destination database is ambiguous") return False def add(self, id, platform_name, permanent=False, device_type="daplink"): diff --git a/tools/python/mbed_os_tools/detect/windows.py b/tools/python/mbed_os_tools/detect/windows.py index fae8472a5ea..b6af7a596a4 100644 --- a/tools/python/mbed_os_tools/detect/windows.py +++ b/tools/python/mbed_os_tools/detect/windows.py @@ -34,13 +34,7 @@ MAX_COMPOSITE_DEVICE_SUBDEVICES = 8 -MBED_STORAGE_DEVICE_VENDOR_STRINGS = [ - "ven_mbed", - "ven_segger", - "ven_arm_v2m", - "ven_nxp", - "ven_atmel", -] +MBED_STORAGE_DEVICE_VENDOR_STRINGS = ["ven_mbed", "ven_segger", "ven_arm_v2m", "ven_nxp", "ven_atmel"] def _get_values_with_numeric_keys(reg_key): @@ -76,9 +70,7 @@ def _get_cached_mounted_points(): result = [] try: # Open the registry key for mounted devices - mounted_devices_key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\MountedDevices" 
- ) + mounted_devices_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\MountedDevices") for v in _iter_vals(mounted_devices_key): # Valid entries have the following format: \\DosDevices\\D: if "DosDevices" not in v[0]: @@ -95,9 +87,7 @@ def _get_cached_mounted_points(): continue mount_point = mount_point_match.group(1) - logger.debug( - "Mount point %s found for volume %s", mount_point, volume_string - ) + logger.debug("Mount point %s found for volume %s", mount_point, volume_string) result.append({"mount_point": mount_point, "volume_string": volume_string}) except OSError: @@ -109,9 +99,7 @@ def _get_cached_mounted_points(): def _get_disks(): logger.debug("Fetching mounted devices from disk service registry entry") try: - disks_key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum" - ) + disks_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum") disk_strings = _get_values_with_numeric_keys(disks_key) return [v for v in disk_strings if _is_mbed_volume(v)] except OSError: @@ -122,10 +110,7 @@ def _get_disks(): def _get_usb_storage_devices(): logger.debug("Fetching usb storage devices from USBSTOR service registry entry") try: - usbstor_key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, - "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum", - ) + usbstor_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum") return _get_values_with_numeric_keys(usbstor_key) except OSError: logger.debug("No USBSTOR service found, no device can be detected") @@ -148,18 +133,11 @@ def _determine_valid_non_composite_devices(devices, target_id_usb_id_mount_point try: capability = _determine_subdevice_capability(device_key) except CompatibleIDsNotFoundException: - logger.debug( - 'Expected %s to have subkey "CompatibleIDs". Skipping.', - device_key_string, - ) + logger.debug('Expected %s to have subkey "CompatibleIDs". 
Skipping.', device_key_string) continue if capability != "msd": - logger.debug( - "Expected msd device but got %s, skipping %s", - capability, - device["full_path"], - ) + logger.debug("Expected msd device but got %s, skipping %s", capability, device["full_path"]) continue target_id_usb_id = device["entry_key_string"] @@ -169,9 +147,7 @@ def _determine_valid_non_composite_devices(devices, target_id_usb_id_mount_point "mount_point": target_id_usb_id_mount_point_map[target_id_usb_id], } - candidates[target_id_usb_id].update( - _vid_pid_path_to_usb_info(device["vid_pid_path"]) - ) + candidates[target_id_usb_id].update(_vid_pid_path_to_usb_info(device["vid_pid_path"])) except KeyError: pass @@ -220,22 +196,19 @@ def _vid_pid_path_to_usb_info(vid_pid_path): def _iter_keys_as_str(key): - """! Iterate over subkeys of a key returning subkey as string - """ + """! Iterate over subkeys of a key returning subkey as string""" for i in range(winreg.QueryInfoKey(key)[0]): yield winreg.EnumKey(key, i) def _iter_keys(key): - """! Iterate over subkeys of a key - """ + """! Iterate over subkeys of a key""" for i in range(winreg.QueryInfoKey(key)[0]): yield winreg.OpenKey(key, winreg.EnumKey(key, i)) def _iter_vals(key): - """! Iterate over values of a key - """ + """! 
Iterate over values of a key""" logger.debug("_iter_vals %r", key) for i in range(winreg.QueryInfoKey(key)[1]): yield winreg.EnumValue(key, i) @@ -246,8 +219,7 @@ class CompatibleIDsNotFoundException(Exception): class MbedLsToolsWin7(MbedLsToolsBase): - """ mbed-enabled platform detection for Windows - """ + """mbed-enabled platform detection for Windows""" def __init__(self, **kwargs): MbedLsToolsBase.__init__(self, **kwargs) @@ -265,8 +237,7 @@ def find_candidates(self): if match_string in cached_mount_point_info["volume_string"]: # TargetID is a hex string with 10-48 chars target_id_usb_id_match = re.search( - "[&#]([0-9A-Za-z]{10,48})[&#]", - cached_mount_point_info["volume_string"], + "[&#]([0-9A-Za-z]{10,48})[&#]", cached_mount_point_info["volume_string"] ) if not target_id_usb_id_match: logger.debug( @@ -276,16 +247,13 @@ def find_candidates(self): ) continue - target_id_usb_id_mount_point_map[ - target_id_usb_id_match.group(1) - ] = cached_mount_point_info["mount_point"] + target_id_usb_id_mount_point_map[target_id_usb_id_match.group(1)] = cached_mount_point_info[ + "mount_point" + ] disks.pop(index) break - logger.debug( - "target_id_usb_id -> mount_point mapping: %s ", - target_id_usb_id_mount_point_map, - ) + logger.debug("target_id_usb_id -> mount_point mapping: %s ", target_id_usb_id_mount_point_map) non_composite_devices = [] composite_devices = [] for vid_pid_path in usb_storage_devices: @@ -295,10 +263,7 @@ def find_candidates(self): vid_pid_components = vid_pid_path_componets[1].split("&") if len(vid_pid_components) != 2 and len(vid_pid_components) != 3: - logger.debug( - "Skipping USBSTOR device with unusual VID/PID string format '%s'", - vid_pid_path, - ) + logger.debug("Skipping USBSTOR device with unusual VID/PID string format '%s'", vid_pid_path) continue device = { @@ -315,60 +280,40 @@ def find_candidates(self): candidates = defaultdict(dict) candidates.update( - _determine_valid_non_composite_devices( - non_composite_devices, 
target_id_usb_id_mount_point_map - ) + _determine_valid_non_composite_devices(non_composite_devices, target_id_usb_id_mount_point_map) ) # Now we'll find all valid VID/PID and target ID combinations - target_id_usb_ids = set(target_id_usb_id_mount_point_map.keys()) - set( - candidates.keys() - ) + target_id_usb_ids = set(target_id_usb_id_mount_point_map.keys()) - set(candidates.keys()) vid_pid_entry_key_string_map = defaultdict(set) for device in composite_devices: - vid_pid_entry_key_string_map[device["vid_pid_path"]].add( - device["entry_key_string"] - ) + vid_pid_entry_key_string_map[device["vid_pid_path"]].add(device["entry_key_string"]) vid_pid_target_id_usb_id_map = defaultdict(dict) usb_key_string = "SYSTEM\\CurrentControlSet\\Enum\\USB" for vid_pid_path, entry_key_strings in vid_pid_entry_key_string_map.items(): vid_pid_key_string = "%s\\%s" % (usb_key_string, vid_pid_path) try: - vid_pid_key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, vid_pid_key_string - ) - target_id_usb_id_sub_keys = set( - [k for k in _iter_keys_as_str(vid_pid_key)] - ) + vid_pid_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, vid_pid_key_string) + target_id_usb_id_sub_keys = set([k for k in _iter_keys_as_str(vid_pid_key)]) except OSError: logger.debug('VID/PID "%s" not found', vid_pid_key_string) continue - overlapping_target_id_usb_ids = target_id_usb_id_sub_keys.intersection( - set(target_id_usb_ids) - ) + overlapping_target_id_usb_ids = target_id_usb_id_sub_keys.intersection(set(target_id_usb_ids)) for target_id_usb_id in overlapping_target_id_usb_ids: - composite_device_key_string = "%s\\%s" % ( - vid_pid_key_string, - target_id_usb_id, - ) + composite_device_key_string = "%s\\%s" % (vid_pid_key_string, target_id_usb_id) composite_device_key = winreg.OpenKey(vid_pid_key, target_id_usb_id) entry_key_string = target_id_usb_id is_prefix = False try: - new_entry_key_string, _ = winreg.QueryValueEx( - composite_device_key, "ParentIdPrefix" - ) + new_entry_key_string, _ = 
winreg.QueryValueEx(composite_device_key, "ParentIdPrefix") - if any( - e.startswith(new_entry_key_string) for e in entry_key_strings - ): + if any(e.startswith(new_entry_key_string) for e in entry_key_strings): logger.debug( - "Assigning new entry key string of %s to device %s, " - "as found in ParentIdPrefix", + "Assigning new entry key string of %s to device %s, as found in ParentIdPrefix", new_entry_key_string, target_id_usb_id, ) @@ -376,8 +321,7 @@ def find_candidates(self): is_prefix = True except OSError: logger.debug( - 'Device %s did not have a "ParentIdPrefix" key, ' - "sticking with %s as entry key string", + 'Device %s did not have a "ParentIdPrefix" key, sticking with %s as entry key string', composite_device_key_string, target_id_usb_id, ) @@ -387,96 +331,56 @@ def find_candidates(self): "is_prefix": is_prefix, } - for ( - vid_pid_path, - entry_key_string_target_id_usb_id_map, - ) in vid_pid_target_id_usb_id_map.items(): - for composite_device_subdevice_number in range( - MAX_COMPOSITE_DEVICE_SUBDEVICES - ): + for vid_pid_path, entry_key_string_target_id_usb_id_map in vid_pid_target_id_usb_id_map.items(): + for composite_device_subdevice_number in range(MAX_COMPOSITE_DEVICE_SUBDEVICES): subdevice_type_key_string = "%s\\%s&MI_0%d" % ( usb_key_string, vid_pid_path, composite_device_subdevice_number, ) try: - subdevice_type_key = winreg.OpenKey( - winreg.HKEY_LOCAL_MACHINE, subdevice_type_key_string - ) + subdevice_type_key = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, subdevice_type_key_string) except OSError: - logger.debug( - "Composite device subdevice key %s was not found, skipping", - subdevice_type_key_string, - ) + logger.debug("Composite device subdevice key %s was not found, skipping", subdevice_type_key_string) continue - for ( - entry_key_string, - entry_data, - ) in entry_key_string_target_id_usb_id_map.items(): + for entry_key_string, entry_data in entry_key_string_target_id_usb_id_map.items(): if entry_data["is_prefix"]: - 
prepared_entry_key_string = "%s&000%d" % ( - entry_key_string, - composite_device_subdevice_number, - ) + prepared_entry_key_string = "%s&000%d" % (entry_key_string, composite_device_subdevice_number) else: prepared_entry_key_string = entry_key_string - subdevice_key_string = "%s\\%s" % ( - subdevice_type_key_string, - prepared_entry_key_string, - ) + subdevice_key_string = "%s\\%s" % (subdevice_type_key_string, prepared_entry_key_string) try: - subdevice_key = winreg.OpenKey( - subdevice_type_key, prepared_entry_key_string - ) + subdevice_key = winreg.OpenKey(subdevice_type_key, prepared_entry_key_string) except OSError: - logger.debug( - "Sub-device %s not found, skipping", subdevice_key_string - ) + logger.debug("Sub-device %s not found, skipping", subdevice_key_string) continue try: capability = _determine_subdevice_capability(subdevice_key) except CompatibleIDsNotFoundException: - logger.debug( - 'Expected %s to have subkey "CompatibleIDs". Skipping.', - subdevice_key_string, - ) + logger.debug('Expected %s to have subkey "CompatibleIDs". 
Skipping.', subdevice_key_string) continue if capability == "msd": - candidates[entry_data["target_id_usb_id"]][ - "mount_point" - ] = target_id_usb_id_mount_point_map[ + candidates[entry_data["target_id_usb_id"]]["mount_point"] = target_id_usb_id_mount_point_map[ entry_data["target_id_usb_id"] ] - candidates[entry_data["target_id_usb_id"]].update( - _vid_pid_path_to_usb_info(vid_pid_path) - ) + candidates[entry_data["target_id_usb_id"]].update(_vid_pid_path_to_usb_info(vid_pid_path)) elif capability == "serial": try: - device_parameters_key = winreg.OpenKey( - subdevice_key, "Device Parameters" - ) + device_parameters_key = winreg.OpenKey(subdevice_key, "Device Parameters") except OSError: - logger.debug( - 'Key "Device Parameters" not under serial device entry' - ) + logger.debug('Key "Device Parameters" not under serial device entry') continue try: - candidates[entry_data["target_id_usb_id"]][ - "serial_port" - ], _ = winreg.QueryValueEx( + candidates[entry_data["target_id_usb_id"]]["serial_port"], _ = winreg.QueryValueEx( device_parameters_key, "PortName" ) - candidates[entry_data["target_id_usb_id"]].update( - _vid_pid_path_to_usb_info(vid_pid_path) - ) + candidates[entry_data["target_id_usb_id"]].update(_vid_pid_path_to_usb_info(vid_pid_path)) except OSError: - logger.debug( - '"PortName" value not found under serial device entry' - ) + logger.debug('"PortName" value not found under serial device entry') continue final_candidates = [] @@ -508,10 +412,6 @@ def mount_point_ready(self, path): if result: logger.debug("Mount point %s is ready", path) else: - logger.debug( - "Mount point %s reported not ready with error '%s'", - path, - stderr.strip(), - ) + logger.debug("Mount point %s reported not ready with error '%s'", path, stderr.strip()) return result diff --git a/tools/python/mbed_os_tools/test/__init__.py b/tools/python/mbed_os_tools/test/__init__.py index 25045382c5d..b33d3d8ab49 100644 --- a/tools/python/mbed_os_tools/test/__init__.py +++ 
b/tools/python/mbed_os_tools/test/__init__.py @@ -54,32 +54,14 @@ def init_host_test_cli_params() -> Any: """ parser = ArgumentParser() - parser.add_argument( - "-m", - "--micro", - dest="micro", - help="Target microcontroller name", - metavar="MICRO", - ) + parser.add_argument("-m", "--micro", dest="micro", help="Target microcontroller name", metavar="MICRO") - parser.add_argument( - "-p", "--port", dest="port", help="Serial port of the target", metavar="PORT" - ) + parser.add_argument("-p", "--port", dest="port", help="Serial port of the target", metavar="PORT") - parser.add_argument( - "-d", - "--disk", - dest="disk", - help="Target disk (mount point) path", - metavar="DISK_PATH", - ) + parser.add_argument("-d", "--disk", dest="disk", help="Target disk (mount point) path", metavar="DISK_PATH") parser.add_argument( - "-t", - "--target-id", - dest="target_id", - help="Unique Target Id or mbed platform", - metavar="TARGET_ID", + "-t", "--target-id", dest="target_id", help="Unique Target Id or mbed platform", metavar="TARGET_ID" ) parser.add_argument( @@ -113,16 +95,10 @@ def init_host_test_cli_params() -> Any: ) parser.add_argument( - "-f", - "--image-path", - dest="image_path", - help="Path with target's binary image", - metavar="IMAGE_PATH", + "-f", "--image-path", dest="image_path", help="Path with target's binary image", metavar="IMAGE_PATH" ) - copy_methods_str = "Plugin support: " + ", ".join( - host_tests_plugins.get_plugin_caps("CopyMethod") - ) + copy_methods_str = "Plugin support: " + ", ".join(host_tests_plugins.get_plugin_caps("CopyMethod")) parser.add_argument( "-c", @@ -153,15 +129,10 @@ def init_host_test_cli_params() -> Any: metavar="TAG_FILTERS", ) - reset_methods_str = "Plugin support: " + ", ".join( - host_tests_plugins.get_plugin_caps("ResetMethod") - ) + reset_methods_str = "Plugin support: " + ", ".join(host_tests_plugins.get_plugin_caps("ResetMethod")) parser.add_argument( - "-r", - "--reset", - dest="forced_reset_type", - help="Forces 
different type of reset. " + reset_methods_str, + "-r", "--reset", dest="forced_reset_type", help="Forces different type of reset. " + reset_methods_str ) parser.add_argument( @@ -213,25 +184,15 @@ def init_host_test_cli_params() -> Any: ) parser.add_argument( - "--test-cfg", - dest="json_test_configuration", - help="Pass to host test class data about host test configuration", + "--test-cfg", dest="json_test_configuration", help="Pass to host test class data about host test configuration" ) parser.add_argument( - "--list", - dest="list_reg_hts", - default=False, - action="store_true", - help="Prints registered host test and exits", + "--list", dest="list_reg_hts", default=False, action="store_true", help="Prints registered host test and exits" ) parser.add_argument( - "--plugins", - dest="list_plugins", - default=False, - action="store_true", - help="Prints registered plugins and exits", + "--plugins", dest="list_plugins", default=False, action="store_true", help="Prints registered plugins and exits" ) parser.add_argument( @@ -246,20 +207,14 @@ def init_host_test_cli_params() -> Any: ) # Show --fm option only if "fm_agent" module installed - fm_help=SUPPRESS + fm_help = SUPPRESS try: - if importlib.util.find_spec('fm_agent') is not None: - fm_help='Fast Model connection, This option requires mbed-fastmodel-agent module installed, list CONFIGs via "mbedfm"' + if importlib.util.find_spec("fm_agent") is not None: + fm_help = 'Fast Model connection, This option requires mbed-fastmodel-agent module installed, list CONFIGs via "mbedfm"' except ModuleNotFoundError: pass - parser.add_argument( - "--fm", - dest="fast_model_connection", - metavar="CONFIG", - default=None, - help=fm_help, - ) + parser.add_argument("--fm", dest="fast_model_connection", metavar="CONFIG", default=None, help=fm_help) parser.add_argument( "--run", @@ -292,10 +247,7 @@ def init_host_test_cli_params() -> Any: default=60, metavar="NUMBER", type=int, - help=( - "Timeout in sec for readiness of mount 
point and serial port of " - "local or remote device. Default 60 sec" - ), + help=("Timeout in sec for readiness of mount point and serial port of local or remote device. Default 60 sec"), ) parser.add_argument( @@ -320,20 +272,10 @@ def init_host_test_cli_params() -> Any: metavar="BAUD_RATE", ) - parser.add_argument( - "-v", - "--verbose", - dest="verbose", - default=False, - action="store_true", - help="More verbose mode", - ) + parser.add_argument("-v", "--verbose", dest="verbose", default=False, action="store_true", help="More verbose mode") parser.add_argument( - "--serial-output-file", - dest="serial_output_file", - default=None, - help="Save target serial output to this file.", + "--serial-output-file", dest="serial_output_file", default=None, help="Save target serial output to this file." ) parser.add_argument( @@ -344,25 +286,13 @@ def init_host_test_cli_params() -> Any: ) parser.add_argument( - "--version", - dest="version", - default=False, - action="store_true", - help="Prints package version and exits", + "--version", dest="version", default=False, action="store_true", help="Prints package version and exits" ) - parser.add_argument( - "--format", - dest="format", - help="Image file format passed to pyocd (elf, bin, hex, axf...).", - ) + parser.add_argument("--format", dest="format", help="Image file format passed to pyocd (elf, bin, hex, axf...).") - parser.description = ( - """Flash, reset and perform host supervised tests on mbed platforms""" - ) - parser.epilog = ( - """Example: mbedhtrun -d E: -p COM5 -f "test.bin" -C 4 -c shell -m K64F""" - ) + parser.description = """Flash, reset and perform host supervised tests on mbed platforms""" + parser.epilog = """Example: mbedhtrun -d E: -p COM5 -f "test.bin" -C 4 -c shell -m K64F""" if len(sys.argv) == 1: parser.print_help() diff --git a/tools/python/mbed_os_tools/test/__main__.py b/tools/python/mbed_os_tools/test/__main__.py index 83e0eb5744e..d948965d194 100644 --- 
a/tools/python/mbed_os_tools/test/__main__.py +++ b/tools/python/mbed_os_tools/test/__main__.py @@ -14,4 +14,5 @@ # limitations under the License. from .mbed_greentea_cli import main + main() diff --git a/tools/python/mbed_os_tools/test/cmake_handlers.py b/tools/python/mbed_os_tools/test/cmake_handlers.py index b4c15ac5a20..cb28c3a774f 100644 --- a/tools/python/mbed_os_tools/test/cmake_handlers.py +++ b/tools/python/mbed_os_tools/test/cmake_handlers.py @@ -30,9 +30,7 @@ def load_ctest_testsuite(link_target, binary_type=".bin", verbose=False): try: with open(ctest_path) as ctest_file: for line in ctest_file: - line_parse = parse_ctesttestfile_line( - link_target, binary_type, line, verbose=verbose - ) + line_parse = parse_ctesttestfile_line(link_target, binary_type, line, verbose=verbose) if line_parse: test_case, test_case_path = line_parse result[test_case] = test_case_path @@ -63,7 +61,7 @@ def parse_ctesttestfile_line(link_target, binary_type, line, verbose=False): add_test(mbed-test-ticker "mbed-test-ticker") add_test(mbed-test-hello "mbed-test-hello") """ - add_test_pattern = r'[adtesADTES_]{8}\([\w\d_-]+ \"([\w\d_-]+)\"' + add_test_pattern = r"[adtesADTES_]{8}\([\w\d_-]+ \"([\w\d_-]+)\"" re_ptrn = re.compile(add_test_pattern) if line.lower().startswith("add_test"): m = re_ptrn.search(line) @@ -84,11 +82,7 @@ def list_binaries_for_targets(build_dir="./build", verbose_footer=False): """ dir = build_dir sub_dirs = ( - [ - os.path.join(dir, o) - for o in os.listdir(dir) - if os.path.isdir(os.path.join(dir, o)) - ] + [os.path.join(dir, o) for o in os.listdir(dir) if os.path.isdir(os.path.join(dir, o))] if os.path.exists(dir) else [] ) @@ -117,10 +111,7 @@ def count_tests(): gt_logger.gt_log_warn("no tests found in current location") if verbose_footer: - print( - "\nExample: execute 'mbedgt -t TARGET_NAME -n TEST_NAME' to run " - "test TEST_NAME for target TARGET_NAME" - ) + print("\nExample: execute 'mbedgt -t TARGET_NAME -n TEST_NAME' to run test TEST_NAME for 
target TARGET_NAME") def list_binaries_for_builds(test_spec, verbose_footer=False): @@ -130,10 +121,7 @@ def list_binaries_for_builds(test_spec, verbose_footer=False): """ test_builds = test_spec.get_test_builds() for tb in test_builds: - gt_logger.gt_log( - "available tests for build '%s', location '%s'" - % (tb.get_name(), tb.get_path()) - ) + gt_logger.gt_log("available tests for build '%s', location '%s'" % (tb.get_name(), tb.get_path())) for tc in sorted(tb.get_tests().keys()): gt_logger.gt_log_tab("test '%s'" % tc) diff --git a/tools/python/mbed_os_tools/test/host_tests/base_host_test.py b/tools/python/mbed_os_tools/test/host_tests/base_host_test.py index 4ef229b7c04..c2e8bfb32c4 100644 --- a/tools/python/mbed_os_tools/test/host_tests/base_host_test.py +++ b/tools/python/mbed_os_tools/test/host_tests/base_host_test.py @@ -19,30 +19,30 @@ class BaseHostTestAbstract(object): - """ Base class for each host-test test cases with standard - setup, test and teardown set of functions + """Base class for each host-test test cases with standard + setup, test and teardown set of functions """ - name = '' # name of the host test (used for local registration) - __event_queue = None # To main even loop - __dut_event_queue = None # To DUT - script_location = None # Path to source file used to load host test + name = "" # name of the host test (used for local registration) + __event_queue = None # To main even loop + __dut_event_queue = None # To DUT + script_location = None # Path to source file used to load host test __config = {} def __notify_prn(self, text): if self.__event_queue: - self.__event_queue.put(('__notify_prn', text, time())) + self.__event_queue.put(("__notify_prn", text, time())) def __notify_conn_lost(self, text): if self.__event_queue: - self.__event_queue.put(('__notify_conn_lost', text, time())) + self.__event_queue.put(("__notify_conn_lost", text, time())) def __notify_sync_failed(self, text): if self.__event_queue: - 
self.__event_queue.put(('__notify_sync_failed', text, time())) + self.__event_queue.put(("__notify_sync_failed", text, time())) def __notify_dut(self, key, value): - """! Send data over serial to DUT """ + """! Send data over serial to DUT""" if self.__dut_event_queue: self.__dut_event_queue.put((key, value, time())) @@ -51,7 +51,7 @@ def notify_complete(self, result=None): @param result True for success, False failure. If None - no action in main even loop """ if self.__event_queue: - self.__event_queue.put(('__notify_complete', result, time())) + self.__event_queue.put(("__notify_complete", result, time())) def reset_dut(self, value): """ @@ -59,7 +59,7 @@ def reset_dut(self, value): :return: """ if self.__event_queue: - self.__event_queue.put(('__reset_dut', value, time())) + self.__event_queue.put(("__reset_dut", value, time())) def reset(self): """ @@ -76,17 +76,17 @@ def notify_conn_lost(self, text): self.__notify_conn_lost(text) def log(self, text): - """! Send log message to main event loop """ + """! Send log message to main event loop""" self.__notify_prn(text) def send_kv(self, key, value): - """! Send Key-Value data to DUT """ + """! Send Key-Value data to DUT""" self.__notify_dut(key, value) def setup_communication(self, event_queue, dut_event_queue, config={}): - """! Setup queues used for IPC """ - self.__event_queue = event_queue # To main even loop - self.__dut_event_queue = dut_event_queue # To DUT + """! Setup queues used for IPC""" + self.__event_queue = event_queue # To main even loop + self.__dut_event_queue = dut_event_queue # To DUT self.__config = config def get_config_item(self, name): @@ -99,15 +99,15 @@ def get_config_item(self, name): return self.__config.get(name, None) def setup(self): - """! Setup your tests and callbacks """ + """! Setup your tests and callbacks""" raise NotImplementedError def result(self): - """! Returns host test result (True, False or None) """ + """! 
Returns host test result (True, False or None)""" raise NotImplementedError def teardown(self): - """! Blocking always guaranteed test teardown """ + """! Blocking always guaranteed test teardown""" raise NotImplementedError @@ -118,42 +118,43 @@ def event_callback(key): :param key: :return: """ + def decorator(func): func.event_key = key return func + return decorator class HostTestCallbackBase(BaseHostTestAbstract): - def __init__(self): BaseHostTestAbstract.__init__(self) self.__callbacks = {} self.__restricted_callbacks = [ - '__coverage_start', - '__testcase_start', - '__testcase_finish', - '__testcase_summary', - '__exit', - '__exit_event_queue' + "__coverage_start", + "__testcase_start", + "__testcase_finish", + "__testcase_summary", + "__exit", + "__exit_event_queue", ] self.__consume_by_default = [ - '__coverage_start', - '__testcase_start', - '__testcase_finish', - '__testcase_count', - '__testcase_name', - '__testcase_summary', - '__rxd_line', + "__coverage_start", + "__testcase_start", + "__testcase_finish", + "__testcase_count", + "__testcase_name", + "__testcase_summary", + "__rxd_line", ] self.__assign_default_callbacks() self.__assign_decorated_callbacks() def __callback_default(self, key, value, timestamp): - """! Default callback """ - #self.log("CALLBACK: key=%s, value=%s, timestamp=%f"% (key, value, timestamp)) + """! Default callback""" + # self.log("CALLBACK: key=%s, value=%s, timestamp=%f"% (key, value, timestamp)) pass def __default_end_callback(self, key, value, timestamp): @@ -168,7 +169,7 @@ def __default_end_callback(self, key, value, timestamp): :param timestamp: :return: """ - self.notify_complete(value == 'success') + self.notify_complete(value == "success") def _default_mbed_error_callback(self, key: str, value: str, timestamp: float): # End the test immediately on error by default. 
This prevents wasting test runner time @@ -177,11 +178,11 @@ def _default_mbed_error_callback(self, key: str, value: str, timestamp: float): self.notify_complete(False) def __assign_default_callbacks(self): - """! Assigns default callback handlers """ + """! Assigns default callback handlers""" for key in self.__consume_by_default: self.__callbacks[key] = self.__callback_default # Register default handler for event 'end' before assigning user defined callbacks to let users over write it. - self.register_callback('end', self.__default_end_callback) + self.register_callback("end", self.__default_end_callback) self.register_callback("mbed_error", self._default_mbed_error_callback) # Register empty callbacks for the error details, to prevent "orphan event" warnings @@ -204,15 +205,15 @@ def event_handler(self, key, value, timestamp): :return: """ for name, method in inspect.getmembers(self, inspect.ismethod): - key = getattr(method, 'event_key', None) + key = getattr(method, "event_key", None) if key: self.register_callback(key, method) def register_callback(self, key, callback, force=False): """! 
Register callback for a specific event (key: event name) - @param key String with name of the event - @param callback Callable which will be registstered for event "key" - @param force God mode + @param key String with name of the event + @param callback Callable which will be registstered for event "key" + @param force God mode """ # Non-string keys are not allowed @@ -228,26 +229,30 @@ def register_callback(self, key, callback, force=False): if ismethod(callback): arg_count = callback.__code__.co_argcount if arg_count != 4: - err_msg = "callback 'self.%s('%s', ...)' defined with %d arguments"% (callback.__name__, key, arg_count) - err_msg += ", should have 4 arguments: self.%s(self, key, value, timestamp)"% callback.__name__ + err_msg = "callback 'self.%s('%s', ...)' defined with %d arguments" % ( + callback.__name__, + key, + arg_count, + ) + err_msg += ", should have 4 arguments: self.%s(self, key, value, timestamp)" % callback.__name__ raise TypeError(err_msg) # When callback is just a function should have 3 arguments func(key, value, timestamp) if isfunction(callback): arg_count = callback.__code__.co_argcount if arg_count != 3: - err_msg = "callback '%s('%s', ...)' defined with %d arguments"% (callback.__name__, key, arg_count) - err_msg += ", should have 3 arguments: %s(key, value, timestamp)"% callback.__name__ + err_msg = "callback '%s('%s', ...)' defined with %d arguments" % (callback.__name__, key, arg_count) + err_msg += ", should have 3 arguments: %s(key, value, timestamp)" % callback.__name__ raise TypeError(err_msg) if not force: # Event starting with '__' are reserved - if key.startswith('__'): + if key.startswith("__"): raise ValueError("event key starting with '__' are reserved") # We predefined few callbacks you can't use if key in self.__restricted_callbacks: - raise ValueError("we predefined few callbacks you can't use e.g. '%s'"% key) + raise ValueError("we predefined few callbacks you can't use e.g. 
'%s'" % key) self.__callbacks[key] = callback @@ -265,14 +270,13 @@ def teardown(self): class BaseHostTest(HostTestCallbackBase): - __BaseHostTest_Called = False def base_host_test_inited(self): - """ This function will check if BaseHostTest ctor was called - Call to BaseHostTest is required in order to force required - interfaces implementation. - @return Returns True if ctor was called (ok behaviour) + """This function will check if BaseHostTest ctor was called + Call to BaseHostTest is required in order to force required + interfaces implementation. + @return Returns True if ctor was called (ok behaviour) """ return self.__BaseHostTest_Called diff --git a/tools/python/mbed_os_tools/test/host_tests/default_auto.py b/tools/python/mbed_os_tools/test/host_tests/default_auto.py index 2b3781a9938..e7e0b8f4f9e 100644 --- a/tools/python/mbed_os_tools/test/host_tests/default_auto.py +++ b/tools/python/mbed_os_tools/test/host_tests/default_auto.py @@ -18,7 +18,8 @@ class DefaultAuto(BaseHostTest): - """ Simple, basic host test's test runner waiting for serial port - output from MUT, no supervision over test running in MUT is executed. + """Simple, basic host test's test runner waiting for serial port + output from MUT, no supervision over test running in MUT is executed. """ + pass diff --git a/tools/python/mbed_os_tools/test/host_tests/detect_auto.py b/tools/python/mbed_os_tools/test/host_tests/detect_auto.py index c437912e216..066fc781919 100644 --- a/tools/python/mbed_os_tools/test/host_tests/detect_auto.py +++ b/tools/python/mbed_os_tools/test/host_tests/detect_auto.py @@ -16,6 +16,7 @@ import re from .. 
import BaseHostTest + class DetectPlatformTest(BaseHostTest): PATTERN_MICRO_NAME = r"Target '(\w+)'" re_detect_micro_name = re.compile(PATTERN_MICRO_NAME) @@ -26,9 +27,9 @@ def result(self): def test(self, selftest): result = True - c = selftest.mbed.serial_readline() # {{start}} preamble + c = selftest.mbed.serial_readline() # {{start}} preamble if c is None: - return selftest.RESULT_IO_SERIAL + return selftest.RESULT_IO_SERIAL selftest.notify(c.strip()) selftest.notify("HOST: Detecting target name...") @@ -44,14 +45,15 @@ def test(self, selftest): micro_name = m.groups()[0] micro_cmp = selftest.mbed.options.micro == micro_name result = result and micro_cmp - selftest.notify("HOST: MUT Target name '%s', expected '%s'... [%s]"% (micro_name, - selftest.mbed.options.micro, - "OK" if micro_cmp else "FAIL")) + selftest.notify( + "HOST: MUT Target name '%s', expected '%s'... [%s]" + % (micro_name, selftest.mbed.options.micro, "OK" if micro_cmp else "FAIL") + ) for i in range(0, 2): c = selftest.mbed.serial_readline() if c is None: - return selftest.RESULT_IO_SERIAL + return selftest.RESULT_IO_SERIAL selftest.notify(c.strip()) return selftest.RESULT_SUCCESS if result else selftest.RESULT_FAILURE diff --git a/tools/python/mbed_os_tools/test/host_tests/dev_null_auto.py b/tools/python/mbed_os_tools/test/host_tests/dev_null_auto.py index 6b173fdb377..db92c92868b 100644 --- a/tools/python/mbed_os_tools/test/host_tests/dev_null_auto.py +++ b/tools/python/mbed_os_tools/test/host_tests/dev_null_auto.py @@ -15,8 +15,8 @@ from .. 
import BaseHostTest -class DevNullTest(BaseHostTest): +class DevNullTest(BaseHostTest): __result = None def _callback_result(self, key, value, timestamp): diff --git a/tools/python/mbed_os_tools/test/host_tests/echo.py b/tools/python/mbed_os_tools/test/host_tests/echo.py index 81245349c3c..3115f286c14 100644 --- a/tools/python/mbed_os_tools/test/host_tests/echo.py +++ b/tools/python/mbed_os_tools/test/host_tests/echo.py @@ -17,8 +17,8 @@ import uuid from .. import BaseHostTest -class EchoTest(BaseHostTest): +class EchoTest(BaseHostTest): __result = None echo_count = 0 count = 0 diff --git a/tools/python/mbed_os_tools/test/host_tests/hello_auto.py b/tools/python/mbed_os_tools/test/host_tests/hello_auto.py index 587bcbc9e04..3800cf47edb 100644 --- a/tools/python/mbed_os_tools/test/host_tests/hello_auto.py +++ b/tools/python/mbed_os_tools/test/host_tests/hello_auto.py @@ -15,6 +15,7 @@ from .. import BaseHostTest + class HelloTest(BaseHostTest): HELLO_WORLD = "Hello World" diff --git a/tools/python/mbed_os_tools/test/host_tests/rtc_auto.py b/tools/python/mbed_os_tools/test/host_tests/rtc_auto.py index 4a87b209ac3..12bb7ac7b35 100644 --- a/tools/python/mbed_os_tools/test/host_tests/rtc_auto.py +++ b/tools/python/mbed_os_tools/test/host_tests/rtc_auto.py @@ -17,6 +17,7 @@ from time import time, strftime, gmtime from .. 
import BaseHostTest + class RTCTest(BaseHostTest): PATTERN_RTC_VALUE = r"\[(\d+)\] \[(\d+-\d+-\d+ \d+:\d+:\d+ [AaPpMm]{2})\]" re_detect_rtc_value = re.compile(PATTERN_RTC_VALUE) @@ -35,9 +36,9 @@ def _callback_end(self, key, value, timestamp): self.notify_complete() def setup(self): - self.register_callback('timestamp', self._callback_timestamp) - self.register_callback('rtc', self._callback_rtc) - self.register_callback('end', self._callback_end) + self.register_callback("timestamp", self._callback_timestamp) + self.register_callback("rtc", self._callback_rtc) + self.register_callback("end", self._callback_end) def result(self): def check_strftimes_format(t): diff --git a/tools/python/mbed_os_tools/test/host_tests/wait_us_auto.py b/tools/python/mbed_os_tools/test/host_tests/wait_us_auto.py index 80eef240221..ffe58be96ed 100644 --- a/tools/python/mbed_os_tools/test/host_tests/wait_us_auto.py +++ b/tools/python/mbed_os_tools/test/host_tests/wait_us_auto.py @@ -18,30 +18,31 @@ class WaitusTest(BaseHostTest): - """ This test is reading single characters from stdio - and measures time between their occurrences. + """This test is reading single characters from stdio + and measures time between their occurrences. """ + __result = None - DEVIATION = 0.10 # +/-10% + DEVIATION = 0.10 # +/-10% ticks = [] def _callback_exit(self, key, value, timeout): self.notify_complete() def _callback_tick(self, key, value, timestamp): - """ {{tick;%d}}} """ + """{{tick;%d}}}""" self.log("tick! 
" + str(timestamp)) self.ticks.append((key, value, timestamp)) def setup(self): - self.register_callback('exit', self._callback_exit) - self.register_callback('tick', self._callback_tick) + self.register_callback("exit", self._callback_exit) + self.register_callback("tick", self._callback_tick) def result(self): def sub_timestamps(t1, t2): delta = t1 - t2 deviation = abs(delta - 1.0) - #return True if delta > 0 and deviation <= self.DEVIATION else False + # return True if delta > 0 and deviation <= self.DEVIATION else False return deviation <= self.DEVIATION # Check if time between ticks was accurate diff --git a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive.py b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive.py index 7513596e83b..554257d92df 100644 --- a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive.py +++ b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive.py @@ -20,11 +20,11 @@ class ConnectorPrimitiveException(Exception): """ Exception in connector primitive module. """ + pass class ConnectorPrimitive(object): - def __init__(self, name): self.LAST_ERROR = None self.logger = HtrunLogger(name) @@ -37,7 +37,7 @@ def write_kv(self, key, value): @return Returns buffer with K-V message sent to DUT on success, None on failure """ # All Key-Value messages ends with newline character - kv_buff = "{{%s;%s}}"% (key, value) + '\n' + kv_buff = "{{%s;%s}}" % (key, value) + "\n" if self.write(kv_buff): self.logger.prn_txd(kv_buff.rstrip()) @@ -61,12 +61,11 @@ def write(self, payload, log=False): raise NotImplementedError def flush(self): - """! Flush read/write channels of DUT """ + """! Flush read/write channels of DUT""" raise NotImplementedError def reset(self): - """! Reset the dut - """ + """! Reset the dut""" raise NotImplementedError def connected(self): @@ -82,6 +81,5 @@ def error(self): return self.LAST_ERROR def finish(self): - """! 
Handle DUT dtor like (close resource) operations here - """ + """! Handle DUT dtor like (close resource) operations here""" raise NotImplementedError diff --git a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_fastmodel.py b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_fastmodel.py index 90e7e01b936..1f9f4e0d902 100644 --- a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_fastmodel.py +++ b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_fastmodel.py @@ -20,11 +20,11 @@ class FastmodelConnectorPrimitive(ConnectorPrimitive): def __init__(self, name, config): ConnectorPrimitive.__init__(self, name) - self.config = config - self.fm_config = config.get('fm_config', None) - self.platform_name = config.get('platform_name', None) - self.image_path = config.get('image_path', None) - self.polling_timeout = int(config.get('polling_timeout', 60)) + self.config = config + self.fm_config = config.get("fm_config", None) + self.platform_name = config.get("platform_name", None) + self.image_path = config.get("image_path", None) + self.polling_timeout = int(config.get("polling_timeout", 60)) # FastModel Agent tool-kit self.fm_agent_module = None @@ -32,15 +32,13 @@ def __init__(self, name, config): # Initialize FastModel if self.__fastmodel_init(): - # FastModel Launch load and run, equivalent to DUT connection, flashing and reset... self.__fastmodel_launch() self.__fastmodel_load(self.image_path) self.__fastmodel_run() - def __fastmodel_init(self): - """! Initialize models using fm_agent APIs """ + """! 
Initialize models using fm_agent APIs""" self.logger.prn_inf("Initializing FastModel...") try: @@ -52,11 +50,11 @@ def __fastmodel_init(self): raise ConnectorPrimitiveException("Importing failed : %s" % str(e)) try: self.resource = self.fm_agent_module.FastmodelAgent(logger=self.logger) - self.resource.setup_simulator(self.platform_name,self.fm_config) + self.resource.setup_simulator(self.platform_name, self.fm_config) if self.__resource_allocated(): pass except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("module fm_agent, create() failed: %s"% str(e)) + self.logger.prn_err("module fm_agent, create() failed: %s" % str(e)) raise ConnectorPrimitiveException("FastModel Initializing failed as throw SimulatorError!") return True @@ -68,32 +66,32 @@ def __fastmodel_launch(self): if not self.resource.start_simulator(): raise ConnectorPrimitiveException("FastModel running failed, run_simulator() return False!") except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("start_simulator() failed: %s"% str(e)) + self.logger.prn_err("start_simulator() failed: %s" % str(e)) raise ConnectorPrimitiveException("FastModel launching failed as throw FastModelError!") def __fastmodel_run(self): - """! Use fm_agent API to run the FastModel """ + """! Use fm_agent API to run the FastModel""" self.logger.prn_inf("Running FastModel...") try: if not self.resource.run_simulator(): raise ConnectorPrimitiveException("FastModel running failed, run_simulator() return False!") except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("run_simulator() failed: %s"% str(e)) + self.logger.prn_err("run_simulator() failed: %s" % str(e)) raise ConnectorPrimitiveException("FastModel running failed as throw SimulatorError!") def __fastmodel_load(self, filename): """! 
Use fm_agent API to load image to FastModel, this is functional equivalent to flashing DUT""" - self.logger.prn_inf("loading FastModel with image '%s'..."% filename) + self.logger.prn_inf("loading FastModel with image '%s'..." % filename) try: if not self.resource.load_simulator(filename): raise ConnectorPrimitiveException("FastModel loading failed, load_simulator() return False!") except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("run_simulator() failed: %s"% str(e)) + self.logger.prn_err("run_simulator() failed: %s" % str(e)) raise ConnectorPrimitiveException("FastModel loading failed as throw SimulatorError!") def __resource_allocated(self): """! Check whether FastModel resource been allocated - @return True or throw an exception + @return True or throw an exception """ if self.resource: return True @@ -108,11 +106,12 @@ def read(self, count): try: data = self.resource.read() except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("FastmodelConnectorPrimitive.read() failed: %s"% str(e)) + self.logger.prn_err("FastmodelConnectorPrimitive.read() failed: %s" % str(e)) else: return data else: return False + def write(self, payload, log=False): """! Write 'payload' to DUT""" if self.__resource_allocated(): @@ -121,7 +120,7 @@ def write(self, payload, log=False): try: self.resource.write(payload) except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("FastmodelConnectorPrimitive.write() failed: %s"% str(e)) + self.logger.prn_err("FastmodelConnectorPrimitive.write() failed: %s" % str(e)) else: return True else: @@ -132,20 +131,20 @@ def flush(self): pass def connected(self): - """! return whether FastModel is connected """ + """! return whether FastModel is connected""" if self.__resource_allocated(): return self.resource.is_simulator_alive else: return False def finish(self): - """! shutdown the FastModel and release the allocation """ + """! 
shutdown the FastModel and release the allocation""" if self.__resource_allocated(): try: self.resource.shutdown_simulator() self.resource = None except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("FastmodelConnectorPrimitive.finish() failed: %s"% str(e)) + self.logger.prn_err("FastmodelConnectorPrimitive.finish() failed: %s" % str(e)) def reset(self): if self.__resource_allocated(): @@ -153,7 +152,7 @@ def reset(self): if not self.resource.reset_simulator(): self.logger.prn_err("FastModel reset failed, reset_simulator() return False!") except self.fm_agent_module.SimulatorError as e: - self.logger.prn_err("FastmodelConnectorPrimitive.reset() failed: %s"% str(e)) + self.logger.prn_err("FastmodelConnectorPrimitive.reset() failed: %s" % str(e)) def __del__(self): self.finish() diff --git a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_remote.py b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_remote.py index 2396c8e6cf1..36dd06b3707 100644 --- a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_remote.py +++ b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_remote.py @@ -22,25 +22,21 @@ class RemoteConnectorPrimitive(ConnectorPrimitive): def __init__(self, name, config, importer=__import__): ConnectorPrimitive.__init__(self, name) self.config = config - self.target_id = self.config.get('target_id', None) - self.grm_host = config.get('grm_host', None) - self.grm_port = config.get('grm_port', None) + self.target_id = self.config.get("target_id", None) + self.grm_host = config.get("grm_host", None) + self.grm_port = config.get("grm_port", None) if self.grm_port: self.grm_port = int(self.grm_port) - self.grm_module = config.get('grm_module', 'unknown') - self.platform_name = config.get('platform_name', None) - self.baudrate = config.get('baudrate', DEFAULT_BAUD_RATE) - self.image_path = config.get('image_path', None) - self.forced_reset_timeout = 
config.get('forced_reset_timeout', 0) - self.allocate_requirements = { - "platform_name": self.platform_name, - "power_on": True, - "connected": True - } + self.grm_module = config.get("grm_module", "unknown") + self.platform_name = config.get("platform_name", None) + self.baudrate = config.get("baudrate", DEFAULT_BAUD_RATE) + self.image_path = config.get("image_path", None) + self.forced_reset_timeout = config.get("forced_reset_timeout", 0) + self.allocate_requirements = {"platform_name": self.platform_name, "power_on": True, "connected": True} if self.config.get("tags"): self.allocate_requirements["tags"] = {} - for tag in config["tags"].split(','): + for tag in config["tags"].split(","): self.allocate_requirements["tags"][tag] = True # Global Resource Mgr tool-kit @@ -52,7 +48,7 @@ def __init__(self, name, config, importer=__import__): self.__remote_init(importer) def __remote_init(self, importer): - """! Initialize DUT using GRM APIs """ + """! Initialize DUT using GRM APIs""" # We want to load global resource manager module by name from command line (switch --grm) try: @@ -63,8 +59,9 @@ def __remote_init(self, importer): self.remote_module = None return False - self.logger.prn_inf("remote resources initialization: remote(host=%s, port=%s)" % - (self.grm_host, self.grm_port)) + self.logger.prn_inf( + "remote resources initialization: remote(host=%s, port=%s)" % (self.grm_host, self.grm_port) + ) # Connect to remote global resource manager self.client = self.remote_module.create(host=self.grm_host, port=self.grm_port) @@ -93,7 +90,7 @@ def __remote_init(self, importer): return True def __remote_connect(self, baudrate=DEFAULT_BAUD_RATE): - """! Open remote connection to DUT """ + """! 
Open remote connection to DUT""" self.logger.prn_inf("opening connection to platform at baudrate='%s'" % baudrate) if not self.selected_resource: raise Exception("remote resource not exists!") @@ -114,7 +111,7 @@ def __remote_disconnect(self): self.logger.prn_err("RemoteConnectorPrimitive.disconnect() failed, reason: " + str(error)) def __remote_reset(self, delay=0): - """! Use GRM remote API to reset DUT """ + """! Use GRM remote API to reset DUT""" self.logger.prn_inf("remote resources reset...") if not self.selected_resource: raise Exception("remote resource not exists!") @@ -127,11 +124,11 @@ def __remote_reset(self, delay=0): # Post-reset sleep if delay: - self.logger.prn_inf("waiting %.2f sec after reset"% delay) + self.logger.prn_inf("waiting %.2f sec after reset" % delay) time.sleep(delay) def __remote_flashing(self, filename, forceflash=False): - """! Use GRM remote API to flash DUT """ + """! Use GRM remote API to flash DUT""" self.logger.prn_inf("remote resources flashing with '%s'..." % filename) if not self.selected_resource: raise Exception("remote resource not exists!") @@ -143,7 +140,7 @@ def __remote_flashing(self, filename, forceflash=False): raise def read(self, count): - """! Read 'count' bytes of data from DUT """ + """! Read 'count' bytes of data from DUT""" if not self.connected(): raise Exception("remote resource not exists!") data = str() @@ -154,7 +151,7 @@ def read(self, count): return data def write(self, payload, log=False): - """! Write 'payload' to DUT """ + """! 
Write 'payload' to DUT""" if self.connected(): try: self.selected_resource.write(payload) diff --git a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_serial.py b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_serial.py index 8536784fa54..fd5c794d61b 100644 --- a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_serial.py +++ b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_primitive_serial.py @@ -30,11 +30,11 @@ def __init__(self, name, port, baudrate, config): self.read_timeout = 0.01 # 10 milli sec self.write_timeout = 5 self.config = config - self.target_id = self.config.get('target_id', None) - self.mcu = self.config.get('mcu', None) - self.polling_timeout = config.get('polling_timeout', 60) - self.forced_reset_timeout = config.get('forced_reset_timeout', 1) - self.skip_reset = config.get('skip_reset', False) + self.target_id = self.config.get("target_id", None) + self.mcu = self.config.get("mcu", None) + self.polling_timeout = config.get("polling_timeout", 60) + self.forced_reset_timeout = config.get("forced_reset_timeout", 1) + self.skip_reset = config.get("skip_reset", False) self.serial = None # Assume the provided serial port is good. Don't attempt to use the @@ -49,23 +49,30 @@ def __init__(self, name, port, baudrate, config): # Don't pass in the target_id, so that no change in serial port via # auto-discovery happens. self.logger.prn_inf("using specified port '%s'" % (self.port)) - serial_port = HostTestPluginBase().check_serial_port_ready(self.port, target_id=None, timeout=self.polling_timeout) + serial_port = HostTestPluginBase().check_serial_port_ready( + self.port, target_id=None, timeout=self.polling_timeout + ) else: # No serial port was provided. # Fallback to auto-discovery via target_id. 
self.logger.prn_inf("getting serial port via mbedls)") - serial_port = HostTestPluginBase().check_serial_port_ready(self.port, target_id=self.target_id, timeout=self.polling_timeout) + serial_port = HostTestPluginBase().check_serial_port_ready( + self.port, target_id=self.target_id, timeout=self.polling_timeout + ) if serial_port is None: raise ConnectorPrimitiveException("Serial port not ready!") if serial_port != self.port: # Serial port changed for given targetID - self.logger.prn_inf("serial port changed from '%s to '%s')"% (self.port, serial_port)) + self.logger.prn_inf("serial port changed from '%s to '%s')" % (self.port, serial_port)) self.port = serial_port startTime = time.time() - self.logger.prn_inf("serial(port=%s, baudrate=%d, read_timeout=%s, write_timeout=%d)"% (self.port, self.baudrate, self.read_timeout, self.write_timeout)) + self.logger.prn_inf( + "serial(port=%s, baudrate=%d, read_timeout=%s, write_timeout=%d)" + % (self.port, self.baudrate, self.read_timeout, self.write_timeout) + ) while time.time() - startTime < self.polling_timeout: try: # TIMEOUT: While creating Serial object timeout is delibrately passed as 0. Because blocking in Serial.read @@ -74,11 +81,13 @@ def __init__(self, name, port, baudrate, config): self.serial = Serial(self.port, baudrate=self.baudrate, timeout=0, write_timeout=self.write_timeout) except SerialException as e: self.serial = None - self.LAST_ERROR = "connection lost, serial.Serial(%s, %d, %d, %d): %s"% (self.port, + self.LAST_ERROR = "connection lost, serial.Serial(%s, %d, %d, %d): %s" % ( + self.port, self.baudrate, self.read_timeout, self.write_timeout, - str(e)) + str(e), + ) self.logger.prn_err(str(e)) self.logger.prn_err("Retry after 1 sec until %s seconds" % self.polling_timeout) else: @@ -88,29 +97,31 @@ def __init__(self, name, port, baudrate, config): time.sleep(1) def reset_dev_via_serial(self, delay=1): - """! 
Reset device using selected method, calls one of the reset plugins """ - reset_type = self.config.get('reset_type', 'default') + """! Reset device using selected method, calls one of the reset plugins""" + reset_type = self.config.get("reset_type", "default") if not reset_type: - reset_type = 'default' - disk = self.config.get('disk', None) + reset_type = "default" + disk = self.config.get("disk", None) - self.logger.prn_inf("reset device using '%s' plugin..."% reset_type) - result = host_tests_plugins.call_plugin('ResetMethod', + self.logger.prn_inf("reset device using '%s' plugin..." % reset_type) + result = host_tests_plugins.call_plugin( + "ResetMethod", reset_type, serial=self.serial, disk=disk, mcu=self.mcu, target_id=self.target_id, - polling_timeout=self.config.get('polling_timeout')) + polling_timeout=self.config.get("polling_timeout"), + ) # Post-reset sleep if delay: - self.logger.prn_inf("waiting %.2f sec after reset"% delay) + self.logger.prn_inf("waiting %.2f sec after reset" % delay) time.sleep(delay) self.logger.prn_inf("wait for it...") return result def read(self, count) -> bytes: - """! Read data from serial port RX buffer """ + """! Read data from serial port RX buffer""" # TIMEOUT: Since read is called in a loop, wait for self.timeout period before calling serial.read(). See # comment on serial.Serial() call above about timeout. time.sleep(self.read_timeout) @@ -120,21 +131,21 @@ def read(self, count) -> bytes: c = self.serial.read(count) except SerialException as e: self.serial = None - self.LAST_ERROR = "connection lost, serial.read(%d): %s"% (count, str(e)) + self.LAST_ERROR = "connection lost, serial.read(%d): %s" % (count, str(e)) self.logger.prn_err(str(e)) return c def write(self, payload, log=False): - """! Write data to serial port TX buffer """ + """! 
Write data to serial port TX buffer""" try: if self.serial: - self.serial.write(payload.encode('utf-8')) + self.serial.write(payload.encode("utf-8")) if log: self.logger.prn_txd(payload) return True except SerialException as e: self.serial = None - self.LAST_ERROR = "connection lost, serial.write(%d bytes): %s"% (len(payload), str(e)) + self.LAST_ERROR = "connection lost, serial.write(%d bytes): %s" % (len(payload), str(e)) self.logger.prn_err(str(e)) return False diff --git a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_proxy.py b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_proxy.py index 4382acaaf3d..c32d7874d03 100644 --- a/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_proxy.py +++ b/tools/python/mbed_os_tools/test/host_tests_conn_proxy/conn_proxy.py @@ -24,11 +24,12 @@ from .conn_primitive_serial import SerialConnectorPrimitive from .conn_primitive_remote import RemoteConnectorPrimitive from .conn_primitive_fastmodel import FastmodelConnectorPrimitive -from queue import Empty as QueueEmpty # Queue here refers to the module, not a class +from queue import Empty as QueueEmpty # Queue here refers to the module, not a class -class KiViBufferWalker(): - """! Simple auxiliary class used to walk through a buffer and search for KV tokens """ +class KiViBufferWalker: + """! Simple auxiliary class used to walk through a buffer and search for KV tokens""" + def __init__(self): self.KIVI_REGEX = r"\{\{([\w\d_-]+);([^\}]+)\}\}" self.buff = str() @@ -37,16 +38,18 @@ def __init__(self): def append(self, payload: bytes): """! Append stream buffer with payload and process. Returns non-KV strings""" - logger = HtrunLogger('CONN') + logger = HtrunLogger("CONN") try: - self.buff += payload.decode('utf-8') + self.buff += payload.decode("utf-8") except UnicodeDecodeError: - decoded_payload = payload.decode('utf-8','ignore') - logger.prn_wrn(f"UnicodeDecodeError encountered! 
Raw bytes were {payload!r} and they decoded to \"{decoded_payload}\"") + decoded_payload = payload.decode("utf-8", "ignore") + logger.prn_wrn( + f'UnicodeDecodeError encountered! Raw bytes were {payload!r} and they decoded to "{decoded_payload}"' + ) self.buff += decoded_payload - lines = self.buff.split('\n') - self.buff = lines[-1] # remaining + lines = self.buff.split("\n") + self.buff = lines[-1] # remaining lines.pop(-1) # List of line or strings that did not match K,V pair. discarded = [] @@ -60,7 +63,7 @@ def append(self, payload: bytes): match = m.group(0) pos = line.find(match) before = line[:pos] - after = line[pos + len(match):] + after = line[pos + len(match) :] if len(before) > 0: logger.prn_rxd(before) discarded.append(before) @@ -68,7 +71,7 @@ def append(self, payload: bytes): # not a K,V pair part logger.prn_rxd(after) discarded.append(after) - logger.prn_inf("found KV pair in stream: {{%s;%s}}, queued..."% (key, value)) + logger.prn_inf("found KV pair in stream: {{%s;%s}}, queued..." % (key, value)) else: # not a K,V pair discarded.append(line) @@ -77,7 +80,7 @@ def append(self, payload: bytes): return discarded def search(self): - """! Check if there is a KV value in buffer """ + """! 
Check if there is a KV value in buffer""" return len(self.kvl) > 0 def pop_kv(self): @@ -95,63 +98,58 @@ def conn_primitive_factory(conn_resource, config, event_queue, logger): @param logger Host Test logger instance @return Object of type or None if type of connection primitive unknown (conn_resource) """ - polling_timeout = int(config.get('polling_timeout', 60)) - logger.prn_inf("notify event queue about extra %d sec timeout for serial port pooling"%polling_timeout) - event_queue.put(('__timeout', polling_timeout, time())) + polling_timeout = int(config.get("polling_timeout", 60)) + logger.prn_inf("notify event queue about extra %d sec timeout for serial port pooling" % polling_timeout) + event_queue.put(("__timeout", polling_timeout, time())) - if conn_resource == 'serial': + if conn_resource == "serial": # Standard serial port connection # Notify event queue we will wait additional time for serial port to be ready # Get extra configuration related to serial port - port = config.get('port') - baudrate = config.get('baudrate') + port = config.get("port") + baudrate = config.get("baudrate") logger.prn_inf("initializing serial port listener... ") - connector = SerialConnectorPrimitive( - 'SERI', - port, - baudrate, - config=config) - elif conn_resource == 'grm': + connector = SerialConnectorPrimitive("SERI", port, baudrate, config=config) + elif conn_resource == "grm": # Start GRM (Gloabal Resource Mgr) collection logger.prn_inf("initializing global resource mgr listener... 
") - connector = RemoteConnectorPrimitive('GLRM', config=config) - elif conn_resource == 'fmc': + connector = RemoteConnectorPrimitive("GLRM", config=config) + elif conn_resource == "fmc": # Start Fast Model Connection collection logger.prn_inf("initializing fast model connection") - connector = FastmodelConnectorPrimitive('FSMD', config=config) + connector = FastmodelConnectorPrimitive("FSMD", config=config) else: logger.pn_err("unknown connection resource!") - raise NotImplementedError("ConnectorPrimitive factory: unknown connection resource '%s'!"% conn_resource) + raise NotImplementedError("ConnectorPrimitive factory: unknown connection resource '%s'!" % conn_resource) return connector def conn_process(event_queue, dut_event_queue, config: dict[str, Any]): - def __notify_conn_lost(): error_msg = connector.error() connector.finish() - event_queue.put(('__notify_conn_lost', error_msg, time())) + event_queue.put(("__notify_conn_lost", error_msg, time())) def __notify_sync_failed(): error_msg = connector.error() connector.finish() - event_queue.put(('__notify_sync_failed', error_msg, time())) + event_queue.put(("__notify_sync_failed", error_msg, time())) - logger = HtrunLogger('CONN') + logger = HtrunLogger("CONN") logger.prn_inf("starting connection process...") # Send connection process start event to host process # NOTE: Do not send any other Key-Value pairs before this! 
- event_queue.put(('__conn_process_start', 1, time())) + event_queue.put(("__conn_process_start", 1, time())) # Configuration of conn_opriocess behaviour - sync_behavior = int(config.get('sync_behavior', 1)) - sync_timeout = config.get('sync_timeout', 1.0) + sync_behavior = int(config.get("sync_behavior", 1)) + sync_timeout = config.get("sync_timeout", 1.0) sync_predelay: float = config["sync_predelay"] - conn_resource = config.get('conn_resource', 'serial') + conn_resource = config.get("conn_resource", "serial") syncs_sent = 0 # Create connector instance with proper configuration @@ -185,11 +183,11 @@ def __send_sync(timeout=None) -> str | None: if timeout: logger.prn_inf("Reset the part and send in new preamble...") connector.reset() - logger.prn_inf("resending new preamble '%s' after %0.2f sec"% (sync_uuid, timeout)) + logger.prn_inf("resending new preamble '%s' after %0.2f sec" % (sync_uuid, timeout)) else: - logger.prn_inf("sending preamble '%s'"% sync_uuid) + logger.prn_inf("sending preamble '%s'" % sync_uuid) - if connector.write_kv('__sync', sync_uuid): + if connector.write_kv("__sync", sync_uuid): return sync_uuid else: return None @@ -200,7 +198,6 @@ def __send_sync(timeout=None) -> str | None: __notify_conn_lost() return 0 - # Sync packet management allows us to manipulate the way htrun sends __sync packet(s) # With current settings we can force on htrun to send __sync packets in this manner: # @@ -213,7 +210,7 @@ def __send_sync(timeout=None) -> str | None: if sync_behavior > 0: # Sending up to 'n' __sync packets - logger.prn_inf("sending up to %s __sync packets (specified with --sync=%s)"% (sync_behavior, sync_behavior)) + logger.prn_inf("sending up to %s __sync packets (specified with --sync=%s)" % (sync_behavior, sync_behavior)) sync_uuid = __send_sync() if sync_uuid: @@ -224,10 +221,10 @@ def __send_sync(timeout=None) -> str | None: return 0 elif sync_behavior == 0: # No __sync packets - logger.prn_wrn("skipping __sync packet (specified with 
--sync=%s)"% sync_behavior) + logger.prn_wrn("skipping __sync packet (specified with --sync=%s)" % sync_behavior) else: # Send __sync until we go reply - logger.prn_inf("sending multiple __sync packets (specified with --sync=%s)"% sync_behavior) + logger.prn_inf("sending multiple __sync packets (specified with --sync=%s)" % sync_behavior) sync_uuid = __send_sync() if sync_uuid: @@ -239,7 +236,6 @@ def __send_sync(timeout=None) -> str | None: loop_timer = time() while True: - # Check if connection is lost to serial if not connector.connected(): __notify_conn_lost() @@ -249,14 +245,14 @@ def __send_sync(timeout=None) -> str | None: try: (key, value, _) = dut_event_queue.get(block=False) except QueueEmpty: - pass # Check if target sent something + pass # Check if target sent something else: # Return if state machine in host_test_default has finished to end process - if key == '__host_test_finished' and value == True: - logger.prn_inf("received special event '%s' value='%s', finishing"% (key, value)) + if key == "__host_test_finished" and value == True: + logger.prn_inf("received special event '%s' value='%s', finishing" % (key, value)) connector.finish() return 0 - elif key == '__reset': + elif key == "__reset": logger.prn_inf("received special event '%s', resetting dut" % (key)) connector.reset() event_queue.put(("reset_complete", 0, time())) @@ -271,26 +267,28 @@ def __send_sync(timeout=None) -> str | None: # Stream data stream KV parsing print_lines = kv_buffer.append(data) for line in print_lines: - event_queue.put(('__rxd_line', line, time())) + event_queue.put(("__rxd_line", line, time())) while kv_buffer.search(): key, value, timestamp = kv_buffer.pop_kv() if sync_uuid_discovered: event_queue.put((key, value, timestamp)) else: - if key == '__sync': + if key == "__sync": if value in sync_uuid_list: sync_uuid_discovered = True event_queue.put((key, value, time())) idx = sync_uuid_list.index(value) - logger.prn_inf("found SYNC in stream: {{%s;%s}} it is #%d sent, 
queued..."% (key, value, idx)) + logger.prn_inf( + "found SYNC in stream: {{%s;%s}} it is #%d sent, queued..." % (key, value, idx) + ) else: - logger.prn_err("found faulty SYNC in stream: {{%s;%s}}, ignored..."% (key, value)) + logger.prn_err("found faulty SYNC in stream: {{%s;%s}}, ignored..." % (key, value)) logger.prn_inf("Resetting the part and sync timeout to clear out the buffer...") connector.reset() loop_timer = time() else: - logger.prn_wrn("found KV pair in stream before sync: {{%s;%s}}, ignoring..."% (key, value)) + logger.prn_wrn("found KV pair in stream before sync: {{%s;%s}}, ignoring..." % (key, value)) if not sync_uuid_discovered: # Resending __sync after 'sync_timeout' secs (default 1 sec) diff --git a/tools/python/mbed_os_tools/test/host_tests_logger/ht_logger.py b/tools/python/mbed_os_tools/test/host_tests_logger/ht_logger.py index 21b8ccbb324..dc0588b3833 100644 --- a/tools/python/mbed_os_tools/test/host_tests_logger/ht_logger.py +++ b/tools/python/mbed_os_tools/test/host_tests_logger/ht_logger.py @@ -20,22 +20,22 @@ class HtrunLogger(object): - """! Yet another logger flavour """ + """! 
Yet another logger flavour""" + def __init__(self, name): - logging.basicConfig(stream=sys.stdout,format='[+%(relativeCreated)dms][%(name)s]%(message)s', level=logging.DEBUG) + logging.basicConfig( + stream=sys.stdout, format="[+%(relativeCreated)dms][%(name)s]%(message)s", level=logging.DEBUG + ) self.logger = logging.getLogger(name) - self.format_str = '[%(logger_level)s] %(message)s' + self.format_str = "[%(logger_level)s] %(message)s" def __prn_log(self, logger_level, text, timestamp=None): - self.logger.debug(self.format_str% { - 'logger_level' : logger_level, - 'message' : text, - }) + self.logger.debug(self.format_str % {"logger_level": logger_level, "message": text}) - self.prn_dbg = partial(__prn_log, self, 'DBG') - self.prn_wrn = partial(__prn_log, self, 'WRN') - self.prn_err = partial(__prn_log, self, 'ERR') - self.prn_inf = partial(__prn_log, self, 'INF') - self.prn_txt = partial(__prn_log, self, 'TXT') - self.prn_txd = partial(__prn_log, self, 'TXD') - self.prn_rxd = partial(__prn_log, self, 'RXD') + self.prn_dbg = partial(__prn_log, self, "DBG") + self.prn_wrn = partial(__prn_log, self, "WRN") + self.prn_err = partial(__prn_log, self, "ERR") + self.prn_inf = partial(__prn_log, self, "INF") + self.prn_txt = partial(__prn_log, self, "TXT") + self.prn_txd = partial(__prn_log, self, "TXD") + self.prn_rxd = partial(__prn_log, self, "RXD") diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/__init__.py b/tools/python/mbed_os_tools/test/host_tests_plugins/__init__.py index 600d0fea829..5f3afc2b95f 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/__init__.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/__init__.py @@ -39,8 +39,8 @@ from . import module_reset_ublox from . import module_reset_mps2 from . 
import module_copy_mps2 -#import module_copy_jn51xx -#import module_reset_jn51xx +# import module_copy_jn51xx +# import module_reset_jn51xx # Plugin registry instance @@ -64,11 +64,12 @@ HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_pyocd.load_plugin()) HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_ublox.load_plugin()) HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_ublox.load_plugin()) -#HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_jn51xx.load_plugin()) -#HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_jn51xx.load_plugin()) +# HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_copy_jn51xx.load_plugin()) +# HOST_TEST_PLUGIN_REGISTRY.register_plugin(module_reset_jn51xx.load_plugin()) # TODO: extend plugin loading to files with name module_*.py loaded ad-hoc + ############################################################################### # Functional interface for host test plugin registry ############################################################################### @@ -81,6 +82,7 @@ def call_plugin(type, capability, *args, **kwargs): """ return HOST_TEST_PLUGIN_REGISTRY.call_plugin(type, capability, *args, **kwargs) + def get_plugin_caps(type): """! Get list of all capabilities for plugin family with the same type @param type Type of a plugin @@ -88,13 +90,14 @@ def get_plugin_caps(type): """ return HOST_TEST_PLUGIN_REGISTRY.get_plugin_caps(type) + def get_plugin_info(): """! Return plugins information @return Dictionary HOST_TEST_PLUGIN_REGISTRY """ return HOST_TEST_PLUGIN_REGISTRY.get_dict() + def print_plugin_info(): - """! Prints plugins' information in user friendly way - """ + """! 
Prints plugins' information in user friendly way""" print(HOST_TEST_PLUGIN_REGISTRY) diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_plugins.py b/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_plugins.py index 94796fb21e8..6636e3ec87d 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_plugins.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_plugins.py @@ -27,8 +27,8 @@ class HostTestPluginBase: - """! Base class for all plugins used with host tests - """ + """! Base class for all plugins used with host tests""" + ########################################################################### # Interface: ########################################################################### @@ -36,31 +36,29 @@ class HostTestPluginBase: ########################################################################### # Interface attributes defining plugin name, type etc. ########################################################################### - name = "HostTestPluginBase" # Plugin name, can be plugin class name - type = "BasePlugin" # Plugin type: ResetMethod, CopyMethod etc. - capabilities = [] # Capabilities names: what plugin can achieve - # (e.g. reset using some external command line tool) - required_parameters = [] # Parameters required for 'kwargs' in plugin APIs: e.g. self.execute() - stable = False # Determine if plugin is stable and can be used + name = "HostTestPluginBase" # Plugin name, can be plugin class name + type = "BasePlugin" # Plugin type: ResetMethod, CopyMethod etc. + capabilities = [] # Capabilities names: what plugin can achieve + # (e.g. reset using some external command line tool) + required_parameters = [] # Parameters required for 'kwargs' in plugin APIs: e.g. 
self.execute() + stable = False # Determine if plugin is stable and can be used def __init__(self): - """ ctor - """ + """ctor""" # Setting Host Test Logger instance ht_loggers = { - 'BasePlugin' : HtrunLogger('PLGN'), - 'CopyMethod' : HtrunLogger('COPY'), - 'ResetMethod' : HtrunLogger('REST'), + "BasePlugin": HtrunLogger("PLGN"), + "CopyMethod": HtrunLogger("COPY"), + "ResetMethod": HtrunLogger("REST"), } - self.plugin_logger = ht_loggers.get(self.type, ht_loggers['BasePlugin']) + self.plugin_logger = ht_loggers.get(self.type, ht_loggers["BasePlugin"]) ########################################################################### # Interface methods ########################################################################### def setup(self, *args, **kwargs): - """ Configure plugin, this function should be called before plugin execute() method is used. - """ + """Configure plugin, this function should be called before plugin execute() method is used.""" return False def execute(self, capability, *args, **kwargs): @@ -104,8 +102,7 @@ def print_plugin_info(self, text, NL=True): return True def print_plugin_char(self, char): - """ Function prints char on stdout - """ + """Function prints char on stdout""" stdout.write(char) stdout.flush() return True @@ -127,41 +124,47 @@ def check_mount_point_ready(self, destination_disk, init_delay=0.2, loop_delay=0 # Sometimes OSes take a long time to mount devices (up to one minute). # Current pooling time: 120x 500ms = 1 minute - self.print_plugin_info("Waiting up to %d sec for '%s' mount point (current is '%s')..."% (timeout, target_id, destination_disk)) + self.print_plugin_info( + "Waiting up to %d sec for '%s' mount point (current is '%s')..." + % (timeout, target_id, destination_disk) + ) timeout_step = 0.5 timeout = int(timeout / timeout_step) for i in range(timeout): # mbed_os_tools.detect.create() should be done inside the loop. # Otherwise it will loop on same data. 
mbeds = detect.create() - mbed_list = mbeds.list_mbeds() #list of mbeds present + mbed_list = mbeds.list_mbeds() # list of mbeds present # get first item in list with a matching target_id, if present - mbed_target = next((x for x in mbed_list if x['target_id']==target_id), None) + mbed_target = next((x for x in mbed_list if x["target_id"] == target_id), None) if mbed_target is not None: # Only assign if mount point is present and known (not None) - if 'mount_point' in mbed_target and mbed_target['mount_point'] is not None: - new_destination_disk = mbed_target['mount_point'] + if "mount_point" in mbed_target and mbed_target["mount_point"] is not None: + new_destination_disk = mbed_target["mount_point"] break sleep(timeout_step) if new_destination_disk != destination_disk: # Mount point changed, update to new mount point from mbed-ls - self.print_plugin_info("Mount point for '%s' changed from '%s' to '%s'..."% (target_id, destination_disk, new_destination_disk)) + self.print_plugin_info( + "Mount point for '%s' changed from '%s' to '%s'..." + % (target_id, destination_disk, new_destination_disk) + ) destination_disk = new_destination_disk result = True # Check if mount point we've promoted to be valid one (by optional target_id check above) # Let's wait for 30 * loop_delay + init_delay max if not access(destination_disk, F_OK): - self.print_plugin_info("Waiting for mount point '%s' to be ready..."% destination_disk, NL=False) + self.print_plugin_info("Waiting for mount point '%s' to be ready..." 
% destination_disk, NL=False) sleep(init_delay) for i in range(30): if access(destination_disk, F_OK): result = True break sleep(loop_delay) - self.print_plugin_char('.') + self.print_plugin_char(".") else: self.print_plugin_error("mount {} is not accessible ...".format(destination_disk)) result = False @@ -181,23 +184,28 @@ def check_serial_port_ready(self, serial_port, target_id=None, timeout=60): if target_id: # Sometimes OSes take a long time to mount devices (up to one minute). # Current pooling time: 120x 500ms = 1 minute - self.print_plugin_info("Waiting up to %d sec for '%s' serial port (current is '%s')..."% (timeout, target_id, serial_port)) + self.print_plugin_info( + "Waiting up to %d sec for '%s' serial port (current is '%s')..." % (timeout, target_id, serial_port) + ) timeout_step = 0.5 timeout = int(timeout / timeout_step) for i in range(timeout): # mbed_os_tools.detect.create() should be done inside the loop. Otherwise it will loop on same data. mbeds = detect.create() - mbed_list = mbeds.list_mbeds() #list of mbeds present + mbed_list = mbeds.list_mbeds() # list of mbeds present # get first item in list with a matching target_id, if present - mbed_target = next((x for x in mbed_list if x['target_id']==target_id), None) + mbed_target = next((x for x in mbed_list if x["target_id"] == target_id), None) if mbed_target is not None: # Only assign if serial port is present and known (not None) - if 'serial_port' in mbed_target and mbed_target['serial_port'] is not None: - new_serial_port = mbed_target['serial_port'] + if "serial_port" in mbed_target and mbed_target["serial_port"] is not None: + new_serial_port = mbed_target["serial_port"] if new_serial_port != serial_port: # Serial port changed, update to new serial port from mbed-ls - self.print_plugin_info("Serial port for tid='%s' changed from '%s' to '%s'..." % (target_id, serial_port, new_serial_port)) + self.print_plugin_info( + "Serial port for tid='%s' changed from '%s' to '%s'..." 
+ % (target_id, serial_port, new_serial_port) + ) break sleep(timeout_step) else: @@ -217,11 +225,11 @@ def check_parameters(self, capability, *args, **kwargs): if parameter not in kwargs: missing_parameters.append(parameter) if len(missing_parameters): - self.print_plugin_error("execute parameter(s) '%s' missing!"% (', '.join(missing_parameters))) + self.print_plugin_error("execute parameter(s) '%s' missing!" % (", ".join(missing_parameters))) return False return True - def run_command(self, cmd, shell=True, stdin = None): + def run_command(self, cmd, shell=True, stdin=None): """! Runs command from command line. @param cmd Command to execute @param shell True if shell command should be executed (eg. ls, ps) @@ -233,11 +241,11 @@ def run_command(self, cmd, shell=True, stdin = None): try: ret = call(cmd, shell=shell, stdin=stdin) if ret: - self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd)) + self.print_plugin_error("[ret=%d] Command: %s" % (int(ret), cmd)) return False except Exception as e: result = False - self.print_plugin_error("[ret=%d] Command: %s"% (int(ret), cmd)) + self.print_plugin_error("[ret=%d] Command: %s" % (int(ret), cmd)) self.print_plugin_error(str(e)) return result @@ -245,11 +253,7 @@ def mbed_os_info(self): """! 
Returns information about host OS @return Returns tuple with information about OS and host platform """ - result = (os.name, - platform.system(), - platform.release(), - platform.version(), - sys.platform) + result = (os.name, platform.system(), platform.release(), platform.version(), sys.platform) return result def mbed_os_support(self): @@ -259,12 +263,12 @@ def mbed_os_support(self): """ result = None os_info = self.mbed_os_info() - if (os_info[0] == 'nt' and os_info[1] == 'Windows'): - result = 'Windows7' - elif (os_info[0] == 'posix' and os_info[1] == 'Linux' and ('Ubuntu' in os_info[3])): - result = 'Ubuntu' - elif (os_info[0] == 'posix' and os_info[1] == 'Linux'): - result = 'LinuxGeneric' - elif (os_info[0] == 'posix' and os_info[1] == 'Darwin'): - result = 'Darwin' + if os_info[0] == "nt" and os_info[1] == "Windows": + result = "Windows7" + elif os_info[0] == "posix" and os_info[1] == "Linux" and ("Ubuntu" in os_info[3]): + result = "Ubuntu" + elif os_info[0] == "posix" and os_info[1] == "Linux": + result = "LinuxGeneric" + elif os_info[0] == "posix" and os_info[1] == "Darwin": + result = "Darwin" return result diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_registry.py b/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_registry.py index 124361bef42..02a4cd31f35 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_registry.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/host_test_registry.py @@ -13,19 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. + class HostTestRegistry: - """ Simple class used to register and store - host test plugins for further usage + """Simple class used to register and store + host test plugins for further usage """ + # Here we actually store all the plugins - PLUGINS = {} # 'Plugin Name' : Plugin Object + PLUGINS = {} # 'Plugin Name' : Plugin Object def print_error(self, text): """! 
Prints error directly on console @param text Error message text message """ - print("Plugin load failed. Reason: %s"% text) + print("Plugin load failed. Reason: %s" % text) def register_plugin(self, plugin): """! Registers and stores plugin inside registry for further use. @@ -42,13 +44,13 @@ def register_plugin(self, plugin): # TODO: # - check for unique caps for specified type if plugin.name not in self.PLUGINS: - if plugin.setup(): # Setup plugin can be completed without errors + if plugin.setup(): # Setup plugin can be completed without errors self.PLUGINS[plugin.name] = plugin return True else: - self.print_error("%s setup failed"% plugin.name) + self.print_error("%s setup failed" % plugin.name) else: - self.print_error("%s already loaded"% plugin.name) + self.print_error("%s already loaded" % plugin.name) return False def call_plugin(self, type, capability, *args, **kwargs): @@ -82,7 +84,7 @@ def load_plugin(self, name): @param name name of the module to import @return Returns result of __import__ operation """ - mod = __import__("module_%s"% name) + mod = __import__("module_%s" % name) return mod def get_string(self): @@ -90,23 +92,24 @@ def get_string(self): @return Returns string formatted with PrettyTable """ from prettytable import PrettyTable, HEADER - column_names = ['name', 'type', 'capabilities', 'stable', 'os_support', 'required_parameters'] + + column_names = ["name", "type", "capabilities", "stable", "os_support", "required_parameters"] pt = PrettyTable(column_names, junction_char="|", hrules=HEADER) for column in column_names: - pt.align[column] = 'l' + pt.align[column] = "l" for plugin_name in sorted(self.PLUGINS.keys()): name = self.PLUGINS[plugin_name].name type = self.PLUGINS[plugin_name].type stable = self.PLUGINS[plugin_name].stable - capabilities = ', '.join(self.PLUGINS[plugin_name].capabilities) - is_os_supported = self.PLUGINS[plugin_name].is_os_supported() - required_parameters = ', '.join(self.PLUGINS[plugin_name].required_parameters) 
+ capabilities = ", ".join(self.PLUGINS[plugin_name].capabilities) + is_os_supported = self.PLUGINS[plugin_name].is_os_supported() + required_parameters = ", ".join(self.PLUGINS[plugin_name].required_parameters) row = [name, type, capabilities, stable, is_os_supported, required_parameters] pt.add_row(row) return pt.get_string() def get_dict(self): - column_names = ['name', 'type', 'capabilities', 'stable'] + column_names = ["name", "type", "capabilities", "stable"] result = {} for plugin_name in sorted(self.PLUGINS.keys()): name = self.PLUGINS[plugin_name].name @@ -116,12 +119,12 @@ def get_dict(self): is_os_supported = self.PLUGINS[plugin_name].is_os_supported() required_parameters = self.PLUGINS[plugin_name].required_parameters result[plugin_name] = { - "name" : name, - "type" : type, - "stable" : stable, - "capabilities" : capabilities, - "os_support" : is_os_supported, - "required_parameters" : required_parameters + "name": name, + "type": type, + "stable": stable, + "capabilities": capabilities, + "os_support": is_os_supported, + "required_parameters": required_parameters, } return result diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_jn51xx.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_jn51xx.py index 01451d9f076..774efa6ac87 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_jn51xx.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_jn51xx.py @@ -18,34 +18,30 @@ class HostTestPluginCopyMethod_JN51xx(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginCopyMethod_JN51xx' - type = 'CopyMethod' - capabilities = ['jn51xx'] - required_parameters = ['image_path', 'serial'] + name = "HostTestPluginCopyMethod_JN51xx" + type = "CopyMethod" + capabilities = ["jn51xx"] + required_parameters = ["image_path", "serial"] def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def is_os_supported(self, os_name=None): - """! 
In this implementation this plugin only is supporeted under Windows machines - """ + """! In this implementation this plugin only is supporeted under Windows machines""" # If no OS name provided use host OS name if not os_name: os_name = self.mbed_os_support() # This plugin only works on Windows - if os_name and os_name.startswith('Windows'): + if os_name and os_name.startswith("Windows"): return True return False def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ - self.JN51XX_PROGRAMMER = 'JN51xxProgrammer.exe' + """! Configure plugin, this function should be called before plugin execute() method is used.""" + self.JN51XX_PROGRAMMER = "JN51xxProgrammer.exe" return True def execute(self, capability, *args, **kwargs): @@ -57,32 +53,27 @@ def execute(self, capability, *args, **kwargs): @details Each capability e.g. may directly just call some command line program or execute building pythonic function @return Capability call return value """ - if not kwargs['image_path']: + if not kwargs["image_path"]: self.print_plugin_error("Error: image path not specified") return False - if not kwargs['serial']: + if not kwargs["serial"]: self.print_plugin_error("Error: serial port not set (not opened?)") return False result = False if self.check_parameters(capability, *args, **kwargs): - if kwargs['image_path'] and kwargs['serial']: - image_path = os.path.normpath(kwargs['image_path']) - serial_port = kwargs['serial'] - if capability == 'jn51xx': + if kwargs["image_path"] and kwargs["serial"]: + image_path = os.path.normpath(kwargs["image_path"]) + serial_port = kwargs["serial"] + if capability == "jn51xx": # Example: # JN51xxProgrammer.exe -s COM15 -f -V0 - cmd = [self.JN51XX_PROGRAMMER, - '-s', serial_port, - '-f', image_path, - '-V0' - ] + cmd = [self.JN51XX_PROGRAMMER, "-s", serial_port, "-f", image_path, "-V0"] result = self.run_command(cmd) return result def load_plugin(): - """ Returns 
plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_JN51xx() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mbed.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mbed.py index 7aa9972a392..b54b97651b1 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mbed.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mbed.py @@ -19,12 +19,10 @@ class HostTestPluginCopyMethod_Mbed(HostTestPluginBase): - """ Generic flashing method for mbed-enabled devices (by copy) - """ + """Generic flashing method for mbed-enabled devices (by copy)""" def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def generic_mbed_copy(self, image_path, destination_disk): @@ -38,26 +36,25 @@ def generic_mbed_copy(self, image_path, destination_disk): @return Returns True if copy (flashing) was successful """ result = True - if not destination_disk.endswith('/') and not destination_disk.endswith('\\'): - destination_disk += '/' + if not destination_disk.endswith("/") and not destination_disk.endswith("\\"): + destination_disk += "/" try: copy(image_path, destination_disk) except Exception as e: - self.print_plugin_error("shutil.copy('%s', '%s')"% (image_path, destination_disk)) - self.print_plugin_error("Error: %s"% str(e)) + self.print_plugin_error("shutil.copy('%s', '%s')" % (image_path, destination_disk)) + self.print_plugin_error("Error: %s" % str(e)) result = False return result # Plugin interface - name = 'HostTestPluginCopyMethod_Mbed' - type = 'CopyMethod' + name = "HostTestPluginCopyMethod_Mbed" + type = "CopyMethod" stable = True - capabilities = ['shutil', 'default'] - required_parameters = ['image_path', 'destination_disk'] + capabilities = ["shutil", "default"] + required_parameters = ["image_path", "destination_disk"] def setup(self, *args, **kwargs): - """ Configure plugin, this function should be 
called before plugin execute() method is used. - """ + """Configure plugin, this function should be called before plugin execute() method is used.""" return True def execute(self, capability, *args, **kwargs): @@ -65,35 +62,36 @@ def execute(self, capability, *args, **kwargs): @details Each capability may directly just call some command line program or execute building pythonic function @return Returns True if 'capability' operation was successful """ - if not kwargs['image_path']: + if not kwargs["image_path"]: self.print_plugin_error("Error: image path not specified") return False - if not kwargs['destination_disk']: + if not kwargs["destination_disk"]: self.print_plugin_error("Error: destination disk not specified") return False # This optional parameter can be used if TargetID is provided (-t switch) - target_id = kwargs.get('target_id', None) - pooling_timeout = kwargs.get('polling_timeout', 60) + target_id = kwargs.get("target_id", None) + pooling_timeout = kwargs.get("polling_timeout", 60) result = False if self.check_parameters(capability, *args, **kwargs): # Capability 'default' is a dummy capability - if kwargs['image_path'] and kwargs['destination_disk']: - if capability == 'shutil': - image_path = os.path.normpath(kwargs['image_path']) - destination_disk = os.path.normpath(kwargs['destination_disk']) + if kwargs["image_path"] and kwargs["destination_disk"]: + if capability == "shutil": + image_path = os.path.normpath(kwargs["image_path"]) + destination_disk = os.path.normpath(kwargs["destination_disk"]) # Wait for mount point to be ready # if mount point changed according to target_id use new mount point # available in result (_, destination_disk) of check_mount_point_ready - mount_res, destination_disk = self.check_mount_point_ready(destination_disk, target_id=self.target_id, timeout=pooling_timeout) # Blocking + mount_res, destination_disk = self.check_mount_point_ready( + destination_disk, target_id=self.target_id, timeout=pooling_timeout + ) # 
Blocking if mount_res: result = self.generic_mbed_copy(image_path, destination_disk) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_Mbed() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mps2.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mps2.py index abfb55affb9..604f2eaa1b1 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mps2.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_mps2.py @@ -21,15 +21,14 @@ class HostTestPluginCopyMethod_MPS2(HostTestPluginBase): # MPS2 specific flashing / binary setup funcitons - name = 'HostTestPluginCopyMethod_MPS2' - type = 'CopyMethod' + name = "HostTestPluginCopyMethod_MPS2" + type = "CopyMethod" stable = True - capabilities = ['mps2'] - required_parameters = ['image_path', 'destination_disk'] + capabilities = ["mps2"] + required_parameters = ["image_path", "destination_disk"] def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def mps2_copy(self, image_path, destination_disk): @@ -45,23 +44,22 @@ def mps2_copy(self, image_path, destination_disk): """ result = True # Keep the same extension in the test spec and on the MPS2 - _, extension = os.path.splitext(image_path); + _, extension = os.path.splitext(image_path) destination_path = os.path.join(destination_disk, "mbed" + extension) try: copy(image_path, destination_path) # sync command on mac ignores command line arguments. 
- if os.name == 'posix': - result = self.run_command('sync -f %s' % destination_path, shell=True) + if os.name == "posix": + result = self.run_command("sync -f %s" % destination_path, shell=True) except Exception as e: - self.print_plugin_error("shutil.copy('%s', '%s')"% (image_path, destination_path)) - self.print_plugin_error("Error: %s"% str(e)) + self.print_plugin_error("shutil.copy('%s', '%s')" % (image_path, destination_path)) + self.print_plugin_error("Error: %s" % str(e)) result = False return result def setup(self, *args, **kwargs): - """ Configure plugin, this function should be called before plugin execute() method is used. - """ + """Configure plugin, this function should be called before plugin execute() method is used.""" return True def execute(self, capability, *args, **kwargs): @@ -70,36 +68,36 @@ def execute(self, capability, *args, **kwargs): @return Returns True if 'capability' operation was successful """ - - if not kwargs['image_path']: + if not kwargs["image_path"]: self.print_plugin_error("Error: image path not specified") return False - if not kwargs['destination_disk']: + if not kwargs["destination_disk"]: self.print_plugin_error("Error: destination disk not specified") return False # This optional parameter can be used if TargetID is provided (-t switch) - target_id = kwargs.get('target_id', None) - pooling_timeout = kwargs.get('polling_timeout', 60) + target_id = kwargs.get("target_id", None) + pooling_timeout = kwargs.get("polling_timeout", 60) result = False if self.check_parameters(capability, *args, **kwargs): # Capability 'default' is a dummy capability - if kwargs['image_path'] and kwargs['destination_disk']: - if capability == 'mps2': - image_path = os.path.normpath(kwargs['image_path']) - destination_disk = os.path.normpath(kwargs['destination_disk']) + if kwargs["image_path"] and kwargs["destination_disk"]: + if capability == "mps2": + image_path = os.path.normpath(kwargs["image_path"]) + destination_disk = 
os.path.normpath(kwargs["destination_disk"]) # Wait for mount point to be ready # if mount point changed according to target_id use new mount point # available in result (_, destination_disk) of check_mount_point_ready - result, destination_disk = self.check_mount_point_ready(destination_disk, target_id=target_id, timeout=pooling_timeout) # Blocking + result, destination_disk = self.check_mount_point_ready( + destination_disk, target_id=target_id, timeout=pooling_timeout + ) # Blocking if result: result = self.mps2_copy(image_path, destination_disk) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_MPS2() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_pyocd.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_pyocd.py index 77191877f23..7cd9266f77b 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_pyocd.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_pyocd.py @@ -19,26 +19,26 @@ try: from pyocd.core.helpers import ConnectHelper from pyocd.flash.file_programmer import FileProgrammer + PYOCD_PRESENT = True except ImportError: PYOCD_PRESENT = False + class HostTestPluginCopyMethod_pyOCD(HostTestPluginBase): # Plugin interface - name = 'HostTestPluginCopyMethod_pyOCD' - type = 'CopyMethod' + name = "HostTestPluginCopyMethod_pyOCD" + type = "CopyMethod" stable = True - capabilities = ['pyocd'] - required_parameters = ['image_path', 'target_id'] + capabilities = ["pyocd"] + required_parameters = ["image_path", "target_id"] def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def setup(self, *args, **kwargs): - """ Configure plugin, this function should be called before plugin execute() method is used. 
- """ + """Configure plugin, this function should be called before plugin execute() method is used.""" return True def execute(self, capability, *args, **kwargs): @@ -58,16 +58,16 @@ def execute(self, capability, *args, **kwargs): if not self.check_parameters(capability, *args, **kwargs): return False - if not kwargs['image_path']: + if not kwargs["image_path"]: self.print_plugin_error("Error: image path not specified") return False - if not kwargs['target_id']: + if not kwargs["target_id"]: self.print_plugin_error("Error: Target ID") return False - target_id = kwargs['target_id'] - image_path = os.path.normpath(kwargs['image_path']) + target_id = kwargs["target_id"] + image_path = os.path.normpath(kwargs["image_path"]) with ConnectHelper.session_with_chosen_probe(unique_id=target_id, resume_on_disconnect=False) as session: # Performance hack! # Eventually pyOCD will know default clock speed @@ -86,12 +86,11 @@ def execute(self, capability, *args, **kwargs): # Program the file programmer = FileProgrammer(session) - programmer.program(image_path, format=kwargs['format']) + programmer.program(image_path, format=kwargs["format"]) return True def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_pyOCD() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_shell.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_shell.py index c5080ee43c3..68e9e0dc352 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_shell.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_shell.py @@ -20,20 +20,18 @@ class HostTestPluginCopyMethod_Shell(HostTestPluginBase): # Plugin interface - name = 'HostTestPluginCopyMethod_Shell' - type = 'CopyMethod' + name = "HostTestPluginCopyMethod_Shell" + type = "CopyMethod" stable = True - capabilities = ['shell', 'cp', 'copy', 'xcopy'] - required_parameters = ['image_path', 
'destination_disk'] + capabilities = ["shell", "cp", "copy", "xcopy"] + required_parameters = ["image_path", "destination_disk"] def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def setup(self, *args, **kwargs): - """ Configure plugin, this function should be called before plugin execute() method is used. - """ + """Configure plugin, this function should be called before plugin execute() method is used.""" return True def execute(self, capability, *args, **kwargs): @@ -45,41 +43,45 @@ def execute(self, capability, *args, **kwargs): @details Each capability e.g. may directly just call some command line program or execute building pythonic function @return Capability call return value """ - if not kwargs['image_path']: + if not kwargs["image_path"]: self.print_plugin_error("Error: image path not specified") return False - if not kwargs['destination_disk']: + if not kwargs["destination_disk"]: self.print_plugin_error("Error: destination disk not specified") return False # This optional parameter can be used if TargetID is provided (-t switch) - target_id = kwargs.get('target_id', None) - pooling_timeout = kwargs.get('polling_timeout', 60) + target_id = kwargs.get("target_id", None) + pooling_timeout = kwargs.get("polling_timeout", 60) result = False if self.check_parameters(capability, *args, **kwargs): - if kwargs['image_path'] and kwargs['destination_disk']: - image_path = os.path.normpath(kwargs['image_path']) - destination_disk = os.path.normpath(kwargs['destination_disk']) + if kwargs["image_path"] and kwargs["destination_disk"]: + image_path = os.path.normpath(kwargs["image_path"]) + destination_disk = os.path.normpath(kwargs["destination_disk"]) # Wait for mount point to be ready # if mount point changed according to target_id use new mount point # available in result (_, destination_disk) of check_mount_point_ready - mount_res, destination_disk = self.check_mount_point_ready(destination_disk, target_id=target_id, 
timeout=pooling_timeout) # Blocking + mount_res, destination_disk = self.check_mount_point_ready( + destination_disk, target_id=target_id, timeout=pooling_timeout + ) # Blocking if not mount_res: - return result # mount point is not ready return + return result # mount point is not ready return # Prepare correct command line parameter values image_base_name = basename(image_path) destination_path = join(destination_disk, image_base_name) - if capability == 'shell': - if os.name == 'nt': capability = 'copy' - elif os.name == 'posix': capability = 'cp' - if capability == 'cp' or capability == 'copy' or capability == 'copy': + if capability == "shell": + if os.name == "nt": + capability = "copy" + elif os.name == "posix": + capability = "cp" + if capability == "cp" or capability == "copy" or capability == "copy": copy_method = capability cmd = [copy_method, image_path, destination_path] - if os.name == 'posix': + if os.name == "posix": result = self.run_command(cmd, shell=False) - if os.uname()[0] == 'Linux': + if os.uname()[0] == "Linux": result = result and self.run_command(["sync", "-f", destination_path]) else: result = result and self.run_command(["sync"]) @@ -89,6 +91,5 @@ def execute(self, capability, *args, **kwargs): def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_Shell() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_silabs.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_silabs.py index 7cd4e4bf0d2..5c314d89301 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_silabs.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_silabs.py @@ -18,23 +18,20 @@ class HostTestPluginCopyMethod_Silabs(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginCopyMethod_Silabs' - type = 'CopyMethod' - capabilities = ['eACommander', 'eACommander-usb'] - required_parameters = 
['image_path', 'destination_disk'] + name = "HostTestPluginCopyMethod_Silabs" + type = "CopyMethod" + capabilities = ["eACommander", "eACommander-usb"] + required_parameters = ["image_path", "destination_disk"] stable = True def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def setup(self, *args, **kwargs): - """ Configure plugin, this function should be called before plugin execute() method is used. - """ - self.EACOMMANDER_CMD = 'eACommander.exe' + """Configure plugin, this function should be called before plugin execute() method is used.""" + self.EACOMMANDER_CMD = "eACommander.exe" return True def execute(self, capability, *args, **kwargs): @@ -50,23 +47,26 @@ def execute(self, capability, *args, **kwargs): """ result = False if self.check_parameters(capability, *args, **kwargs) is True: - image_path = os.path.normpath(kwargs['image_path']) - destination_disk = os.path.normpath(kwargs['destination_disk']) - if capability == 'eACommander': - cmd = [self.EACOMMANDER_CMD, - '--serialno', destination_disk, - '--flash', image_path, - '--resettype', '2', '--reset'] + image_path = os.path.normpath(kwargs["image_path"]) + destination_disk = os.path.normpath(kwargs["destination_disk"]) + if capability == "eACommander": + cmd = [ + self.EACOMMANDER_CMD, + "--serialno", + destination_disk, + "--flash", + image_path, + "--resettype", + "2", + "--reset", + ] result = self.run_command(cmd) - elif capability == 'eACommander-usb': - cmd = [self.EACOMMANDER_CMD, - '--usb', destination_disk, - '--flash', image_path] + elif capability == "eACommander-usb": + cmd = [self.EACOMMANDER_CMD, "--usb", destination_disk, "--flash", image_path] result = self.run_command(cmd) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_Silabs() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_stlink.py 
b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_stlink.py index 617e3e7d91f..a037c75a834 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_stlink.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_stlink.py @@ -18,34 +18,30 @@ class HostTestPluginCopyMethod_Stlink(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginCopyMethod_Stlink' - type = 'CopyMethod' - capabilities = ['stlink'] - required_parameters = ['image_path'] + name = "HostTestPluginCopyMethod_Stlink" + type = "CopyMethod" + capabilities = ["stlink"] + required_parameters = ["image_path"] def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def is_os_supported(self, os_name=None): - """! In this implementation this plugin only is supporeted under Windows machines - """ + """! In this implementation this plugin only is supporeted under Windows machines""" # If no OS name provided use host OS name if not os_name: os_name = self.mbed_os_support() # This plugin only works on Windows - if os_name and os_name.startswith('Windows'): + if os_name and os_name.startswith("Windows"): return True return False def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ - self.ST_LINK_CLI = 'ST-LINK_CLI.exe' + """! 
Configure plugin, this function should be called before plugin execute() method is used.""" + self.ST_LINK_CLI = "ST-LINK_CLI.exe" return True def execute(self, capability, *args, **kwargs): @@ -61,19 +57,15 @@ def execute(self, capability, *args, **kwargs): """ result = False if self.check_parameters(capability, *args, **kwargs) is True: - image_path = os.path.normpath(kwargs['image_path']) - if capability == 'stlink': + image_path = os.path.normpath(kwargs["image_path"]) + if capability == "stlink": # Example: # ST-LINK_CLI.exe -p "C:\Work\mbed\build\test\DISCO_F429ZI\GCC_ARM\MBED_A1\basic.bin" - cmd = [self.ST_LINK_CLI, - '-p', image_path, '0x08000000', - '-V' - ] + cmd = [self.ST_LINK_CLI, "-p", image_path, "0x08000000", "-V"] result = self.run_command(cmd) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_Stlink() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_ublox.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_ublox.py index 6885ccd021c..d5c4c298bc9 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_ublox.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_copy_ublox.py @@ -18,29 +18,26 @@ class HostTestPluginCopyMethod_ublox(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginCopyMethod_ublox' - type = 'CopyMethod' - capabilities = ['ublox'] - required_parameters = ['image_path'] + name = "HostTestPluginCopyMethod_ublox" + type = "CopyMethod" + capabilities = ["ublox"] + required_parameters = ["image_path"] def is_os_supported(self, os_name=None): - """! In this implementation this plugin only is supporeted under Windows machines - """ + """! 
In this implementation this plugin only is supporeted under Windows machines""" # If no OS name provided use host OS name if not os_name: os_name = self.mbed_os_support() # This plugin only works on Windows - if os_name and os_name.startswith('Windows'): + if os_name and os_name.startswith("Windows"): return True return False def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ - self.FLASH_ERASE = 'FlashErase.exe' + """! Configure plugin, this function should be called before plugin execute() method is used.""" + self.FLASH_ERASE = "FlashErase.exe" return True def execute(self, capability, *args, **kwargs): @@ -56,24 +53,15 @@ def execute(self, capability, *args, **kwargs): """ result = False if self.check_parameters(capability, *args, **kwargs) is True: - image_path = os.path.normpath(kwargs['image_path']) - if capability == 'ublox': + image_path = os.path.normpath(kwargs["image_path"]) + if capability == "ublox": # Example: # FLASH_ERASE -c 2 -s 0xD7000 -l 0x20000 -f "binary_file.bin" - cmd = [self.FLASH_ERASE, - '-c', - 'A', - '-s', - '0xD7000', - '-l', - '0x20000', - '-f', image_path - ] + cmd = [self.FLASH_ERASE, "-c", "A", "-s", "0xD7000", "-l", "0x20000", "-f", image_path] result = self.run_command(cmd) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginCopyMethod_ublox() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_power_cycle_mbed.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_power_cycle_mbed.py index d68b1dce020..1abe94e5969 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_power_cycle_mbed.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_power_cycle_mbed.py @@ -21,22 +21,19 @@ class HostTestPluginPowerCycleResetMethod(HostTestPluginBase): - # Plugin interface - name = 
'HostTestPluginPowerCycleResetMethod' - type = 'ResetMethod' + name = "HostTestPluginPowerCycleResetMethod" + type = "ResetMethod" stable = True - capabilities = ['power_cycle'] - required_parameters = ['target_id', 'device_info'] + capabilities = ["power_cycle"] + required_parameters = ["target_id", "device_info"] def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ + """! Configure plugin, this function should be called before plugin execute() method is used.""" return True def execute(self, capability, *args, **kwargs): @@ -48,19 +45,19 @@ def execute(self, capability, *args, **kwargs): @details Each capability e.g. may directly just call some command line program or execute building pythonic function @return Capability call return value """ - if 'target_id' not in kwargs or not kwargs['target_id']: + if "target_id" not in kwargs or not kwargs["target_id"]: self.print_plugin_error("Error: This plugin requires mbed target_id") return False - if 'device_info' not in kwargs or type(kwargs['device_info']) is not dict: + if "device_info" not in kwargs or type(kwargs["device_info"]) is not dict: self.print_plugin_error("Error: This plugin requires dict parameter 'device_info' passed by the caller.") return False result = False if self.check_parameters(capability, *args, **kwargs) is True: if capability in HostTestPluginPowerCycleResetMethod.capabilities: - target_id = kwargs['target_id'] - device_info = kwargs['device_info'] + target_id = kwargs["target_id"] + device_info = kwargs["device_info"] ret = self.__get_mbed_tas_rm_addr() if ret: ip, port = ret @@ -73,11 +70,13 @@ def __get_mbed_tas_rm_addr(self): :return: """ try: - ip = os.environ['MBED_TAS_RM_IP'] - port = os.environ['MBED_TAS_RM_PORT'] + ip = os.environ["MBED_TAS_RM_IP"] + port = os.environ["MBED_TAS_RM_PORT"] return ip, port except KeyError 
as e: - self.print_plugin_error("HOST: Failed to read environment variable (" + str(e) + "). Can't perform hardware reset.") + self.print_plugin_error( + "HOST: Failed to read environment variable (" + str(e) + "). Can't perform hardware reset." + ) return None @@ -94,25 +93,12 @@ def __hw_reset(self, ip, port, target_id, device_info): switch_off_req = { "name": "switchResource", - "sub_requests": [ - { - "resource_type": "mbed_platform", - "resource_id": target_id, - "switch_command": "OFF" - } - ] + "sub_requests": [{"resource_type": "mbed_platform", "resource_id": target_id, "switch_command": "OFF"}], } - switch_on_req = { "name": "switchResource", - "sub_requests": [ - { - "resource_type": "mbed_platform", - "resource_id": target_id, - "switch_command": "ON" - } - ] + "sub_requests": [{"resource_type": "mbed_platform", "resource_id": target_id, "switch_command": "ON"}], } result = False @@ -123,25 +109,29 @@ def __hw_reset(self, ip, port, target_id, device_info): self.print_plugin_error("HOST: Failed to communicate with TAS RM!") return result - if "error" in switch_off_req['sub_requests'][0]: - self.print_plugin_error("HOST: Failed to reset target. error = %s" % switch_off_req['sub_requests'][0]['error']) + if "error" in switch_off_req["sub_requests"][0]: + self.print_plugin_error( + "HOST: Failed to reset target. 
error = %s" % switch_off_req["sub_requests"][0]["error"] + ) return result def poll_state(required_state): switch_state_req = { "name": "switchResource", "sub_requests": [ - { - "resource_type": "mbed_platform", - "resource_id": target_id, - "switch_command": "STATE" - } - ] + {"resource_type": "mbed_platform", "resource_id": target_id, "switch_command": "STATE"} + ], } resp = self.__run_request(ip, port, switch_state_req) start = time.time() - while resp and (resp['sub_requests'][0]['state'] != required_state or (required_state == 'ON' and - resp['sub_requests'][0]["mount_point"] == "Not Connected")) and (time.time() - start) < 300: + while ( + resp + and ( + resp["sub_requests"][0]["state"] != required_state + or (required_state == "ON" and resp["sub_requests"][0]["mount_point"] == "Not Connected") + ) + and (time.time() - start) < 300 + ): time.sleep(2) resp = self.__run_request(ip, port, resp) return resp @@ -150,8 +140,12 @@ def poll_state(required_state): self.__run_request(ip, port, switch_on_req) resp = poll_state("ON") - if resp and resp['sub_requests'][0]['state'] == 'ON' and resp['sub_requests'][0]["mount_point"] != "Not Connected": - for k, v in resp['sub_requests'][0].viewitems(): + if ( + resp + and resp["sub_requests"][0]["state"] == "ON" + and resp["sub_requests"][0]["mount_point"] != "Not Connected" + ): + for k, v in resp["sub_requests"][0].viewitems(): device_info[k] = v result = True else: @@ -166,7 +160,7 @@ def __run_request(ip, port, request): :param request: :return: """ - headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} + headers = {"Content-type": "application/json", "Accept": "text/plain"} get_resp = requests.get("http://%s:%s/" % (ip, port), data=json.dumps(request), headers=headers) resp = get_resp.json() if get_resp.status_code == 200: @@ -176,6 +170,5 @@ def __run_request(ip, port, request): def load_plugin(): - """! Returns plugin available in this module - """ + """! 
Returns plugin available in this module""" return HostTestPluginPowerCycleResetMethod() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_jn51xx.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_jn51xx.py index 86085f37c31..a603e717b1b 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_jn51xx.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_jn51xx.py @@ -17,36 +17,32 @@ class HostTestPluginResetMethod_JN51xx(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginResetMethod_JN51xx' - type = 'ResetMethod' - capabilities = ['jn51xx'] - required_parameters = ['serial'] + name = "HostTestPluginResetMethod_JN51xx" + type = "ResetMethod" + capabilities = ["jn51xx"] + required_parameters = ["serial"] stable = False def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def is_os_supported(self, os_name=None): - """! In this implementation this plugin only is supporeted under Windows machines - """ + """! In this implementation this plugin only is supporeted under Windows machines""" # If no OS name provided use host OS name if not os_name: os_name = self.mbed_os_support() # This plugin only works on Windows - if os_name and os_name.startswith('Windows'): + if os_name and os_name.startswith("Windows"): return True return False def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ + """! Configure plugin, this function should be called before plugin execute() method is used.""" # Note you need to have eACommander.exe on your system path! - self.JN51XX_PROGRAMMER = 'JN51xxProgrammer.exe' + self.JN51XX_PROGRAMMER = "JN51xxProgrammer.exe" return True def execute(self, capability, *args, **kwargs): @@ -58,30 +54,26 @@ def execute(self, capability, *args, **kwargs): @details Each capability e.g. 
may directly just call some command line program or execute building pythonic function @return Capability call return value """ - if not kwargs['serial']: + if not kwargs["serial"]: self.print_plugin_error("Error: serial port not set (not opened?)") return False result = False if self.check_parameters(capability, *args, **kwargs): - if kwargs['serial']: - if capability == 'jn51xx': + if kwargs["serial"]: + if capability == "jn51xx": # Example: # The device should be automatically reset before the programmer disconnects. # Issuing a command with no file to program or read will put the device into # programming mode and then reset it. E.g. # $ JN51xxProgrammer.exe -s COM5 -V0 # COM5: Detected JN5179 with MAC address 00:15:8D:00:01:24:E0:37 - serial_port = kwargs['serial'] - cmd = [self.JN51XX_PROGRAMMER, - '-s', serial_port, - '-V0' - ] + serial_port = kwargs["serial"] + cmd = [self.JN51XX_PROGRAMMER, "-s", serial_port, "-V0"] result = self.run_command(cmd) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginResetMethod_JN51xx() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mbed.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mbed.py index 8ba6662e199..141aa5fe68c 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mbed.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mbed.py @@ -18,13 +18,12 @@ class HostTestPluginResetMethod_Mbed(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginResetMethod_Mbed' - type = 'ResetMethod' + name = "HostTestPluginResetMethod_Mbed" + type = "ResetMethod" stable = True - capabilities = ['default'] - required_parameters = ['serial'] + capabilities = ["default"] + required_parameters = ["serial"] def __init__(self): """! 
ctor @@ -50,13 +49,12 @@ def _safe_send_break(self, serial): try: serial.break_condition = False except Exception as e: - self.print_plugin_error("Error while doing 'serial.break_condition = False' : %s"% str(e)) + self.print_plugin_error("Error while doing 'serial.break_condition = False' : %s" % str(e)) result = False return result def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ + """! Configure plugin, this function should be called before plugin execute() method is used.""" return True def execute(self, capability, *args, **kwargs): @@ -68,20 +66,19 @@ def execute(self, capability, *args, **kwargs): @details Each capability e.g. may directly just call some command line program or execute building pythonic function @return Capability call return value """ - if not kwargs['serial']: + if not kwargs["serial"]: self.print_plugin_error("Error: serial port not set (not opened?)") return False result = False if self.check_parameters(capability, *args, **kwargs) is True: - if kwargs['serial']: - if capability == 'default': - serial = kwargs['serial'] + if kwargs["serial"]: + if capability == "default": + serial = kwargs["serial"] result = self._safe_send_break(serial) return result def load_plugin(): - """! Returns plugin available in this module - """ + """! Returns plugin available in this module""" return HostTestPluginResetMethod_Mbed() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mps2.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mps2.py index 8767ffb36fb..b0836d2d876 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mps2.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_mps2.py @@ -20,6 +20,7 @@ # Note: This plugin is not fully functional, needs improvements + class HostTestPluginResetMethod_MPS2(HostTestPluginBase): """! 
Plugin used to reset ARM_MPS2 platform @@ -28,26 +29,24 @@ class HostTestPluginResetMethod_MPS2(HostTestPluginBase): """ # Plugin interface - name = 'HostTestPluginResetMethod_MPS2' - type = 'ResetMethod' - capabilities = ['reboot.txt'] - required_parameters = ['disk'] + name = "HostTestPluginResetMethod_MPS2" + type = "ResetMethod" + capabilities = ["reboot.txt"] + required_parameters = ["disk"] def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def touch_file(self, path): - """ Touch file and set timestamp to items - """ - with open(path, 'a'): + """Touch file and set timestamp to items""" + with open(path, "a"): os.utime(path, None) def setup(self, *args, **kwargs): - """ Prepare / configure plugin to work. - This method can receive plugin specific parameters by kwargs and - ignore other parameters which may affect other plugins. + """Prepare / configure plugin to work. + This method can receive plugin specific parameters by kwargs and + ignore other parameters which may affect other plugins. 
""" return True @@ -63,29 +62,30 @@ def execute(self, capability, *args, **kwargs): @return Capability call return value """ result = False - if not kwargs['disk']: + if not kwargs["disk"]: self.print_plugin_error("Error: disk not specified") return False - destination_disk = kwargs.get('disk', None) + destination_disk = kwargs.get("disk", None) # This optional parameter can be used if TargetID is provided (-t switch) - target_id = kwargs.get('target_id', None) - pooling_timeout = kwargs.get('polling_timeout', 60) + target_id = kwargs.get("target_id", None) + pooling_timeout = kwargs.get("polling_timeout", 60) if self.check_parameters(capability, *args, **kwargs) is True: - - if capability == 'reboot.txt': + if capability == "reboot.txt": reboot_file_path = os.path.join(destination_disk, capability) reboot_fh = open(reboot_file_path, "w") reboot_fh.close() # Make sure the file is written to the board before continuing - if os.name == 'posix': - self.run_command('sync -f %s' % reboot_file_path, shell=True) + if os.name == "posix": + self.run_command("sync -f %s" % reboot_file_path, shell=True) time.sleep(3) # sufficient delay for device to boot up - result, destination_disk = self.check_mount_point_ready(destination_disk, target_id=target_id, timeout=pooling_timeout) + result, destination_disk = self.check_mount_point_ready( + destination_disk, target_id=target_id, timeout=pooling_timeout + ) return result + def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginResetMethod_MPS2() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_pyocd.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_pyocd.py index 45b691c92ef..52120d82ecf 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_pyocd.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_pyocd.py @@ -17,19 +17,19 @@ try: from pyocd.core.helpers 
import ConnectHelper + PYOCD_PRESENT = True except ImportError: PYOCD_PRESENT = False class HostTestPluginResetMethod_pyOCD(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginResetMethod_pyOCD' - type = 'ResetMethod' + name = "HostTestPluginResetMethod_pyOCD" + type = "ResetMethod" stable = True - capabilities = ['pyocd'] - required_parameters = ['target_id'] + capabilities = ["pyocd"] + required_parameters = ["target_id"] def __init__(self): """! ctor @@ -41,8 +41,7 @@ def __init__(self): HostTestPluginBase.__init__(self) def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ + """! Configure plugin, this function should be called before plugin execute() method is used.""" return True def execute(self, capability, *args, **kwargs): @@ -60,17 +59,18 @@ def execute(self, capability, *args, **kwargs): ) return False - if not kwargs['target_id']: + if not kwargs["target_id"]: self.print_plugin_error("Error: target_id not set") return False result = False if self.check_parameters(capability, *args, **kwargs) is True: - if kwargs['target_id']: - if capability == 'pyocd': - target_id = kwargs['target_id'] - with ConnectHelper.session_with_chosen_probe(unique_id=target_id, - resume_on_disconnect=False) as session: + if kwargs["target_id"]: + if capability == "pyocd": + target_id = kwargs["target_id"] + with ConnectHelper.session_with_chosen_probe( + unique_id=target_id, resume_on_disconnect=False + ) as session: session.target.reset() session.target.resume() result = True @@ -78,6 +78,5 @@ def execute(self, capability, *args, **kwargs): def load_plugin(): - """! Returns plugin available in this module - """ + """! 
Returns plugin available in this module""" return HostTestPluginResetMethod_pyOCD() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_silabs.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_silabs.py index fe79d3b8767..1ddd9faef2c 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_silabs.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_silabs.py @@ -17,24 +17,21 @@ class HostTestPluginResetMethod_SiLabs(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginResetMethod_SiLabs' - type = 'ResetMethod' - capabilities = ['eACommander', 'eACommander-usb'] - required_parameters = ['disk'] + name = "HostTestPluginResetMethod_SiLabs" + type = "ResetMethod" + capabilities = ["eACommander", "eACommander-usb"] + required_parameters = ["disk"] stable = True def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def setup(self, *args, **kwargs): - """ Configure plugin, this function should be called before plugin execute() method is used. - """ + """Configure plugin, this function should be called before plugin execute() method is used.""" # Note you need to have eACommander.exe on your system path! 
- self.EACOMMANDER_CMD = 'eACommander.exe' + self.EACOMMANDER_CMD = "eACommander.exe" return True def execute(self, capability, *args, **kwargs): @@ -50,26 +47,21 @@ def execute(self, capability, *args, **kwargs): """ result = False if self.check_parameters(capability, *args, **kwargs) is True: - disk = kwargs['disk'].rstrip('/\\') + disk = kwargs["disk"].rstrip("/\\") - if capability == 'eACommander': + if capability == "eACommander": # For this copy method 'disk' will be 'serialno' for eACommander command line parameters # Note: Commands are executed in the order they are specified on the command line - cmd = [self.EACOMMANDER_CMD, - '--serialno', disk, - '--resettype', '2', '--reset',] + cmd = [self.EACOMMANDER_CMD, "--serialno", disk, "--resettype", "2", "--reset"] result = self.run_command(cmd) - elif capability == 'eACommander-usb': + elif capability == "eACommander-usb": # For this copy method 'disk' will be 'usb address' for eACommander command line parameters # Note: Commands are executed in the order they are specified on the command line - cmd = [self.EACOMMANDER_CMD, - '--usb', disk, - '--resettype', '2', '--reset',] + cmd = [self.EACOMMANDER_CMD, "--usb", disk, "--resettype", "2", "--reset"] result = self.run_command(cmd) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginResetMethod_SiLabs() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_stlink.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_stlink.py index 776172468a8..4ed7b8fa5e4 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_stlink.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_stlink.py @@ -22,36 +22,32 @@ class HostTestPluginResetMethod_Stlink(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginResetMethod_Stlink' - type = 'ResetMethod' - capabilities = ['stlink'] + name = 
"HostTestPluginResetMethod_Stlink" + type = "ResetMethod" + capabilities = ["stlink"] required_parameters = [] stable = False def __init__(self): - """ ctor - """ + """ctor""" HostTestPluginBase.__init__(self) def is_os_supported(self, os_name=None): - """! In this implementation this plugin only is supporeted under Windows machines - """ + """! In this implementation this plugin only is supporeted under Windows machines""" # If no OS name provided use host OS name if not os_name: os_name = self.mbed_os_support() # This plugin only works on Windows - if os_name and os_name.startswith('Windows'): + if os_name and os_name.startswith("Windows"): return True return False def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ + """! Configure plugin, this function should be called before plugin execute() method is used.""" # Note you need to have eACommander.exe on your system path! - self.ST_LINK_CLI = 'ST-LINK_CLI.exe' + self.ST_LINK_CLI = "ST-LINK_CLI.exe" return True def create_stlink_fix_file(self, file_path): @@ -67,7 +63,6 @@ def create_stlink_fix_file(self, file_path): self.print_plugin_error("Error opening STLINK-PRESS-ENTER-BUG file") sys.exit(1) - def execute(self, capability, *args, **kwargs): """! 
Executes capability by name @@ -81,21 +76,20 @@ def execute(self, capability, *args, **kwargs): """ result = False if self.check_parameters(capability, *args, **kwargs) is True: - if capability == 'stlink': + if capability == "stlink": # Example: # ST-LINK_CLI.exe -Rst -Run - cmd = [self.ST_LINK_CLI, - '-Rst', '-Run'] - + cmd = [self.ST_LINK_CLI, "-Rst", "-Run"] + # Due to the ST-LINK bug, we must press enter after burning the target # We do this here automatically by passing a file which contains an `ENTER` (line separator) # to the ST-LINK CLI as `stdin` for the running process enter_file_path = os.path.join(tempfile.gettempdir(), FIX_FILE_NAME) self.create_stlink_fix_file(enter_file_path) try: - with open(enter_file_path, 'r') as fix_file: - stdin_arg = kwargs.get('stdin', fix_file) - result = self.run_command(cmd, stdin = stdin_arg) + with open(enter_file_path, "r") as fix_file: + stdin_arg = kwargs.get("stdin", fix_file) + result = self.run_command(cmd, stdin=stdin_arg) except (OSError, IOError): self.print_plugin_error("Error opening STLINK-PRESS-ENTER-BUG file") sys.exit(1) @@ -104,6 +98,5 @@ def execute(self, capability, *args, **kwargs): def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginResetMethod_Stlink() diff --git a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_ublox.py b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_ublox.py index 1eab914bd04..14168344128 100644 --- a/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_ublox.py +++ b/tools/python/mbed_os_tools/test/host_tests_plugins/module_reset_ublox.py @@ -17,31 +17,28 @@ class HostTestPluginResetMethod_ublox(HostTestPluginBase): - # Plugin interface - name = 'HostTestPluginResetMethod_ublox' - type = 'ResetMethod' - capabilities = ['ublox'] + name = "HostTestPluginResetMethod_ublox" + type = "ResetMethod" + capabilities = ["ublox"] required_parameters = [] 
stable = False def is_os_supported(self, os_name=None): - """! In this implementation this plugin only is supporeted under Windows machines - """ + """! In this implementation this plugin only is supporeted under Windows machines""" # If no OS name provided use host OS name if not os_name: os_name = self.mbed_os_support() # This plugin only works on Windows - if os_name and os_name.startswith('Windows'): + if os_name and os_name.startswith("Windows"): return True return False def setup(self, *args, **kwargs): - """! Configure plugin, this function should be called before plugin execute() method is used. - """ + """! Configure plugin, this function should be called before plugin execute() method is used.""" # Note you need to have jlink.exe on your system path! - self.JLINK = 'jlink.exe' + self.JLINK = "jlink.exe" return True def execute(self, capability, *args, **kwargs): @@ -57,17 +54,14 @@ def execute(self, capability, *args, **kwargs): """ result = False if self.check_parameters(capability, *args, **kwargs) is True: - if capability == 'ublox': + if capability == "ublox": # Example: # JLINK.exe --CommanderScript aCommandFile - cmd = [self.JLINK, - '-CommanderScript', - r'reset.jlink'] + cmd = [self.JLINK, "-CommanderScript", r"reset.jlink"] result = self.run_command(cmd) return result def load_plugin(): - """ Returns plugin available in this module - """ + """Returns plugin available in this module""" return HostTestPluginResetMethod_ublox() diff --git a/tools/python/mbed_os_tools/test/host_tests_registry/host_registry.py b/tools/python/mbed_os_tools/test/host_tests_registry/host_registry.py index 4e695239257..4eef6a6a2ba 100644 --- a/tools/python/mbed_os_tools/test/host_tests_registry/host_registry.py +++ b/tools/python/mbed_os_tools/test/host_tests_registry/host_registry.py @@ -35,8 +35,8 @@ def load_source(module_name, file_path): class HostRegistry: - """ Class stores registry with host tests and objects representing them - """ + """Class stores registry with 
host tests and objects representing them""" + HOST_TESTS = {} # Map between host_test_name -> host_test_object def register_host_test(self, ht_name, ht_object): @@ -72,41 +72,38 @@ def is_host_test(self, ht_name): @return True if ht_name is registered (available), else False """ - return (ht_name in self.HOST_TESTS and - self.HOST_TESTS[ht_name] is not None) + return ht_name in self.HOST_TESTS and self.HOST_TESTS[ht_name] is not None def table(self, verbose=False): """! Prints list of registered host test classes (by name) - @Detail For devel & debug purposes + @Detail For devel & debug purposes """ from prettytable import PrettyTable, HEADER - column_names = ['name', 'class', 'origin'] + + column_names = ["name", "class", "origin"] pt = PrettyTable(column_names, junction_char="|", hrules=HEADER) for column in column_names: - pt.align[column] = 'l' + pt.align[column] = "l" for name, host_test in sorted(self.HOST_TESTS.items()): cls_str = str(host_test.__class__) if host_test.script_location: src_path = host_test.script_location else: - src_path = 'mbed-host-tests' + src_path = "mbed-host-tests" pt.add_row([name, cls_str, src_path]) return pt.get_string() def register_from_path(self, path, verbose=False): - """ Enumerates and registers locally stored host tests - Host test are derived from mbed_os_tools.test.BaseHostTest classes + """Enumerates and registers locally stored host tests + Host test are derived from mbed_os_tools.test.BaseHostTest classes """ if path: path = path.strip('"') if verbose: print("HOST: Inspecting '%s' for local host tests..." 
% path) if exists(path) and isdir(path): - python_modules = [ - f for f in listdir(path) - if isfile(join(path, f)) and f.endswith(".py") - ] + python_modules = [f for f in listdir(path) if isfile(join(path, f)) and f.endswith(".py")] for module_file in python_modules: self._add_module_to_registry(path, module_file, verbose) @@ -115,21 +112,14 @@ def _add_module_to_registry(self, path, module_file, verbose): try: mod = load_source(module_name, abspath(join(path, module_file))) except Exception as e: - print( - "HOST: Error! While loading local host test module '%s'" - % join(path, module_file) - ) + print("HOST: Error! While loading local host test module '%s'" % join(path, module_file)) print("HOST: %s" % str(e)) return if verbose: print("HOST: Loading module '%s': %s" % (module_file, str(mod))) for name, obj in getmembers(mod): - if ( - isclass(obj) and - issubclass(obj, BaseHostTest) and - str(obj) != str(BaseHostTest) - ): + if isclass(obj) and issubclass(obj, BaseHostTest) and str(obj) != str(BaseHostTest): if obj.name: host_test_name = obj.name else: @@ -137,14 +127,6 @@ def _add_module_to_registry(self, path, module_file, verbose): host_test_cls = obj host_test_cls.script_location = join(path, module_file) if verbose: - print( - "HOST: Found host test implementation: %s -|> %s" - % (str(obj), str(BaseHostTest)) - ) - print( - "HOST: Registering '%s' as '%s'" - % (str(host_test_cls), host_test_name) - ) - self.register_host_test( - host_test_name, host_test_cls() - ) + print("HOST: Found host test implementation: %s -|> %s" % (str(obj), str(BaseHostTest))) + print("HOST: Registering '%s' as '%s'" % (str(host_test_cls), host_test_name)) + self.register_host_test(host_test_name, host_test_cls()) diff --git a/tools/python/mbed_os_tools/test/host_tests_runner/host_test.py b/tools/python/mbed_os_tools/test/host_tests_runner/host_test.py index 0cb5920ed75..2fc960f88c3 100644 --- a/tools/python/mbed_os_tools/test/host_tests_runner/host_test.py +++ 
b/tools/python/mbed_os_tools/test/host_tests_runner/host_test.py @@ -19,34 +19,34 @@ class HostTestResults(object): - """! Test results set by host tests """ + """! Test results set by host tests""" def enum(self, **enums): - return type('Enum', (), enums) + return type("Enum", (), enums) def __init__(self): self.TestResults = self.enum( - RESULT_SUCCESS = 'success', - RESULT_FAILURE = 'failure', - RESULT_ERROR = 'error', - RESULT_END = 'end', - RESULT_UNDEF = 'undefined', - RESULT_TIMEOUT = 'timeout', - RESULT_IOERR_COPY = "ioerr_copy", - RESULT_IOERR_DISK = "ioerr_disk", - RESULT_IO_SERIAL = 'ioerr_serial', - RESULT_NO_IMAGE = 'no_image', - RESULT_NOT_DETECTED = "not_detected", - RESULT_MBED_ASSERT = "mbed_assert", - RESULT_PASSIVE = "passive", - RESULT_BUILD_FAILED = 'build_failed', - RESULT_SYNC_FAILED = 'sync_failed' + RESULT_SUCCESS="success", + RESULT_FAILURE="failure", + RESULT_ERROR="error", + RESULT_END="end", + RESULT_UNDEF="undefined", + RESULT_TIMEOUT="timeout", + RESULT_IOERR_COPY="ioerr_copy", + RESULT_IOERR_DISK="ioerr_disk", + RESULT_IO_SERIAL="ioerr_serial", + RESULT_NO_IMAGE="no_image", + RESULT_NOT_DETECTED="not_detected", + RESULT_MBED_ASSERT="mbed_assert", + RESULT_PASSIVE="passive", + RESULT_BUILD_FAILED="build_failed", + RESULT_SYNC_FAILED="sync_failed", ) # Magically creates attributes in this class corresponding # to RESULT_ elements in self.TestResults enum for attr in self.TestResults.__dict__: - if attr.startswith('RESULT_'): + if attr.startswith("RESULT_"): setattr(self, attr, self.TestResults.__dict__[attr]) # Indexes of this list define string->int mapping between @@ -66,32 +66,31 @@ def __init__(self): self.TestResults.RESULT_MBED_ASSERT, self.TestResults.RESULT_PASSIVE, self.TestResults.RESULT_BUILD_FAILED, - self.TestResults.RESULT_SYNC_FAILED + self.TestResults.RESULT_SYNC_FAILED, ] def get_test_result_int(self, test_result_str): - """! Maps test result string to unique integer """ + """! 
Maps test result string to unique integer""" if test_result_str in self.TestResultsList: return self.TestResultsList.index(test_result_str) return -1 def __getitem__(self, test_result_str): - """! Returns numerical result code """ + """! Returns numerical result code""" return self.get_test_result_int(test_result_str) class Test(HostTestResults): - """ Base class for host test's test runner - """ + """Base class for host test's test runner""" + def __init__(self, options): - """ ctor - """ + """ctor""" HostTestResults.__init__(self) self.mbed = Mbed(options) def run(self): - """ Test runner for host test. This function will start executing - test and forward test result via serial port to test suite + """Test runner for host test. This function will start executing + test and forward test result via serial port to test suite """ pass @@ -112,17 +111,15 @@ def print_result(self, result): """! Test result unified printing function @param result Should be a member of HostTestResults.RESULT_* enums """ - self.notify("{{%s}}\n"% result) - self.notify("{{%s}}\n"% self.RESULT_END) + self.notify("{{%s}}\n" % result) + self.notify("{{%s}}\n" % self.RESULT_END) def finish(self): - """ dctor for this class, finishes tasks and closes resources - """ + """dctor for this class, finishes tasks and closes resources""" pass def get_hello_string(self): - """ Hello string used as first print - """ + """Hello string used as first print""" return "host test executor ver. " + __version__ @@ -130,5 +127,6 @@ class DefaultTestSelectorBase(Test): """! 
Test class with serial port initialization @details This is a base for other test selectors, initializes """ + def __init__(self, options): Test.__init__(self, options=options) diff --git a/tools/python/mbed_os_tools/test/host_tests_runner/host_test_default.py b/tools/python/mbed_os_tools/test/host_tests_runner/host_test_default.py index 2c043a60c18..673a571c5a2 100644 --- a/tools/python/mbed_os_tools/test/host_tests_runner/host_test_default.py +++ b/tools/python/mbed_os_tools/test/host_tests_runner/host_test_default.py @@ -37,23 +37,24 @@ from ..host_tests_logger import HtrunLogger from ..host_tests_conn_proxy import conn_process from ..host_tests_toolbox.host_functional import handle_send_break_cmd -if (sys.version_info > (3, 0)): + +if sys.version_info > (3, 0): from queue import Empty as QueueEmpty else: from Queue import Empty as QueueEmpty class DefaultTestSelector(DefaultTestSelectorBase): - """! Select default host_test supervision (replaced after auto detection) """ - RESET_TYPE_SW_RST = "software_reset" - RESET_TYPE_HW_RST = "hardware_reset" + """! Select default host_test supervision (replaced after auto detection)""" + + RESET_TYPE_SW_RST = "software_reset" + RESET_TYPE_HW_RST = "hardware_reset" def __init__(self, options): - """! ctor - """ + """! 
ctor""" self.options = options - self.logger = HtrunLogger('HTST') + self.logger = HtrunLogger("HTST") self.registry = HostRegistry() self.registry.register_host_test("echo", EchoTest()) @@ -69,15 +70,13 @@ def __init__(self, options): if options: if options.enum_host_tests: for path in options.enum_host_tests: - self.registry.register_from_path( - path, verbose=options.verbose - ) + self.registry.register_from_path(path, verbose=options.verbose) - if options.list_reg_hts: # --list option + if options.list_reg_hts: # --list option print(self.registry.table(options.verbose)) sys.exit(0) - if options.list_plugins: # --plugins option + if options.list_plugins: # --plugins option host_tests_plugins.print_plugin_info() sys.exit(0) @@ -109,7 +108,7 @@ def is_host_test_obj_compatible(self, obj_instance): result = False if obj_instance: result = True - self.logger.prn_inf("host test class: '%s'"% obj_instance.__class__) + self.logger.prn_inf("host test class: '%s'" % obj_instance.__class__) # Check if host test (obj_instance) is derived from mbed_os_tools.test.BaseHostTest() if not isinstance(obj_instance, BaseHostTest): @@ -131,19 +130,17 @@ def run_test(self): @return Return self.TestResults.RESULT_* enum """ result = None - timeout_duration = 10 # Default test case timeout + timeout_duration = 10 # Default test case timeout coverage_idle_timeout = 10 # Default coverage idle timeout - event_queue = Queue() # Events from DUT to host - dut_event_queue = Queue() # Events from host to DUT {k;v} + event_queue = Queue() # Events from DUT to host + dut_event_queue = Queue() # Events from host to DUT {k;v} def callback__notify_prn(key, value, timestamp): - """! Handles __norify_prn. Prints all lines in separate log line """ + """! Handles __norify_prn. 
Prints all lines in separate log line""" for line in value.splitlines(): self.logger.prn_inf(line) - callbacks = { - "__notify_prn" : callback__notify_prn - } + callbacks = {"__notify_prn": callback__notify_prn} # if True we will allow host test to consume all events after test is finished callbacks_consume = True @@ -158,24 +155,23 @@ def callback__notify_prn(key, value, timestamp): self.logger.prn_inf("starting host test process...") - # Create device info here as it may change after restart. config = { - "digest" : "serial", - "port" : self.mbed.port, - "baudrate" : self.mbed.serial_baud, - "mcu" : self.mbed.mcu, - "program_cycle_s" : self.options.program_cycle_s, - "reset_type" : self.options.forced_reset_type, - "target_id" : self.options.target_id, - "disk" : self.options.disk, - "polling_timeout" : self.options.polling_timeout, - "forced_reset_timeout" : self.options.forced_reset_timeout, - "sync_behavior" : self.options.sync_behavior, - "platform_name" : self.options.micro, - "image_path" : self.mbed.image_path, + "digest": "serial", + "port": self.mbed.port, + "baudrate": self.mbed.serial_baud, + "mcu": self.mbed.mcu, + "program_cycle_s": self.options.program_cycle_s, + "reset_type": self.options.forced_reset_type, + "target_id": self.options.target_id, + "disk": self.options.disk, + "polling_timeout": self.options.polling_timeout, + "forced_reset_timeout": self.options.forced_reset_timeout, + "sync_behavior": self.options.sync_behavior, + "platform_name": self.options.micro, + "image_path": self.mbed.image_path, "skip_reset": self.options.skip_reset, - "tags" : self.options.tag_filters, + "tags": self.options.tag_filters, "sync_timeout": self.options.sync_timeout, "sync_predelay": self.options.sync_predelay, } @@ -186,11 +182,7 @@ def callback__notify_prn(key, value, timestamp): config.update(grm_config) if self.options.fast_model_connection: - - config.update({ - "conn_resource" : 'fmc', - "fm_config" : self.options.fast_model_connection - }) + 
config.update({"conn_resource": "fmc", "fm_config": self.options.fast_model_connection}) def start_conn_process(): # DUT-host communication process @@ -225,7 +217,7 @@ def process_code_coverage(key, value, timestamp): # If coverage detected use idle loop # Prevent breaking idle loop for __rxd_line (occurs between keys) - if key == '__coverage_start' or key == '__rxd_line': + if key == "__coverage_start" or key == "__rxd_line": start_time = time() # Perform callback @@ -242,15 +234,16 @@ def process_code_coverage(key, value, timestamp): # Global resource manager case as it may take a while for resource # to be available. (key, value, timestamp) = event_queue.get( - timeout=None if self.options.global_resource_mgr else self.options.process_start_timeout) + timeout=None if self.options.global_resource_mgr else self.options.process_start_timeout + ) - if key == '__conn_process_start': + if key == "__conn_process_start": conn_process_started = True else: - self.logger.prn_err("First expected event was '__conn_process_start', received '%s' instead"% key) + self.logger.prn_err("First expected event was '__conn_process_start', received '%s' instead" % key) except QueueEmpty: - self.logger.prn_err("Conn process failed to start in %f sec"% self.options.process_start_timeout) + self.logger.prn_err("Conn process failed to start in %f sec" % self.options.process_start_timeout) if not conn_process_started: p.terminate() @@ -270,28 +263,28 @@ def process_code_coverage(key, value, timestamp): # Write serial output to the file if specified in options. if self.serial_output_file: - if key == '__rxd_line': + if key == "__rxd_line": with open(self.serial_output_file, "a") as f: f.write("%s\n" % value) # In this mode we only check serial output against compare log. 
if self.compare_log: - if key == '__rxd_line': + if key == "__rxd_line": if self.match_log(value): self.logger.prn_inf("Target log matches compare log!") result = True break if consume_preamble_events: - if key == '__timeout': + if key == "__timeout": # Override default timeout for this event queue start_time = time() - timeout_duration = int(value) # New timeout - self.logger.prn_inf("setting timeout to: %d sec"% int(value)) - elif key == '__version': + timeout_duration = int(value) # New timeout + self.logger.prn_inf("setting timeout to: %d sec" % int(value)) + elif key == "__version": self.client_version = value self.logger.prn_inf("DUT greentea-client version: " + self.client_version) - elif key == '__host_test_name': + elif key == "__host_test_name": # Load dynamically requested host test self.test_supervisor = self.registry.get_host_test(value) @@ -313,7 +306,7 @@ def process_code_coverage(key, value, timestamp): print(line) self.logger.prn_inf("==== Traceback end ====") result = self.RESULT_ERROR - event_queue.put(('__exit_event_queue', 0, time())) + event_queue.put(("__exit_event_queue", 0, time())) self.logger.prn_inf("host test setup() call...") if self.test_supervisor.get_callbacks(): @@ -321,44 +314,46 @@ def process_code_coverage(key, value, timestamp): self.logger.prn_inf("CALLBACKs updated") else: self.logger.prn_wrn("no CALLBACKs specified by host test") - self.logger.prn_inf("host test detected: %s"% value) + self.logger.prn_inf("host test detected: %s" % value) else: - self.logger.prn_err("host test not detected: %s"% value) + self.logger.prn_err("host test not detected: %s" % value) result = self.RESULT_ERROR - event_queue.put(('__exit_event_queue', 0, time())) + event_queue.put(("__exit_event_queue", 0, time())) consume_preamble_events = False - elif key == '__sync': + elif key == "__sync": # This is DUT-Host Test handshake event - self.logger.prn_inf("sync KV found, uuid=%s, timestamp=%f"% (str(value), timestamp)) - elif key == 
'__notify_sync_failed': + self.logger.prn_inf("sync KV found, uuid=%s, timestamp=%f" % (str(value), timestamp)) + elif key == "__notify_sync_failed": # This event is sent by conn_process, SYNC failed - self.logger.prn_err("stopped consuming events due to %s event"% key) + self.logger.prn_err("stopped consuming events due to %s event" % key) if value is not None: self.logger.prn_err(value) callbacks_consume = False result = self.RESULT_SYNC_FAILED - event_queue.put(('__exit_event_queue', 0, time())) - elif key == '__notify_conn_lost': + event_queue.put(("__exit_event_queue", 0, time())) + elif key == "__notify_conn_lost": # This event is sent by conn_process, DUT connection was lost self.logger.prn_err(value) - self.logger.prn_wrn("stopped to consume events due to %s event"% key) + self.logger.prn_wrn("stopped to consume events due to %s event" % key) callbacks_consume = False result = self.RESULT_IO_SERIAL - event_queue.put(('__exit_event_queue', 0, time())) - elif key == '__exit_event_queue': + event_queue.put(("__exit_event_queue", 0, time())) + elif key == "__exit_event_queue": # This event is sent by the host test indicating no more events expected - self.logger.prn_inf("%s received"% (key)) + self.logger.prn_inf("%s received" % (key)) callbacks__exit_event_queue = True break - elif key.startswith('__'): + elif key.startswith("__"): # Consume other system level events pass else: - self.logger.prn_err("orphan event in preamble phase: {{%s;%s}}, timestamp=%f"% (key, str(value), timestamp)) + self.logger.prn_err( + "orphan event in preamble phase: {{%s;%s}}, timestamp=%f" % (key, str(value), timestamp) + ) else: # If coverage detected switch to idle loop - if key == '__coverage_start': + if key == "__coverage_start": self.logger.prn_inf("starting coverage idle timeout loop...") elapsed_time, (key, value, timestamp) = process_code_coverage(key, value, timestamp) @@ -366,18 +361,18 @@ def process_code_coverage(key, value, timestamp): timeout_duration += elapsed_time 
self.logger.prn_inf("exiting coverage idle timeout loop (elapsed_time: %.2f" % elapsed_time) - if key == '__notify_complete': + if key == "__notify_complete": # This event is sent by Host Test, test result is in value # or if value is None, value will be retrieved from HostTest.result() method self.logger.prn_inf("%s(%s)" % (key, str(value))) result = value - event_queue.put(('__exit_event_queue', 0, time())) - elif key == '__reset': + event_queue.put(("__exit_event_queue", 0, time())) + elif key == "__reset": # This event only resets the dut, not the host test - dut_event_queue.put(('__reset', True, time())) - elif key == '__reset_dut': + dut_event_queue.put(("__reset", True, time())) + elif key == "__reset_dut": # Disconnect to avoid connection lost event - dut_event_queue.put(('__host_test_finished', True, time())) + dut_event_queue.put(("__host_test_finished", True, time())) p.join() if value == DefaultTestSelector.RESET_TYPE_SW_RST: @@ -388,43 +383,51 @@ def process_code_coverage(key, value, timestamp): # request hardware reset self.mbed.hw_reset() else: - self.logger.prn_err("Invalid reset type (%s). Supported types [%s]." % - (value, ", ".join([DefaultTestSelector.RESET_TYPE_HW_RST, - DefaultTestSelector.RESET_TYPE_SW_RST]))) + self.logger.prn_err( + "Invalid reset type (%s). Supported types [%s]." 
+ % ( + value, + ", ".join( + [DefaultTestSelector.RESET_TYPE_HW_RST, DefaultTestSelector.RESET_TYPE_SW_RST] + ), + ) + ) self.logger.prn_inf("Software reset will be performed.") # connect to the device p = start_conn_process() - elif key == '__notify_conn_lost': + elif key == "__notify_conn_lost": # This event is sent by conn_process, DUT connection was lost self.logger.prn_err(value) - self.logger.prn_wrn("stopped to consume events due to %s event"% key) + self.logger.prn_wrn("stopped to consume events due to %s event" % key) callbacks_consume = False result = self.RESULT_IO_SERIAL - event_queue.put(('__exit_event_queue', 0, time())) - elif key == '__exit': + event_queue.put(("__exit_event_queue", 0, time())) + elif key == "__exit": # This event is sent by DUT, test suite exited - self.logger.prn_inf("%s(%s)"% (key, str(value))) + self.logger.prn_inf("%s(%s)" % (key, str(value))) callbacks__exit = True - event_queue.put(('__exit_event_queue', 0, time())) - elif key == '__exit_event_queue': + event_queue.put(("__exit_event_queue", 0, time())) + elif key == "__exit_event_queue": # This event is sent by the host test indicating no more events expected - self.logger.prn_inf("%s received"% (key)) + self.logger.prn_inf("%s received" % (key)) callbacks__exit_event_queue = True break - elif key == '__timeout_set': + elif key == "__timeout_set": # Dynamic timeout set - timeout_duration = int(value) # New timeout - self.logger.prn_inf("setting timeout to: %d sec"% int(value)) - elif key == '__timeout_adjust': + timeout_duration = int(value) # New timeout + self.logger.prn_inf("setting timeout to: %d sec" % int(value)) + elif key == "__timeout_adjust": # Dynamic timeout adjust - timeout_duration = timeout_duration + int(value) # adjust time + timeout_duration = timeout_duration + int(value) # adjust time self.logger.prn_inf("adjusting timeout with %d sec (now %d)" % (int(value), timeout_duration)) elif key in callbacks: # Handle callback callbacks[key](key, value, 
timestamp) else: - self.logger.prn_err("orphan event in main phase: {{%s;%s}}, timestamp=%f"% (key, str(value), timestamp)) + self.logger.prn_err( + "orphan event in main phase: {{%s;%s}}, timestamp=%f" % (key, str(value), timestamp) + ) except Exception: self.logger.prn_err("something went wrong in event main loop!") self.logger.prn_inf("==== Traceback start ====") @@ -434,16 +437,18 @@ def process_code_coverage(key, value, timestamp): result = self.RESULT_ERROR time_duration = time() - start_time - self.logger.prn_inf("test suite run finished after %.2f sec..."% time_duration) + self.logger.prn_inf("test suite run finished after %.2f sec..." % time_duration) if self.compare_log and result is None: if self.compare_log_idx < len(self.compare_log): - self.logger.prn_err("Expected output [%s] not received in log." % self.compare_log[self.compare_log_idx]) + self.logger.prn_err( + "Expected output [%s] not received in log." % self.compare_log[self.compare_log_idx] + ) # Force conn_proxy process to return - dut_event_queue.put(('__host_test_finished', True, time())) + dut_event_queue.put(("__host_test_finished", True, time())) p.join() - self.logger.prn_inf("CONN exited with code: %s"% str(p.exitcode)) + self.logger.prn_inf("CONN exited with code: %s" % str(p.exitcode)) # Callbacks... 
self.logger.prn_inf("No events in queue" if event_queue.empty() else "Some events in queue") @@ -464,27 +469,27 @@ def process_code_coverage(key, value, timestamp): except QueueEmpty: break - if key == '__notify_complete': + if key == "__notify_complete": # This event is sent by Host Test, test result is in value # or if value is None, value will be retrieved from HostTest.result() method - self.logger.prn_inf("%s(%s)"% (key, str(value))) + self.logger.prn_inf("%s(%s)" % (key, str(value))) result = value - elif key.startswith('__'): + elif key.startswith("__"): # Consume other system level events pass elif key in callbacks: callbacks[key](key, value, timestamp) else: - self.logger.prn_wrn(">>> orphan event: {{%s;%s}}, timestamp=%f"% (key, str(value), timestamp)) + self.logger.prn_wrn(">>> orphan event: {{%s;%s}}, timestamp=%f" % (key, str(value), timestamp)) self.logger.prn_inf("stopped consuming events") if result is not None: # We must compare here against None! # Here for example we've received some error code like IOERR_COPY - self.logger.prn_inf("host test result() call skipped, received: %s"% str(result)) + self.logger.prn_inf("host test result() call skipped, received: %s" % str(result)) else: if self.test_supervisor: result = self.test_supervisor.result() - self.logger.prn_inf("host test result(): %s"% str(result)) + self.logger.prn_inf("host test result(): %s" % str(result)) if not callbacks__exit: self.logger.prn_wrn("missing __exit event from DUT") @@ -492,10 +497,11 @@ def process_code_coverage(key, value, timestamp): if not callbacks__exit_event_queue: self.logger.prn_wrn("missing __exit_event_queue event from host test") - #if not callbacks__exit_event_queue and not result: + # if not callbacks__exit_event_queue and not result: if not callbacks__exit_event_queue and result is None: - self.logger.prn_err("missing __exit_event_queue event from " + \ - "host test and no result from host test, timeout...") + self.logger.prn_err( + "missing 
__exit_event_queue event from " + "host test and no result from host test, timeout..." + ) result = self.RESULT_TIMEOUT self.logger.prn_inf("calling blocking teardown()") @@ -546,11 +552,11 @@ def execute(self): result = test_result # This will be captured by Greentea - self.logger.prn_inf("{{result;%s}}"% result) + self.logger.prn_inf("{{result;%s}}" % result) return self.get_test_result_int(result) except KeyboardInterrupt: - return(-3) # Keyboard interrupt + return -3 # Keyboard interrupt def match_log(self, line): """ @@ -573,8 +579,8 @@ def match_log(self, line): @staticmethod def _parse_grm(grm_arg): - grm_module, leftover = grm_arg.split(':', 1) - parts = leftover.rsplit(':', 1) + grm_module, leftover = grm_arg.split(":", 1) + parts = leftover.rsplit(":", 1) try: grm_host, grm_port = parts @@ -584,8 +590,4 @@ def _parse_grm(grm_arg): grm_host = leftover grm_port = None - return { - "grm_module" : grm_module, - "grm_host" : grm_host, - "grm_port" : grm_port, - } + return {"grm_module": grm_module, "grm_host": grm_host, "grm_port": grm_port} diff --git a/tools/python/mbed_os_tools/test/host_tests_runner/mbed_base.py b/tools/python/mbed_os_tools/test/host_tests_runner/mbed_base.py index 1595434c771..4a081ceb9b1 100644 --- a/tools/python/mbed_os_tools/test/host_tests_runner/mbed_base.py +++ b/tools/python/mbed_os_tools/test/host_tests_runner/mbed_base.py @@ -27,19 +27,19 @@ class Mbed: @details This class stores information about things like disk, port, serial speed etc. Class is also responsible for manipulation of serial port between host and mbed device """ + def __init__(self, options): - """ ctor - """ + """ctor""" # For compatibility with old mbed. We can use command line options for Mbed object # or we can pass options directly from . 
self.options = options - self.logger = HtrunLogger('MBED') + self.logger = HtrunLogger("MBED") # Options related to copy / reset mbed device self.port = self.options.port self.mcu = self.options.micro self.disk = self.options.disk self.target_id = self.options.target_id - self.image_path = self.options.image_path.strip('"') if self.options.image_path is not None else '' + self.image_path = self.options.image_path.strip('"') if self.options.image_path is not None else "" self.copy_method = self.options.copy_method self.retry_copy = self.options.retry_copy self.program_cycle_s = float(self.options.program_cycle_s if self.options.program_cycle_s is not None else 2.0) @@ -51,7 +51,7 @@ def __init__(self, options): # Users can use command to pass port speeds together with port name. E.g. COM4:115200:1 # Format if PORT:SPEED:TIMEOUT - port_config = self.port.split(':') if self.port else '' + port_config = self.port.split(":") if self.port else "" if len(port_config) == 2: # -p COM4:115200 self.port = port_config[0] @@ -75,8 +75,11 @@ def __init__(self, options): with open(json_test_configuration_path) as data_file: self.test_cfg = json.load(data_file) except IOError as e: - self.logger.prn_err("Test configuration JSON file '{0}' I/O error({1}): {2}" - .format(json_test_configuration_path, e.errno, e.strerror)) + self.logger.prn_err( + "Test configuration JSON file '{0}' I/O error({1}): {2}".format( + json_test_configuration_path, e.errno, e.strerror + ) + ) except: self.logger.prn_err("Test configuration JSON Unexpected error:", str(e)) raise @@ -85,23 +88,24 @@ def copy_image(self, image_path=None, disk=None, copy_method=None, port=None, mc """! Closure for copy_image_raw() method. @return Returns result from copy plugin """ + def get_remount_count(disk_path, tries=2): """! 
Get the remount count from 'DETAILS.TXT' file @return Returns count, None if not-available """ - #In case of no disk path, nothing to do + # In case of no disk path, nothing to do if disk_path is None: return None - + for cur_try in range(1, tries + 1): try: files_on_disk = [x.upper() for x in os.listdir(disk_path)] - if 'DETAILS.TXT' in files_on_disk: - with open(os.path.join(disk_path, 'DETAILS.TXT'), 'r') as details_txt: + if "DETAILS.TXT" in files_on_disk: + with open(os.path.join(disk_path, "DETAILS.TXT"), "r") as details_txt: for line in details_txt.readlines(): - if 'Remount count:' in line: - return int(line.replace('Remount count: ', '')) + if "Remount count:" in line: + return int(line.replace("Remount count: ", "")) # Remount count not found in file return None # 'DETAILS.TXT file not found @@ -136,17 +140,17 @@ def check_flash_error(target_id, disk, initial_remount_count): # trying to check for Mbed Enabled devices. return True - bad_files = set(['FAIL.TXT']) + bad_files = set(["FAIL.TXT"]) # Re-try at max 5 times with 0.5 sec in delay for i in range(5): # mbed_os_tools.detect.create() should be done inside the loop. Otherwise it will loop on same data. 
mbeds = detect.create() - mbed_list = mbeds.list_mbeds() #list of mbeds present + mbed_list = mbeds.list_mbeds() # list of mbeds present # get first item in list with a matching target_id, if present - mbed_target = next((x for x in mbed_list if x['target_id']==target_id), None) + mbed_target = next((x for x in mbed_list if x["target_id"] == target_id), None) if mbed_target is not None: - if 'mount_point' in mbed_target and mbed_target['mount_point'] is not None: + if "mount_point" in mbed_target and mbed_target["mount_point"] is not None: if not initial_remount_count is None: new_remount_count = get_remount_count(disk) if not new_remount_count is None and new_remount_count == initial_remount_count: @@ -155,15 +159,15 @@ def check_flash_error(target_id, disk, initial_remount_count): common_items = [] try: - items = set([x.upper() for x in os.listdir(mbed_target['mount_point'])]) + items = set([x.upper() for x in os.listdir(mbed_target["mount_point"])]) common_items = bad_files.intersection(items) except OSError as e: print("Failed to enumerate disk files, retrying") continue for common_item in common_items: - full_path = os.path.join(mbed_target['mount_point'], common_item) - self.logger.prn_err("Found %s"% (full_path)) + full_path = os.path.join(mbed_target["mount_point"], common_item) + self.logger.prn_err("Found %s" % (full_path)) bad_file_contents = "[failed to read bad file]" try: with open(full_path, "r") as bad_file: @@ -222,21 +226,19 @@ def copy_image_raw(self, image_path=None, disk=None, copy_method=None, port=None # Select copy_method # We override 'default' method with 'shell' method - copy_method = { - None : 'shell', - 'default' : 'shell', - }.get(copy_method, copy_method) + copy_method = {None: "shell", "default": "shell"}.get(copy_method, copy_method) - result = ht_plugins.call_plugin('CopyMethod', - copy_method, - image_path=image_path, - mcu=mcu, - serial=port, - destination_disk=disk, - target_id=self.target_id, - 
pooling_timeout=self.polling_timeout, - format=self.options.format - ) + result = ht_plugins.call_plugin( + "CopyMethod", + copy_method, + image_path=image_path, + mcu=mcu, + serial=port, + destination_disk=disk, + target_id=self.target_id, + pooling_timeout=self.polling_timeout, + format=self.options.format, + ) return result def hw_reset(self): @@ -246,12 +248,10 @@ def hw_reset(self): :return: """ device_info = {} - result = ht_plugins.call_plugin('ResetMethod', - 'power_cycle', - target_id=self.target_id, - device_info=device_info, - format=self.options.format) + result = ht_plugins.call_plugin( + "ResetMethod", "power_cycle", target_id=self.target_id, device_info=device_info, format=self.options.format + ) if result: - self.port = device_info['serial_port'] - self.disk = device_info['mount_point'] + self.port = device_info["serial_port"] + self.disk = device_info["mount_point"] return result diff --git a/tools/python/mbed_os_tools/test/host_tests_toolbox/host_functional.py b/tools/python/mbed_os_tools/test/host_tests_toolbox/host_functional.py index c75579e4a83..c33cb1d30cb 100644 --- a/tools/python/mbed_os_tools/test/host_tests_toolbox/host_functional.py +++ b/tools/python/mbed_os_tools/test/host_tests_toolbox/host_functional.py @@ -20,36 +20,33 @@ from .. import host_tests_plugins, DEFAULT_BAUD_RATE -def flash_dev(disk=None, - image_path=None, - copy_method='default', - port=None, - program_cycle_s=4): +def flash_dev(disk=None, image_path=None, copy_method="default", port=None, program_cycle_s=4): """! 
Flash device using pythonic interface @param disk Switch -d @param image_path Switch -f @param copy_method Switch -c (default: shell) @param port Switch -p """ - if copy_method == 'default': - copy_method = 'shell' + if copy_method == "default": + copy_method = "shell" result = False - result = host_tests_plugins.call_plugin('CopyMethod', - copy_method, - image_path=image_path, - serial=port, - destination_disk=disk) + result = host_tests_plugins.call_plugin( + "CopyMethod", copy_method, image_path=image_path, serial=port, destination_disk=disk + ) sleep(program_cycle_s) return result -def reset_dev(port=None, - disk=None, - reset_type='default', - reset_timeout=1, - serial_port=None, - baudrate=DEFAULT_BAUD_RATE, - timeout=1, - verbose=False): + +def reset_dev( + port=None, + disk=None, + reset_type="default", + reset_timeout=1, + serial_port=None, + baudrate=DEFAULT_BAUD_RATE, + timeout=1, + verbose=False, +): """! Reset device using pythonic interface @param port Switch -p @param disk Switch -d @@ -66,10 +63,7 @@ def reset_dev(port=None, if not serial_port: try: with Serial(port, baudrate=baudrate, timeout=timeout) as serial_port: - result = host_tests_plugins.call_plugin('ResetMethod', - reset_type, - serial=serial_port, - disk=disk) + result = host_tests_plugins.call_plugin("ResetMethod", reset_type, serial=serial_port, disk=disk) sleep(reset_timeout) except SerialException as e: if verbose: @@ -77,19 +71,15 @@ def reset_dev(port=None, result = False return result -def handle_send_break_cmd(port, - disk, - reset_type=None, - baudrate=None, - timeout=1, - verbose=False): + +def handle_send_break_cmd(port, disk, reset_type=None, baudrate=None, timeout=1, verbose=False): """! 
Resets platforms and prints serial port output - @detail Mix with switch -r RESET_TYPE and -p PORT for versatility + @detail Mix with switch -r RESET_TYPE and -p PORT for versatility """ if not reset_type: - reset_type = 'default' + reset_type = "default" - port_config = port.split(':') + port_config = port.split(":") if len(port_config) == 2: # -p COM4:115200 port = port_config[0] @@ -105,33 +95,31 @@ def handle_send_break_cmd(port, baudrate = DEFAULT_BAUD_RATE if verbose: - print("mbedhtrun: serial port configuration: %s:%s:%s"% (port, str(baudrate), str(timeout))) + print("mbedhtrun: serial port configuration: %s:%s:%s" % (port, str(baudrate), str(timeout))) try: serial_port = Serial(port, baudrate=baudrate, timeout=timeout) except Exception as e: print("mbedhtrun: %s" % (str(e))) - print(json.dumps({ - "port" : port, - "disk" : disk, - "baudrate" : baudrate, - "timeout" : timeout, - "reset_type" : reset_type, - }, indent=4)) + print( + json.dumps( + {"port": port, "disk": disk, "baudrate": baudrate, "timeout": timeout, "reset_type": reset_type}, + indent=4, + ) + ) return False serial_port.flush() # Reset using one of the plugins - result = host_tests_plugins.call_plugin('ResetMethod', reset_type, serial=serial_port, disk=disk) + result = host_tests_plugins.call_plugin("ResetMethod", reset_type, serial=serial_port, disk=disk) if not result: print("mbedhtrun: reset plugin failed") - print(json.dumps({ - "port" : port, - "disk" : disk, - "baudrate" : baudrate, - "timeout" : timeout, - "reset_type" : reset_type - }, indent=4)) + print( + json.dumps( + {"port": port, "disk": disk, "baudrate": baudrate, "timeout": timeout, "reset_type": reset_type}, + indent=4, + ) + ) return False print("mbedhtrun: serial dump started (use ctrl+c to break)") @@ -139,7 +127,7 @@ def handle_send_break_cmd(port, while True: test_output = serial_port.read(512) if test_output: - sys.stdout.write('%s'% test_output) + sys.stdout.write("%s" % test_output) if "{end}" in test_output: if 
verbose: print() diff --git a/tools/python/mbed_os_tools/test/mbed_common_api.py b/tools/python/mbed_os_tools/test/mbed_common_api.py index 0b8e3c7066e..6a465c18769 100644 --- a/tools/python/mbed_os_tools/test/mbed_common_api.py +++ b/tools/python/mbed_os_tools/test/mbed_common_api.py @@ -29,14 +29,15 @@ def run_cli_command(cmd, shell=True, verbose=False): if ret: result = False if verbose: - print("mbedgt: [ret=%d] Command: %s"% (int(ret), cmd)) + print("mbedgt: [ret=%d] Command: %s" % (int(ret), cmd)) except OSError as e: result = False if verbose: - print("mbedgt: [ret=%d] Command: %s"% (int(ret), cmd)) + print("mbedgt: [ret=%d] Command: %s" % (int(ret), cmd)) print(str(e)) return (result, ret) + def run_cli_process(cmd): """! Runs command as a process and return stdout, stderr and ret code @param cmd Command to execute @@ -46,7 +47,7 @@ def run_cli_process(cmd): p = Popen(cmd, stdout=PIPE, stderr=PIPE) _stdout, _stderr = p.communicate() except OSError as e: - print("mbedgt: Command: %s"% (cmd)) + print("mbedgt: Command: %s" % (cmd)) print(str(e)) print("mbedgt: traceback...") print(e.child_traceback) diff --git a/tools/python/mbed_os_tools/test/mbed_coverage_api.py b/tools/python/mbed_os_tools/test/mbed_coverage_api.py index d05f50dceb2..40c399d8a0e 100644 --- a/tools/python/mbed_os_tools/test/mbed_coverage_api.py +++ b/tools/python/mbed_os_tools/test/mbed_coverage_api.py @@ -29,6 +29,7 @@ def __default_coverage_start_callback(self, key, value, timestamp): self.log("LCOV:" + str(e)) """ + def coverage_pack_hex_payload(payload): """! Convert a block of hex string data back to binary and return the binary data @param payload String with hex encoded ascii data, e.g.: '6164636772...' 
@@ -36,9 +37,9 @@ def coverage_pack_hex_payload(payload): """ # This payload might be packed with dot compression # where byte value 0x00 is coded as ".", and not as "00" - payload = payload.replace('.', '00') + payload = payload.replace(".", "00") - hex_pairs = map(''.join, zip(*[iter(payload)] * 2)) # ['61', '64', '63', '67', '72', ... ] + hex_pairs = map("".join, zip(*[iter(payload)] * 2)) # ['61', '64', '63', '67', '72', ... ] bin_payload = bytearray([int(s, 16) for s in hex_pairs]) return bin_payload diff --git a/tools/python/mbed_os_tools/test/mbed_greentea_cli.py b/tools/python/mbed_os_tools/test/mbed_greentea_cli.py index 06cc6dd5a08..29e53bf4ba7 100644 --- a/tools/python/mbed_os_tools/test/mbed_greentea_cli.py +++ b/tools/python/mbed_os_tools/test/mbed_greentea_cli.py @@ -1,4 +1,3 @@ - # Copyright (c) 2018, Arm Limited and affiliates. # SPDX-License-Identifier: Apache-2.0 # @@ -22,12 +21,11 @@ RET_NO_DEVICES = 1001 RET_YOTTA_BUILD_FAIL = -1 -LOCAL_HOST_TESTS_DIR = './test/host_tests' # Used by mbedhtrun -e +LOCAL_HOST_TESTS_DIR = "./test/host_tests" # Used by mbedhtrun -e def get_local_host_tests_dir(path): - """! Forms path to local host tests. Performs additional basic checks if directory exists etc. - """ + """! Forms path to local host tests. Performs additional basic checks if directory exists etc.""" # If specified path exist return path if path and os.path.exists(path) and os.path.isdir(path): return path @@ -36,6 +34,7 @@ def get_local_host_tests_dir(path): return LOCAL_HOST_TESTS_DIR return None + def create_filtered_test_list(ctest_test_list, test_by_names, skip_test, test_spec=None): """! Filters test case list (filtered with switch -n) and return filtered list. @ctest_test_list List iof tests, originally from CTestTestFile.cmake in yotta module. 
Now comes from test specification @@ -52,8 +51,8 @@ def create_filtered_test_list(ctest_test_list, test_by_names, skip_test, test_sp return {} if test_by_names: - filtered_ctest_test_list = {} # Subset of 'ctest_test_list' - test_list = test_by_names.lower().split(',') + filtered_ctest_test_list = {} # Subset of 'ctest_test_list' + test_list = test_by_names.lower().split(",") gt_logger.gt_log("test case filter (specified with -n option)") for test_name in set(test_list): @@ -61,13 +60,13 @@ def create_filtered_test_list(ctest_test_list, test_by_names, skip_test, test_sp matches = [test for test in ctest_test_list.keys() if fnmatch.fnmatch(test, test_name)] if matches: for match in matches: - gt_logger.gt_log_tab("test filtered in '%s'"% gt_logger.gt_bright(match)) + gt_logger.gt_log_tab("test filtered in '%s'" % gt_logger.gt_bright(match)) filtered_ctest_test_list[match] = ctest_test_list[match] else: invalid_test_names.append(test_name) if skip_test: - test_list = skip_test.split(',') + test_list = skip_test.split(",") gt_logger.gt_log("test case filter (specified with -i option)") for test_name in set(test_list): @@ -75,22 +74,26 @@ def create_filtered_test_list(ctest_test_list, test_by_names, skip_test, test_sp matches = [test for test in filtered_ctest_test_list.keys() if fnmatch.fnmatch(test, test_name)] if matches: for match in matches: - gt_logger.gt_log_tab("test filtered out '%s'"% gt_logger.gt_bright(match)) + gt_logger.gt_log_tab("test filtered out '%s'" % gt_logger.gt_bright(match)) del filtered_ctest_test_list[match] else: invalid_test_names.append(test_name) if invalid_test_names: - opt_to_print = '-n' if test_by_names else 'skip-test' - gt_logger.gt_log_warn("invalid test case names (specified with '%s' option)"% opt_to_print) + opt_to_print = "-n" if test_by_names else "skip-test" + gt_logger.gt_log_warn("invalid test case names (specified with '%s' option)" % opt_to_print) for test_name in invalid_test_names: if test_spec: test_spec_name = 
test_spec.test_spec_filename - gt_logger.gt_log_warn("test name '%s' not found in '%s' (specified with --test-spec option)"% (gt_logger.gt_bright(test_name), - gt_logger.gt_bright(test_spec_name))) + gt_logger.gt_log_warn( + "test name '%s' not found in '%s' (specified with --test-spec option)" + % (gt_logger.gt_bright(test_name), gt_logger.gt_bright(test_spec_name)) + ) else: - gt_logger.gt_log_warn("test name '%s' not found in CTestTestFile.cmake (specified with '%s' option)"% (gt_logger.gt_bright(test_name), - opt_to_print)) + gt_logger.gt_log_warn( + "test name '%s' not found in CTestTestFile.cmake (specified with '%s' option)" + % (gt_logger.gt_bright(test_name), opt_to_print) + ) gt_logger.gt_log_tab("note: test case names are case sensitive") gt_logger.gt_log_tab("note: see list of available test cases below") # Print available test suite names (binary names user can use with -n diff --git a/tools/python/mbed_os_tools/test/mbed_greentea_dlm.py b/tools/python/mbed_os_tools/test/mbed_greentea_dlm.py index 59bdace0995..5586ae8f58c 100644 --- a/tools/python/mbed_os_tools/test/mbed_greentea_dlm.py +++ b/tools/python/mbed_os_tools/test/mbed_greentea_dlm.py @@ -22,37 +22,39 @@ HOME_DIR = expanduser("~") GREENTEA_HOME_DIR = ".mbed-greentea" GREENTEA_GLOBAL_LOCK = "glock.lock" -GREENTEA_KETTLE = "kettle.json" # active Greentea instances +GREENTEA_KETTLE = "kettle.json" # active Greentea instances GREENTEA_KETTLE_PATH = os.path.join(HOME_DIR, GREENTEA_HOME_DIR, GREENTEA_KETTLE) def greentea_home_dir_init(): - """ Initialize data in home directory for locking features - """ + """Initialize data in home directory for locking features""" if not os.path.isdir(os.path.join(HOME_DIR, GREENTEA_HOME_DIR)): os.mkdir(os.path.join(HOME_DIR, GREENTEA_HOME_DIR)) + def greentea_get_app_sem(): - """ Obtain locking mechanism info - """ + """Obtain locking mechanism info""" greentea_home_dir_init() - gt_instance_uuid = str(uuid.uuid4()) # String version + gt_instance_uuid = 
str(uuid.uuid4()) # String version gt_file_sem_name = os.path.join(HOME_DIR, GREENTEA_HOME_DIR, gt_instance_uuid) gt_file_sem = lockfile.LockFile(gt_file_sem_name) return gt_file_sem, gt_file_sem_name, gt_instance_uuid + def greentea_get_target_lock(target_id): greentea_home_dir_init() file_path = os.path.join(HOME_DIR, GREENTEA_HOME_DIR, target_id) lock = lockfile.LockFile(file_path) return lock + def greentea_get_global_lock(): greentea_home_dir_init() file_path = os.path.join(HOME_DIR, GREENTEA_HOME_DIR, GREENTEA_GLOBAL_LOCK) lock = lockfile.LockFile(file_path) return lock + def greentea_update_kettle(greentea_uuid): from time import gmtime, strftime @@ -61,44 +63,44 @@ def greentea_update_kettle(greentea_uuid): if not current_brew: current_brew = {} current_brew[greentea_uuid] = { - "start_time" : strftime("%Y-%m-%d %H:%M:%S", gmtime()), - "cwd" : os.getcwd(), - "locks" : [] + "start_time": strftime("%Y-%m-%d %H:%M:%S", gmtime()), + "cwd": os.getcwd(), + "locks": [], } - with open(GREENTEA_KETTLE_PATH, 'w') as kettle_file: + with open(GREENTEA_KETTLE_PATH, "w") as kettle_file: json.dump(current_brew, kettle_file, indent=4) + def greentea_clean_kettle(greentea_uuid): - """ Clean info in local file system config file - """ + """Clean info in local file system config file""" with greentea_get_global_lock(): current_brew = get_json_data_from_file(GREENTEA_KETTLE_PATH) if not current_brew: current_brew = {} current_brew.pop(greentea_uuid, None) - with open(GREENTEA_KETTLE_PATH, 'w') as kettle_file: + with open(GREENTEA_KETTLE_PATH, "w") as kettle_file: json.dump(current_brew, kettle_file, indent=4) + def greentea_acquire_target_id(target_id, gt_instance_uuid): - """ Acquire lock on target_id for given greentea UUID - """ + """Acquire lock on target_id for given greentea UUID""" with greentea_get_global_lock(): current_brew = get_json_data_from_file(GREENTEA_KETTLE_PATH) if current_brew: - current_brew[gt_instance_uuid]['locks'].append(target_id) - with 
open(GREENTEA_KETTLE_PATH, 'w') as kettle_file: + current_brew[gt_instance_uuid]["locks"].append(target_id) + with open(GREENTEA_KETTLE_PATH, "w") as kettle_file: json.dump(current_brew, kettle_file, indent=4) + def greentea_acquire_target_id_from_list(possible_target_ids, gt_instance_uuid): - """ Acquire lock on target_id from list of possible target_ids for given greentea UUID - """ + """Acquire lock on target_id from list of possible target_ids for given greentea UUID""" target_id = None already_locked_target_ids = [] with greentea_get_global_lock(): current_brew = get_json_data_from_file(GREENTEA_KETTLE_PATH) # Get all already locked target_id for cb in current_brew: - locks_list = current_brew[cb]['locks'] + locks_list = current_brew[cb]["locks"] already_locked_target_ids.extend(locks_list) # Remove from possible_target_ids elements from already_locked_target_ids @@ -106,27 +108,27 @@ def greentea_acquire_target_id_from_list(possible_target_ids, gt_instance_uuid): if available_target_ids: target_id = available_target_ids[0] - current_brew[gt_instance_uuid]['locks'].append(target_id) - with open(GREENTEA_KETTLE_PATH, 'w') as kettle_file: + current_brew[gt_instance_uuid]["locks"].append(target_id) + with open(GREENTEA_KETTLE_PATH, "w") as kettle_file: json.dump(current_brew, kettle_file, indent=4) return target_id + def greentea_release_target_id(target_id, gt_instance_uuid): - """ Release target_id for given greentea UUID - """ + """Release target_id for given greentea UUID""" with greentea_get_global_lock(): current_brew = get_json_data_from_file(GREENTEA_KETTLE_PATH) if current_brew: - current_brew[gt_instance_uuid]['locks'].remove(target_id) - with open(GREENTEA_KETTLE_PATH, 'w') as kettle_file: + current_brew[gt_instance_uuid]["locks"].remove(target_id) + with open(GREENTEA_KETTLE_PATH, "w") as kettle_file: json.dump(current_brew, kettle_file, indent=4) + def get_json_data_from_file(json_spec_filename): - """ Loads from file JSON formatted string to data 
structure - """ + """Loads from file JSON formatted string to data structure""" result = None try: - with open(json_spec_filename, 'r') as data_file: + with open(json_spec_filename, "r") as data_file: try: result = json.load(data_file) except ValueError: @@ -135,8 +137,9 @@ def get_json_data_from_file(json_spec_filename): result = None return result + def greentea_kettle_info(): - """ generates human friendly info about current kettle state + """generates human friendly info about current kettle state @details { @@ -148,22 +151,23 @@ def greentea_kettle_info(): } """ from prettytable import PrettyTable + with greentea_get_global_lock(): current_brew = get_json_data_from_file(GREENTEA_KETTLE_PATH) - cols = ['greentea_uuid', 'start_time', 'cwd', 'locks'] + cols = ["greentea_uuid", "start_time", "cwd", "locks"] pt = PrettyTable(cols) for col in cols: pt.align[col] = "l" - pt.padding_width = 1 # One space between column edges and contents (default) + pt.padding_width = 1 # One space between column edges and contents (default) row = [] for greentea_uuid in current_brew: kettle = current_brew[greentea_uuid] row.append(greentea_uuid) - row.append(kettle['start_time']) - row.append(kettle['cwd']) - row.append('\n'.join(kettle['locks'])) + row.append(kettle["start_time"]) + row.append(kettle["cwd"]) + row.append("\n".join(kettle["locks"])) pt.add_row(row) row = [] return pt.get_string() diff --git a/tools/python/mbed_os_tools/test/mbed_greentea_hooks.py b/tools/python/mbed_os_tools/test/mbed_greentea_hooks.py index 4d20362fa9b..ff43346c4c3 100644 --- a/tools/python/mbed_os_tools/test/mbed_greentea_hooks.py +++ b/tools/python/mbed_os_tools/test/mbed_greentea_hooks.py @@ -25,8 +25,8 @@ class GreenteaTestHook(object): - """! Class used to define - """ + """! Class used to define""" + name = None def __init__(self, name): @@ -35,9 +35,10 @@ def __init__(self, name): def run(self, format=None): pass + class GreenteaCliTestHook(GreenteaTestHook): - """! 
Class used to define a hook which will call command line program - """ + """! Class used to define a hook which will call command line program""" + cmd = None def __init__(self, name, cmd): @@ -64,14 +65,14 @@ def run(self, format=None): @format Pass format dictionary to replace hook {tags} with real values @param format Used to format string with cmd, notation used is e.g: {build_name} """ - gt_logger.gt_log("hook '%s' execution"% self.name) + gt_logger.gt_log("hook '%s' execution" % self.name) cmd = self.format_before_run(self.cmd, format) - gt_logger.gt_log_tab("hook command: %s"% cmd) + gt_logger.gt_log_tab("hook command: %s" % cmd) (_stdout, _stderr, ret) = self.run_cli_process(cmd) if _stdout: print(_stdout) if ret: - gt_logger.gt_log_err("hook exited with error: %d, dumping stderr..."% ret) + gt_logger.gt_log_err("hook exited with error: %d, dumping stderr..." % ret) print(_stderr) return ret @@ -83,15 +84,15 @@ def format_before_run(cmd, format, verbose=False): if cmd_expand: cmd = cmd_expand if verbose: - gt_logger.gt_log_tab("hook expanded: %s"% cmd) + gt_logger.gt_log_tab("hook expanded: %s" % cmd) cmd = cmd.format(**format) if verbose: - gt_logger.gt_log_tab("hook formated: %s"% cmd) + gt_logger.gt_log_tab("hook formated: %s" % cmd) return cmd @staticmethod - def expand_parameters(expr, expandables, delimiter=' '): + def expand_parameters(expr, expandables, delimiter=" "): """! 
Expands lists for multiple parameters in hook command @param expr Expression to expand @param expandables Dictionary of token: list_to_expand See details for more info @@ -117,7 +118,7 @@ def expand_parameters(expr, expandables, delimiter=' '): result = None if expandables: expansion_result = [] - m = re.search(r'\[.*?\]', expr) + m = re.search(r"\[.*?\]", expr) if m: expr_str_orig = m.group(0) expr_str_base = m.group(0)[1:-1] @@ -126,7 +127,7 @@ def expand_parameters(expr, expandables, delimiter=' '): # We will expand only values which are lists (of strings) if type(expandables[token]) is list: # Use tokens with curly braces (Python string format like) - format_token = '{' + token + '}' + format_token = "{" + token + "}" for expr_str in expr_str_list: if format_token in expr_str: patterns = expandables[token] @@ -135,19 +136,22 @@ def expand_parameters(expr, expandables, delimiter=' '): s = s.replace(format_token, pattern) expr_str_list.append(s) # Nothing to extend/change in this string - if not any('{' + p + '}' in s for p in expandables.keys() if type(expandables[p]) is list): + if not any( + "{" + p + "}" in s for p in expandables.keys() if type(expandables[p]) is list + ): expansion_result.append(s) expansion_result.sort() result = expr.replace(expr_str_orig, delimiter.join(expansion_result)) return result + class LcovHook(GreenteaCliTestHook): - """! Class used to define a LCOV hook - """ + """! 
Class used to define a LCOV hook""" + lcov_hooks = { "hooks": { "hook_test_end": "$lcov --gcov-tool gcov --capture --directory ./build --output-file {build_path}/{test_name}.info", - "hook_post_all_test_end": "$lcov --gcov-tool gcov [(-a << {build_path}/{test_name_list}.info>>)] --output-file result.info" + "hook_post_all_test_end": "$lcov --gcov-tool gcov [(-a << {build_path}/{test_name_list}.info>>)] --output-file result.info", } } @@ -162,12 +166,12 @@ def format_before_run(cmd, format, verbose=False): if cmd_expand: cmd = cmd_expand if verbose: - gt_logger.gt_log_tab("hook expanded: %s"% cmd) + gt_logger.gt_log_tab("hook expanded: %s" % cmd) cmd = cmd.format(**format) cmd = LcovHook.check_if_file_exists_or_is_empty(cmd) if verbose: - gt_logger.gt_log_tab("hook formated: %s"% cmd) + gt_logger.gt_log_tab("hook formated: %s" % cmd) return cmd @staticmethod @@ -186,48 +190,50 @@ def check_if_file_exists_or_is_empty(expr): expr = "lcov --gcov-tool gcov [(-a <<./build/{yotta_target_name}/{test_name_list}.info>>)] --output-file result.info" """ result = expr - expr_strs_orig = re.findall(r'\(.*?\)', expr) + expr_strs_orig = re.findall(r"\(.*?\)", expr) for expr_str_orig in expr_strs_orig: expr_str_base = expr_str_orig[1:-1] result = result.replace(expr_str_orig, expr_str_base) - m = re.search(r'\<<.*?\>>', expr_str_base) + m = re.search(r"\<<.*?\>>", expr_str_base) if m: expr_str_path = m.group(0)[2:-2] # Remove option if file not exists OR if file exists but empty if not os.path.exists(expr_str_path): - result = result.replace(expr_str_base, '') + result = result.replace(expr_str_base, "") elif os.path.getsize(expr_str_path) == 0: - result = result.replace(expr_str_base, '') + result = result.replace(expr_str_base, "") # Remove path limiter - result = result.replace('<<', '') - result = result.replace('>>', '') + result = result.replace("<<", "") + result = result.replace(">>", "") return result + class GreenteaHooks(object): """! 
Class used to store all hooks @details Hooks command starts with '$' dollar sign """ + HOOKS = {} + def __init__(self, path_to_hooks): - """! Opens JSON file with - """ + """! Opens JSON file with""" try: - if path_to_hooks == 'lcov': + if path_to_hooks == "lcov": hooks = LcovHook.lcov_hooks - for hook in hooks['hooks']: + for hook in hooks["hooks"]: hook_name = hook - hook_expression = hooks['hooks'][hook] + hook_expression = hooks["hooks"][hook] self.HOOKS[hook_name] = LcovHook(hook_name, hook_expression[1:]) else: - with open(path_to_hooks, 'r') as data_file: + with open(path_to_hooks, "r") as data_file: hooks = json.load(data_file) - if 'hooks' in hooks: - for hook in hooks['hooks']: + if "hooks" in hooks: + for hook in hooks["hooks"]: hook_name = hook - hook_expression = hooks['hooks'][hook] + hook_expression = hooks["hooks"][hook] # This is a command line hook - if hook_expression.startswith('$'): + if hook_expression.startswith("$"): self.HOOKS[hook_name] = GreenteaCliTestHook(hook_name, hook_expression[1:]) except IOError as e: print(str(e)) diff --git a/tools/python/mbed_os_tools/test/mbed_greentea_log.py b/tools/python/mbed_os_tools/test/mbed_greentea_log.py index b82cfd2dba1..26050f31505 100644 --- a/tools/python/mbed_os_tools/test/mbed_greentea_log.py +++ b/tools/python/mbed_os_tools/test/mbed_greentea_log.py @@ -17,6 +17,7 @@ try: import colorama + COLORAMA = True except ImportError: COLORAMA = False @@ -26,6 +27,7 @@ class GreenTeaSimpleLockLogger(object): """! 
Simple locking printing mechanism @details We are using parallel testing """ + # Colors used by color(ama) terminal component DIM = str() BRIGHT = str() @@ -36,8 +38,8 @@ class GreenTeaSimpleLockLogger(object): RESET = str() def __init__(self, colors=True, use_colorama=False): - self.use_colorama = colorama # Should we try to use colorama - self.colorful(colors) # Set and use colours for formatting + self.use_colorama = colorama # Should we try to use colorama + self.colorful(colors) # Set and use colours for formatting # Mutext used to protect logger prints # Usage: @@ -52,8 +54,7 @@ def __init__(self, colors=True, use_colorama=False): colorama.init() def colorful(self, colors): - """! Enable/Disable colourful printing - """ + """! Enable/Disable colourful printing""" self.colors = colors if self.colors: self.__set_colors() @@ -61,8 +62,7 @@ def colorful(self, colors): self.__clear_colors() def __set_colors(self): - """! Zeroes colours used for formatting - """ + """! Zeroes colours used for formatting""" if self.use_colorama: self.DIM = colorama.Style.DIM self.BRIGHT = colorama.Style.BRIGHT @@ -73,19 +73,17 @@ def __set_colors(self): self.RESET = colorama.Style.RESET_ALL def __clear_colors(self): - """! Zeroes colours used for formatting - """ + """! Zeroes colours used for formatting""" self.DIM = str() self.BRIGHT = str() self.GREEN = str() self.RED = str() self.BLUE = str() self.YELLOW = str() - self.RESET = str() + self.RESET = str() def __print(self, text): - """! Mutex protected print - """ + """! 
Mutex protected print""" self.GREENTEA_LOG_MUTEX.acquire(1) print(text) self.GREENTEA_LOG_MUTEX.release() @@ -106,7 +104,7 @@ def gt_log_tab(self, text, tab_count=1, print_text=True): @param print_text Forces log function to print on screen (not only return message) @return Returns string with message """ - result = "\t"*tab_count + text + result = "\t" * tab_count + text if print_text: self.__print(result) return result @@ -136,7 +134,8 @@ def gt_bright(self, text): @return Returns string with additional BRIGHT color codes """ if not text: - text = '' + text = "" return self.BLUE + self.BRIGHT + text + self.RESET + gt_logger = GreenTeaSimpleLockLogger(use_colorama=COLORAMA) diff --git a/tools/python/mbed_os_tools/test/mbed_report_api.py b/tools/python/mbed_os_tools/test/mbed_report_api.py index 4d303e074a0..1d0e6f65c40 100644 --- a/tools/python/mbed_os_tools/test/mbed_report_api.py +++ b/tools/python/mbed_os_tools/test/mbed_report_api.py @@ -13,6 +13,7 @@ # See the License for the specific language governing permissions and # limitations under the License. + def export_to_file(file_name, payload): """! 
Simple file dump used to store reports on disk @param file_name Report file name (with path if needed) @@ -21,7 +22,7 @@ def export_to_file(file_name, payload): """ result = True try: - with open(file_name, 'w') as f: + with open(file_name, "w") as f: f.write(payload) except IOError as e: print("Exporting report to file failed: %s" % str(e)) @@ -34,6 +35,7 @@ def exporter_json(test_result_ext, test_suite_properties=None): @details This is a machine friendly format """ import json + for target in test_result_ext.values(): for suite in target.values(): try: @@ -49,14 +51,15 @@ def exporter_text(test_result_ext, test_suite_properties=None): @return Tuple with table of results and result quantity summary string """ from prettytable import PrettyTable, HEADER - #TODO: export to text, preferably to PrettyTable (SQL like) format - cols = ['target', 'platform_name', 'test suite', 'result', 'elapsed_time (sec)', 'copy_method'] + + # TODO: export to text, preferably to PrettyTable (SQL like) format + cols = ["target", "platform_name", "test suite", "result", "elapsed_time (sec)", "copy_method"] pt = PrettyTable(cols, junction_char="|", hrules=HEADER) for col in cols: pt.align[col] = "l" - pt.padding_width = 1 # One space between column edges and contents (default) + pt.padding_width = 1 # One space between column edges and contents (default) - result_dict = {} # Used to print test suite results + result_dict = {} # Used to print test suite results for target_name in sorted(test_result_ext): test_results = test_result_ext[target_name] @@ -65,24 +68,27 @@ def exporter_text(test_result_ext, test_suite_properties=None): test = test_results[test_name] # Grab quantity of each test result - if test['single_test_result'] in result_dict: - result_dict[test['single_test_result']] += 1 + if test["single_test_result"] in result_dict: + result_dict[test["single_test_result"]] += 1 else: - result_dict[test['single_test_result']] = 1 + result_dict[test["single_test_result"]] = 1 
row.append(target_name) - row.append(test['platform_name']) + row.append(test["platform_name"]) row.append(test_name) - row.append(test['single_test_result']) - row.append(round(test['elapsed_time'], 2)) - row.append(test['copy_method']) + row.append(test["single_test_result"]) + row.append(round(test["elapsed_time"], 2)) + row.append(test["copy_method"]) pt.add_row(row) row = [] result_pt = pt.get_string() - result_res = ' / '.join(['%s %s' % (value, key) for (key, value) in {k: v for k, v in result_dict.items() if v != 0}.items()]) + result_res = " / ".join( + ["%s %s" % (value, key) for (key, value) in {k: v for k, v in result_dict.items() if v != 0}.items()] + ) return result_pt, result_res + def exporter_testcase_text(test_result_ext, test_suite_properties=None): """! Exports test case results to text formatted output @param test_result_ext Extended report from Greentea @@ -91,14 +97,15 @@ def exporter_testcase_text(test_result_ext, test_suite_properties=None): @return Tuple with table of results and result quantity summary string """ from prettytable import PrettyTable, HEADER - #TODO: export to text, preferably to PrettyTable (SQL like) format - cols = ['target', 'platform_name', 'test suite', 'test case', 'passed', 'failed', 'result', 'elapsed_time (sec)'] + + # TODO: export to text, preferably to PrettyTable (SQL like) format + cols = ["target", "platform_name", "test suite", "test case", "passed", "failed", "result", "elapsed_time (sec)"] pt = PrettyTable(cols, junction_char="|", hrules=HEADER) for col in cols: pt.align[col] = "l" - pt.padding_width = 1 # One space between column edges and contents (default) + pt.padding_width = 1 # One space between column edges and contents (default) - result_testcase_dict = {} # Used to print test case results + result_testcase_dict = {} # Used to print test case results for target_name in sorted(test_result_ext): test_results = test_result_ext[target_name] @@ -107,7 +114,7 @@ def 
exporter_testcase_text(test_result_ext, test_suite_properties=None): test = test_results[test_suite_name] # testcase_result stores info about test case results - testcase_result = test['testcase_result'] + testcase_result = test["testcase_result"] # "testcase_result": { # "STRINGS004": { # "duration": 0.009999990463256836, @@ -117,11 +124,11 @@ def exporter_testcase_text(test_result_ext, test_suite_properties=None): # }, for tc_name in sorted(testcase_result): - duration = testcase_result[tc_name].get('duration', 0.0) + duration = testcase_result[tc_name].get("duration", 0.0) # result = testcase_result[tc_name].get('result', 0) - passed = testcase_result[tc_name].get('passed', 0) - failed = testcase_result[tc_name].get('failed', 0) - result_text = testcase_result[tc_name].get('result_text', "UNDEF") + passed = testcase_result[tc_name].get("passed", 0) + failed = testcase_result[tc_name].get("failed", 0) + result_text = testcase_result[tc_name].get("result_text", "UNDEF") # Grab quantity of each test result if result_text in result_testcase_dict: @@ -130,7 +137,7 @@ def exporter_testcase_text(test_result_ext, test_suite_properties=None): result_testcase_dict[result_text] = 1 row.append(target_name) - row.append(test['platform_name']) + row.append(test["platform_name"]) row.append(test_suite_name) row.append(tc_name) row.append(passed) @@ -141,9 +148,12 @@ def exporter_testcase_text(test_result_ext, test_suite_properties=None): row = [] result_pt = pt.get_string() - result_res = ' / '.join(['%s %s' % (value, key) for (key, value) in {k: v for k, v in result_testcase_dict.items() if v != 0}.items()]) + result_res = " / ".join( + ["%s %s" % (value, key) for (key, value) in {k: v for k, v in result_testcase_dict.items() if v != 0}.items()] + ) return result_pt, result_res + def exporter_testcase_junit(test_result_ext, test_suite_properties=None): """! 
Export test results in JUnit XML compliant format @param test_result_ext Extended report from Greentea @@ -160,10 +170,10 @@ def exporter_testcase_junit(test_result_ext, test_suite_properties=None): test_results = test_result_ext[target_name] for test_suite_name in test_results: test = test_results[test_suite_name] - tc_stdout = test['single_test_output'] + tc_stdout = test["single_test_output"] # testcase_result stores info about test case results - testcase_result = test['testcase_result'] + testcase_result = test["testcase_result"] # "testcase_result": { # "STRINGS004": { # "duration": 0.009999990463256836, @@ -175,25 +185,25 @@ def exporter_testcase_junit(test_result_ext, test_suite_properties=None): test_cases = [] for tc_name in sorted(testcase_result.keys()): - duration = testcase_result[tc_name].get('duration', 0.0) - utest_log = testcase_result[tc_name].get('utest_log', '') - result_text = testcase_result[tc_name].get('result_text', "UNDEF") + duration = testcase_result[tc_name].get("duration", 0.0) + utest_log = testcase_result[tc_name].get("utest_log", "") + result_text = testcase_result[tc_name].get("result_text", "UNDEF") - tc_stderr = '\n'.join(utest_log) - tc_class = target_name + '.' + test_suite_name + tc_stderr = "\n".join(utest_log) + tc_class = target_name + "." 
+ test_suite_name - if result_text == 'SKIPPED': + if result_text == "SKIPPED": # Skipped test cases do not have logs and we do not want to put # whole log inside JUNIT for skipped test case tc_stderr = str() tc = TestCase(tc_name, tc_class, duration, tc_stdout, tc_stderr) - if result_text == 'FAIL': + if result_text == "FAIL": tc.add_failure_info(result_text) - elif result_text == 'SKIPPED': + elif result_text == "SKIPPED": tc.add_skipped_info(result_text) - elif result_text != 'OK': + elif result_text != "OK": tc.add_error_info(result_text) test_cases.append(tc) @@ -210,6 +220,7 @@ def exporter_testcase_junit(test_result_ext, test_suite_properties=None): return TestSuite.to_xml_string(test_suites) + html_template = """ @@ -234,10 +245,10 @@ def exporter_testcase_junit(test_result_ext, test_suite_properties=None): if (elem) { if (elem.style.height == "0px" || elem.style.height == "") { elem.style.height = "auto"; - elem.previousElementSibling.textContent = elem.previousElementSibling.textContent.replace("\u25B2", "\u25BC"); + elem.previousElementSibling.textContent = elem.previousElementSibling.textContent.replace("\u25b2", "\u25bc"); } else { elem.style.height = 0; - elem.previousElementSibling.textContent = elem.previousElementSibling.textContent.replace("\u25BC", "\u25B2"); + elem.previousElementSibling.textContent = elem.previousElementSibling.textContent.replace("\u25bc", "\u25b2"); } } } @@ -414,23 +425,24 @@ def exporter_testcase_junit(test_result_ext, test_suite_properties=None): """ TEST_RESULT_COLOURS = { - 'OK': "limegreen", - 'FAIL': "darkorange", - 'ERROR': "orangered", - 'SKIPPED': "lightsteelblue", - 'UNDEF': "Red", - 'IOERR_COPY': "DarkSalmon", - 'IOERR_DISK': "DarkSalmon", - 'IOERR_SERIAL': "DarkSalmon", - 'TIMEOUT': "DarkKhaki", - 'NO_IMAGE': "DarkSalmon", - 'NOT_RAN': 'grey' + "OK": "limegreen", + "FAIL": "darkorange", + "ERROR": "orangered", + "SKIPPED": "lightsteelblue", + "UNDEF": "Red", + "IOERR_COPY": "DarkSalmon", + "IOERR_DISK": 
"DarkSalmon", + "IOERR_SERIAL": "DarkSalmon", + "TIMEOUT": "DarkKhaki", + "NO_IMAGE": "DarkSalmon", + "NOT_RAN": "grey", # 'MBED_ASSERT': "", # 'BUILD_FAILED': "", } TEST_RESULT_DEFAULT_COLOUR = "lavender" + def get_result_colour_class_css(): """! Get the CSS for the colour classes @details Returns a string of the CSS classes that are used to colour the different results @@ -446,14 +458,13 @@ def get_result_colour_class_css(): # Create CSS classes for all of the allocated colours css = "" for result, colour in TEST_RESULT_COLOURS.items(): - css += colour_class_template % ("result-%s" % result.lower().replace("_", "-"), - colour) + css += colour_class_template % ("result-%s" % result.lower().replace("_", "-"), colour) - css += colour_class_template % ("result-other", - TEST_RESULT_DEFAULT_COLOUR) + css += colour_class_template % ("result-other", TEST_RESULT_DEFAULT_COLOUR) return css + def get_result_colour_class(result): """! Get the CSS colour class representing the result @param result The result of the test @@ -466,6 +477,7 @@ def get_result_colour_class(result): else: return "result-other" + def get_dropdown_html(div_id, dropdown_name, content, title_classes="", output_text=False, sub_dropdown=False): """! Get the HTML for a dropdown menu @param title_classes A space separated string of css classes on the title @@ -490,12 +502,8 @@ def get_dropdown_html(div_id, dropdown_name, content, title_classes="", output_t if sub_dropdown: dropdown_classes += " sub-dropdown-content" - return dropdown_template % (title_classes, - div_id, - dropdown_name, - div_id, - dropdown_classes, - content) + return dropdown_template % (title_classes, div_id, dropdown_name, div_id, dropdown_classes, content) + def get_result_overlay_testcase_dropdown(result_div_id, index, testcase_result_name, testcase_result): """! 
Get the HTML for an individual testcase dropdown @@ -521,33 +529,39 @@ def get_result_overlay_testcase_dropdown(result_div_id, index, testcase_result_n testcase_div_id = "%s_testcase_result_%d" % (result_div_id, index) testcase_utest_div_id = "%s_testcase_result_%d_utest" % (result_div_id, index) - testcase_utest_log_dropdown = get_dropdown_html(testcase_utest_div_id, - "uTest Log", - "\n".join(testcase_result.get('utest_log', 'n/a')).rstrip("\n"), - output_text=True, - sub_dropdown=True) - - time_start = 'n/a' - time_end = 'n/a' - if 'time_start' in testcase_result.keys(): - time_start = datetime.datetime.fromtimestamp(testcase_result['time_start']).strftime('%d-%m-%Y %H:%M:%S.%f') - if 'time_end' in testcase_result.keys(): - time_end = datetime.datetime.fromtimestamp(testcase_result['time_end']).strftime('%d-%m-%Y %H:%M:%S.%f') - - testcase_info = testcase_result_template % (testcase_result.get('result_text', 'n/a'), - testcase_result.get('duration', 'n/a'), - time_start, - time_end, - testcase_result.get('failed', 'n/a'), - testcase_result.get('passed', 'n/a'), - testcase_utest_log_dropdown) - - testcase_class = get_result_colour_class(testcase_result['result_text']) - testcase_dropdown = get_dropdown_html(testcase_div_id, - "Testcase: %s
" % testcase_result_name, - testcase_info, - title_classes=testcase_class, - sub_dropdown=True) + testcase_utest_log_dropdown = get_dropdown_html( + testcase_utest_div_id, + "uTest Log", + "\n".join(testcase_result.get("utest_log", "n/a")).rstrip("\n"), + output_text=True, + sub_dropdown=True, + ) + + time_start = "n/a" + time_end = "n/a" + if "time_start" in testcase_result.keys(): + time_start = datetime.datetime.fromtimestamp(testcase_result["time_start"]).strftime("%d-%m-%Y %H:%M:%S.%f") + if "time_end" in testcase_result.keys(): + time_end = datetime.datetime.fromtimestamp(testcase_result["time_end"]).strftime("%d-%m-%Y %H:%M:%S.%f") + + testcase_info = testcase_result_template % ( + testcase_result.get("result_text", "n/a"), + testcase_result.get("duration", "n/a"), + time_start, + time_end, + testcase_result.get("failed", "n/a"), + testcase_result.get("passed", "n/a"), + testcase_utest_log_dropdown, + ) + + testcase_class = get_result_colour_class(testcase_result["result_text"]) + testcase_dropdown = get_dropdown_html( + testcase_div_id, + "Testcase: %s
" % testcase_result_name, + testcase_info, + title_classes=testcase_class, + sub_dropdown=True, + ) return testcase_dropdown @@ -563,16 +577,18 @@ def get_result_overlay_testcases_dropdown_menu(result_div_id, test_results): testcase_results_info = "" # Loop through the test cases giving them a number to create a unique id - for index, (testcase_result_name, testcase_result) in enumerate(test_results['testcase_result'].items()): - testcase_results_info += get_result_overlay_testcase_dropdown(result_div_id, index, testcase_result_name, testcase_result) + for index, (testcase_result_name, testcase_result) in enumerate(test_results["testcase_result"].items()): + testcase_results_info += get_result_overlay_testcase_dropdown( + result_div_id, index, testcase_result_name, testcase_result + ) - result_testcases_dropdown = get_dropdown_html(testcase_results_div_id, - "Testcase Results", - testcase_results_info, - sub_dropdown=True) + result_testcases_dropdown = get_dropdown_html( + testcase_results_div_id, "Testcase Results", testcase_results_info, sub_dropdown=True + ) return result_testcases_dropdown + def get_result_overlay_dropdowns(result_div_id, test_results): """! 
Get the HTML for a test overlay's dropdown menus @param result_div_id The div id used for the test @@ -584,19 +600,20 @@ def get_result_overlay_dropdowns(result_div_id, test_results): # The HTML for the dropdown containing the ouput of the test result_output_div_id = "%s_output" % result_div_id result_output_dropdown = get_dropdown_html( - result_output_div_id, "Test Output", - test_results['single_test_output'].rstrip("\n"), - output_text=True + result_output_div_id, "Test Output", test_results["single_test_output"].rstrip("\n"), output_text=True ) # Add a dropdown for the testcases if they are present if len(test_results) > 0: - result_overlay_dropdowns = result_output_dropdown + get_result_overlay_testcases_dropdown_menu(result_div_id, test_results) + result_overlay_dropdowns = result_output_dropdown + get_result_overlay_testcases_dropdown_menu( + result_div_id, test_results + ) else: result_overlay_dropdowns = result_output_dropdown return result_overlay_dropdowns + def get_result_overlay(result_div_id, test_name, platform, toolchain, test_results): """! 
Get the HTML for a test's overlay @param result_div_id The div id used for the test @@ -625,18 +642,21 @@ def get_result_overlay(result_div_id, test_name, platform, toolchain, test_resul overlay_dropdowns = get_result_overlay_dropdowns(result_div_id, test_results) - return overlay_template % (result_div_id, - test_name, - result_div_id, - test_results['single_test_result'], - platform, - toolchain, - test_results['elapsed_time'], - test_results['build_path'], - test_results['build_path_abs'], - test_results['copy_method'], - test_results['image_path'], - overlay_dropdowns) + return overlay_template % ( + result_div_id, + test_name, + result_div_id, + test_results["single_test_result"], + platform, + toolchain, + test_results["elapsed_time"], + test_results["build_path"], + test_results["build_path_abs"], + test_results["copy_method"], + test_results["image_path"], + overlay_dropdowns, + ) + def exporter_html(test_result_ext, test_suite_properties=None): """! Export test results as HTML @@ -668,8 +688,8 @@ def exporter_html(test_result_ext, test_suite_properties=None): # Format of string is - # can however contain '-' such as "frdm-k64f" # is split with '_' fortunately, as in "gcc_arm" - toolchain = platform_toolchain.split('-')[-1] - platform = platform_toolchain.replace('-%s'% toolchain, '') + toolchain = platform_toolchain.split("-")[-1] + platform = platform_toolchain.replace("-%s" % toolchain, "") if platform in platforms_toolchains: platforms_toolchains[platform].append(toolchain) else: @@ -714,51 +734,52 @@ def exporter_html(test_result_ext, test_suite_properties=None): test_results = test_result_ext["%s-%s" % (platform, toolchain)][test_name] else: test_results = { - 'single_test_result': 'NOT_RAN', - 'elapsed_time': 0.0, - 'build_path': 'N/A', - 'build_path_abs': 'N/A', - 'copy_method': 'N/A', - 'image_path': 'N/A', - 'single_test_output': 'N/A', - 'platform_name': platform, - 'test_bin_name': 'N/A', - 'testcase_result': {} + "single_test_result": 
"NOT_RAN", + "elapsed_time": 0.0, + "build_path": "N/A", + "build_path_abs": "N/A", + "copy_method": "N/A", + "image_path": "N/A", + "single_test_output": "N/A", + "platform_name": platform, + "test_bin_name": "N/A", + "testcase_result": {}, } - test_results['single_test_passes'] = 0 - test_results['single_test_count'] = 0 - result_div_id = "target_%s_toolchain_%s_test_%s" % (platform, toolchain, test_name.replace('-', '_')) + test_results["single_test_passes"] = 0 + test_results["single_test_count"] = 0 + result_div_id = "target_%s_toolchain_%s_test_%s" % (platform, toolchain, test_name.replace("-", "_")) - result_overlay = get_result_overlay(result_div_id, - test_name, - platform, - toolchain, - test_results) + result_overlay = get_result_overlay(result_div_id, test_name, platform, toolchain, test_results) # Loop through the test cases and count the passes and failures - for index, (testcase_result_name, testcase_result) in enumerate(test_results['testcase_result'].items()): - test_results['single_test_passes'] += testcase_result['passed'] - test_results['single_test_count'] += 1 + for index, (testcase_result_name, testcase_result) in enumerate( + test_results["testcase_result"].items() + ): + test_results["single_test_passes"] += testcase_result["passed"] + test_results["single_test_count"] += 1 - result_class = get_result_colour_class(test_results['single_test_result']) + result_class = get_result_colour_class(test_results["single_test_result"]) try: - percent_pass = int((test_results['single_test_passes']*100.0)/test_results['single_test_count']) + percent_pass = int((test_results["single_test_passes"] * 100.0) / test_results["single_test_count"]) except ZeroDivisionError: percent_pass = 100 - this_row += result_cell_template % (result_class, - result_div_id, - test_results['single_test_result'], - percent_pass, - test_results['single_test_passes'], - test_results['single_test_count'], - result_overlay) + this_row += result_cell_template % ( + result_class, + 
result_div_id, + test_results["single_test_result"], + percent_pass, + test_results["single_test_passes"], + test_results["single_test_count"], + result_overlay, + ) table += row_template % this_row # Add the numbers of columns to make them have the same width return html_template % (get_result_colour_class_css(), len(test_result_ext), table) + def exporter_memory_metrics_csv(test_result_ext, test_suite_properties=None): """! Export memory metrics as CSV @param test_result_ext Extended report from Greentea @@ -773,37 +794,37 @@ def exporter_memory_metrics_csv(test_result_ext, test_suite_properties=None): for test_suite_name in test_results: test = test_results[test_suite_name] - if 'memory_metrics' in test and test['memory_metrics']: - memory_metrics = test['memory_metrics'] + if "memory_metrics" in test and test["memory_metrics"]: + memory_metrics = test["memory_metrics"] - if 'max_heap' in memory_metrics: - report_key = '%s_%s_max_heap_usage' % (target_name, test_suite_name) - metrics_report[report_key] = memory_metrics['max_heap'] + if "max_heap" in memory_metrics: + report_key = "%s_%s_max_heap_usage" % (target_name, test_suite_name) + metrics_report[report_key] = memory_metrics["max_heap"] - if 'reserved_heap' in memory_metrics: - report_key = '%s_%s_reserved_heap_usage' % (target_name, test_suite_name) - metrics_report[report_key] = memory_metrics['reserved_heap'] + if "reserved_heap" in memory_metrics: + report_key = "%s_%s_reserved_heap_usage" % (target_name, test_suite_name) + metrics_report[report_key] = memory_metrics["reserved_heap"] - if 'thread_stack_summary' in memory_metrics: - thread_stack_summary = memory_metrics['thread_stack_summary'] + if "thread_stack_summary" in memory_metrics: + thread_stack_summary = memory_metrics["thread_stack_summary"] - if 'max_stack_size' in thread_stack_summary: - report_key = '%s_%s_max_stack_size' % (target_name, test_suite_name) - metrics_report[report_key] = thread_stack_summary['max_stack_size'] + if 
"max_stack_size" in thread_stack_summary: + report_key = "%s_%s_max_stack_size" % (target_name, test_suite_name) + metrics_report[report_key] = thread_stack_summary["max_stack_size"] - if 'max_stack_usage' in thread_stack_summary: - report_key = '%s_%s_max_stack_usage' % (target_name, test_suite_name) - metrics_report[report_key] = thread_stack_summary['max_stack_usage'] + if "max_stack_usage" in thread_stack_summary: + report_key = "%s_%s_max_stack_usage" % (target_name, test_suite_name) + metrics_report[report_key] = thread_stack_summary["max_stack_usage"] - if 'max_stack_usage_total' in thread_stack_summary: - report_key = '%s_%s_max_stack_usage_total' % (target_name, test_suite_name) - metrics_report[report_key] = thread_stack_summary['max_stack_usage_total'] + if "max_stack_usage_total" in thread_stack_summary: + report_key = "%s_%s_max_stack_usage_total" % (target_name, test_suite_name) + metrics_report[report_key] = thread_stack_summary["max_stack_usage_total"] - if 'reserved_stack_total' in thread_stack_summary: - report_key = '%s_%s_reserved_stack_total' % (target_name, test_suite_name) - metrics_report[report_key] = thread_stack_summary['reserved_stack_total'] + if "reserved_stack_total" in thread_stack_summary: + report_key = "%s_%s_reserved_stack_total" % (target_name, test_suite_name) + metrics_report[report_key] = thread_stack_summary["reserved_stack_total"] column_names = sorted(metrics_report.keys()) column_values = [str(metrics_report[x]) for x in column_names] - return "%s\n%s" % (','.join(column_names), ','.join(column_values)) + return "%s\n%s" % (",".join(column_names), ",".join(column_values)) diff --git a/tools/python/mbed_os_tools/test/mbed_target_info.py b/tools/python/mbed_os_tools/test/mbed_target_info.py index e6bd8faa199..8d830f1623d 100644 --- a/tools/python/mbed_os_tools/test/mbed_target_info.py +++ b/tools/python/mbed_os_tools/test/mbed_target_info.py @@ -17,16 +17,20 @@ import re import json from os import walk + try: from 
contextlib import suppress except ImportError: from contextlib import contextmanager + @contextmanager def suppress(*excs): try: yield except excs: pass + + from .mbed_common_api import run_cli_process from .mbed_greentea_log import gt_logger @@ -38,128 +42,83 @@ def suppress(*excs): # TARGET_INFO_MAPPING = { - "default" : { - "program_cycle_s": 4, - "binary_type": ".bin", - "copy_method": "default", - "reset_method": "default" - }, - - "K64F" : { + "default": {"program_cycle_s": 4, "binary_type": ".bin", "copy_method": "default", "reset_method": "default"}, + "K64F": { "yotta_targets": [ - { - "yotta_target": "frdm-k64f-gcc", - "mbed_toolchain": "GCC_ARM" - }, - { - "yotta_target": "frdm-k64f-armcc", - "mbed_toolchain": "ARM" - } - ], - "properties" : { - "binary_type": ".bin", - "copy_method": "default", - "reset_method": "default", - "program_cycle_s": 4 - } + {"yotta_target": "frdm-k64f-gcc", "mbed_toolchain": "GCC_ARM"}, + {"yotta_target": "frdm-k64f-armcc", "mbed_toolchain": "ARM"}, + ], + "properties": { + "binary_type": ".bin", + "copy_method": "default", + "reset_method": "default", + "program_cycle_s": 4, + }, }, - "RAPIDIOT_K64F" : { - "properties" : { - "forced_reset_timeout":7 - } + "RAPIDIOT_K64F": {"properties": {"forced_reset_timeout": 7}}, + "NUCLEO_F401RE": { + "yotta_targets": [{"yotta_target": "st-nucleo-f401re-gcc", "mbed_toolchain": "GCC_ARM"}], + "properties": {"binary_type": ".bin", "copy_method": "cp", "reset_method": "default", "program_cycle_s": 4}, }, - "NUCLEO_F401RE" : { + "NRF51_DK": { "yotta_targets": [ - { - "yotta_target": "st-nucleo-f401re-gcc", - "mbed_toolchain": "GCC_ARM" - } - ], - "properties" : { - "binary_type": ".bin", - "copy_method": "cp", - "reset_method": "default", - "program_cycle_s": 4 - } + {"yotta_target": "nrf51dk-gcc", "mbed_toolchain": "GCC_ARM"}, + {"yotta_target": "nrf51dk-armcc", "mbed_toolchain": "ARM"}, + ], + "properties": { + "binary_type": "-combined.hex", + "copy_method": "shell", + "reset_method": 
"default", + "program_cycle_s": 4, }, - "NRF51_DK" : { + }, + "NRF51822": { "yotta_targets": [ - { - "yotta_target": "nrf51dk-gcc", - "mbed_toolchain": "GCC_ARM" - }, - { - "yotta_target": "nrf51dk-armcc", - "mbed_toolchain": "ARM" - } - ], - "properties" : { - "binary_type": "-combined.hex", - "copy_method": "shell", - "reset_method": "default", - "program_cycle_s": 4 - } + {"yotta_target": "mkit-gcc", "mbed_toolchain": "GCC_ARM"}, + {"yotta_target": "mkit-armcc", "mbed_toolchain": "ARM"}, + ], + "properties": { + "binary_type": "-combined.hex", + "copy_method": "shell", + "reset_method": "default", + "program_cycle_s": 4, }, - "NRF51822" : { - "yotta_targets": [ - { - "yotta_target": "mkit-gcc", - "mbed_toolchain": "GCC_ARM" - }, - { - "yotta_target": "mkit-armcc", - "mbed_toolchain": "ARM" - } - ], - "properties" : { - "binary_type": "-combined.hex", - "copy_method": "shell", - "reset_method": "default", - "program_cycle_s": 4 - } + }, + "ARCH_BLE": { + "yotta_targets": [{"yotta_target": "tinyble-gcc", "mbed_toolchain": "GCC_ARM"}], + "properties": { + "binary_type": "-combined.hex", + "copy_method": "shell", + "reset_method": "default", + "program_cycle_s": 4, }, - "ARCH_BLE" : { - "yotta_targets": [ - { - "yotta_target": "tinyble-gcc", - "mbed_toolchain": "GCC_ARM" - } - ], - "properties" : { - "binary_type": "-combined.hex", - "copy_method": "shell", - "reset_method": "default", - "program_cycle_s": 4 - } - } + }, } -TARGET_TOOLCAHINS = { - '-armcc': 'ARM', - '-gcc': 'GCC_ARM', - '-iar': 'IAR', -} +TARGET_TOOLCAHINS = {"-armcc": "ARM", "-gcc": "GCC_ARM", "-iar": "IAR"} + def get_mbed_target_call_yotta_target(): - """! Calls yotta's 'yotta target' command to get information about - """ - cmd = ['yotta', '--plain', 'target'] + """! 
Calls yotta's 'yotta target' command to get information about""" + cmd = ["yotta", "--plain", "target"] gt_logger.gt_log("checking yotta target in current directory") - gt_logger.gt_log_tab("calling yotta: %s"% " ".join(cmd)) + gt_logger.gt_log_tab("calling yotta: %s" % " ".join(cmd)) _stdout, _stderr, _ret = run_cli_process(cmd) return _stdout, _stderr, _ret + def parse_yotta_json_for_build_name(yotta_json_content): """! Function parse .yotta.json to fetch set yotta target @param yotta_json_content Content of .yotta_json file @return String with set yotta target name, None if no target found """ try: - return yotta_json_content['build']['target'].split(',')[0] + return yotta_json_content["build"]["target"].split(",")[0] except KeyError: return None -def get_yotta_target_from_local_config(yotta_json='.yotta.json'): + +def get_yotta_target_from_local_config(yotta_json=".yotta.json"): """! Load yotta target from local configuration file @param yotta_json File in format of .yotta.json which stores current target names @return Yotta target set in currect directory, None if no info is available @@ -176,14 +135,15 @@ def get_yotta_target_from_local_config(yotta_json='.yotta.json'): return None try: - gt_logger.gt_log("parsing local file '%s' for target information"% yotta_json) + gt_logger.gt_log("parsing local file '%s' for target information" % yotta_json) - with open(yotta_json, 'r') as f: + with open(yotta_json, "r") as f: return parse_yotta_json_for_build_name(json.load(f)) except (IOError, ValueError) as e: gt_logger.gt_log(str(e)) return None + def get_mbed_target_from_current_dir(): """! Function uses yotta target command to check current target @return Returns current target or None if target not found (e.g. not yotta package) @@ -202,6 +162,7 @@ def get_mbed_target_from_current_dir(): break return result + def parse_yotta_target_cmd_output(line): """! Function parsed output from command 'yotta --plain target' looking for valid target names. 
First one will be used as 'default' @@ -218,13 +179,14 @@ def parse_yotta_target_cmd_output(line): """ # Regular expression to parse stings like: 'frdm-k64f-gcc 2.0.0' - m = re.search(r'[\w\d_-]+ \d+\.\d+\.\d+', line) + m = re.search(r"[\w\d_-]+ \d+\.\d+\.\d+", line) if m and len(m.group()): result = line.split()[0] return result return None -def get_mbed_targets_from_yotta_local_module(mbed_classic_name, yotta_targets_path='./yotta_targets'): + +def get_mbed_targets_from_yotta_local_module(mbed_classic_name, yotta_targets_path="./yotta_targets"): """! Function is parsing local yotta targets to fetch matching mbed device target's name @return Function returns list of possible targets or empty list if value not found """ @@ -234,38 +196,47 @@ def get_mbed_targets_from_yotta_local_module(mbed_classic_name, yotta_targets_pa return result # All local directories with yotta targets - target_dirs = [target_dir_name for target_dir_name in os.listdir(yotta_targets_path) if os.path.isdir(os.path.join(yotta_targets_path, target_dir_name))] + target_dirs = [ + target_dir_name + for target_dir_name in os.listdir(yotta_targets_path) + if os.path.isdir(os.path.join(yotta_targets_path, target_dir_name)) + ] - gt_logger.gt_log("local yotta target search in '%s' for compatible mbed-target '%s'"% (gt_logger.gt_bright(yotta_targets_path), gt_logger.gt_bright(mbed_classic_name.lower().strip()))) + gt_logger.gt_log( + "local yotta target search in '%s' for compatible mbed-target '%s'" + % (gt_logger.gt_bright(yotta_targets_path), gt_logger.gt_bright(mbed_classic_name.lower().strip())) + ) for target_dir in target_dirs: - path = os.path.join(yotta_targets_path, target_dir, 'target.json') + path = os.path.join(yotta_targets_path, target_dir, "target.json") try: - with open(path, 'r') as data_file: + with open(path, "r") as data_file: target_json_data = json.load(data_file) yotta_target_name = parse_mbed_target_from_target_json(mbed_classic_name, target_json_data) if yotta_target_name: 
target_dir_name = os.path.join(yotta_targets_path, target_dir) - gt_logger.gt_log_tab("inside '%s' found compatible target '%s'"% (gt_logger.gt_bright(target_dir_name), gt_logger.gt_bright(yotta_target_name))) + gt_logger.gt_log_tab( + "inside '%s' found compatible target '%s'" + % (gt_logger.gt_bright(target_dir_name), gt_logger.gt_bright(yotta_target_name)) + ) result.append(yotta_target_name) except IOError as e: gt_logger.gt_log_err(str(e)) return result + def parse_mbed_target_from_target_json(mbed_classic_name, target_json_data): - if (not target_json_data or - 'keywords' not in target_json_data or - 'name' not in target_json_data): + if not target_json_data or "keywords" not in target_json_data or "name" not in target_json_data: return None - for keyword in target_json_data['keywords']: - target, _, name = keyword.partition(':') - if (target == "mbed-target" and - name.lower() == mbed_classic_name.lower()): - return target_json_data['name'] + for keyword in target_json_data["keywords"]: + target, _, name = keyword.partition(":") + if target == "mbed-target" and name.lower() == mbed_classic_name.lower(): + return target_json_data["name"] return None + def get_mbed_targets_from_yotta(mbed_classic_name): """! 
Function is using 'yotta search' command to fetch matching mbed device target's name @return Function returns list of possible targets or empty list if value not found @@ -277,9 +248,9 @@ def get_mbed_targets_from_yotta(mbed_classic_name): Note: Function prints on console """ result = [] - cmd = ['yotta', '--plain', 'search', '-k', 'mbed-target:%s'% mbed_classic_name.lower().strip(), 'target'] - gt_logger.gt_log("yotta search for mbed-target '%s'"% gt_logger.gt_bright(mbed_classic_name.lower().strip())) - gt_logger.gt_log_tab("calling yotta: %s"% " ".join(cmd)) + cmd = ["yotta", "--plain", "search", "-k", "mbed-target:%s" % mbed_classic_name.lower().strip(), "target"] + gt_logger.gt_log("yotta search for mbed-target '%s'" % gt_logger.gt_bright(mbed_classic_name.lower().strip())) + gt_logger.gt_log_tab("calling yotta: %s" % " ".join(cmd)) _stdout, _stderr, _ret = run_cli_process(cmd) if not _ret: for line in _stdout.splitlines(): @@ -292,13 +263,15 @@ def get_mbed_targets_from_yotta(mbed_classic_name): gt_logger.gt_log_err("calling yotta search failed!") return result + def parse_yotta_search_cmd_output(line): - m = re.search(r'([\w\d-]+) \d+\.\d+\.\d+[$:]?', line) + m = re.search(r"([\w\d-]+) \d+\.\d+\.\d+[$:]?", line) if m and len(m.groups()): yotta_target_name = m.groups()[0] return yotta_target_name return None + def add_target_info_mapping(mbed_classic_name, map_platform_to_yt_target=None, use_yotta_registry=False): """! 
Adds more target information to TARGET_INFO_MAPPING by searching in yotta registry @return Returns TARGET_INFO_MAPPING updated with new targets @@ -309,7 +282,7 @@ def add_target_info_mapping(mbed_classic_name, map_platform_to_yt_target=None, u # We can also use yotta registry to check for target compatibility (slower) yotta_registry_target_search = get_mbed_targets_from_yotta(mbed_classic_name) yotta_target_search.extend(yotta_registry_target_search) - yotta_target_search = list(set(yotta_target_search)) # Reduce repeated values + yotta_target_search = list(set(yotta_target_search)) # Reduce repeated values # Add extra targets to already existing and detected in the system platforms if map_platform_to_yt_target and mbed_classic_name in map_platform_to_yt_target: @@ -318,44 +291,44 @@ def add_target_info_mapping(mbed_classic_name, map_platform_to_yt_target=None, u # Check if this targets are already there if mbed_classic_name not in TARGET_INFO_MAPPING: TARGET_INFO_MAPPING[mbed_classic_name] = { - "yotta_targets": [], - "properties" : { + "yotta_targets": [], + "properties": { "binary_type": ".bin", "copy_method": "shell", "reset_method": "default", - "program_cycle_s": 6 - } + "program_cycle_s": 6, + }, } target_desc = TARGET_INFO_MAPPING[mbed_classic_name] - if 'yotta_targets' not in target_desc: + if "yotta_targets" not in target_desc: return TARGET_INFO_MAPPING # All yt targets supported by 'mbed_classic_name' board mbeds_yt_targets = [] - for target in target_desc['yotta_targets']: - mbeds_yt_targets.append(target['yotta_target']) + for target in target_desc["yotta_targets"]: + mbeds_yt_targets.append(target["yotta_target"]) # Check if any of yotta targets is new to TARGET_INFO_MAPPING for new_yt_target in yotta_target_search: if new_yt_target in mbeds_yt_targets: continue - gt_logger.gt_log_tab("discovered extra target '%s'"% new_yt_target) + gt_logger.gt_log_tab("discovered extra target '%s'" % new_yt_target) # We want to at least guess toolchain type by 
target's name suffix - mbed_toolchain = 'UNKNOWN' + mbed_toolchain = "UNKNOWN" for toolchain_suffix in TARGET_TOOLCAHINS: if new_yt_target.endswith(toolchain_suffix): mbed_toolchain = TARGET_TOOLCAHINS[toolchain_suffix] break - TARGET_INFO_MAPPING[mbed_classic_name]['yotta_targets'].append({ - 'yotta_target': new_yt_target, - 'mbed_toolchain': mbed_toolchain - }) + TARGET_INFO_MAPPING[mbed_classic_name]["yotta_targets"].append( + {"yotta_target": new_yt_target, "mbed_toolchain": mbed_toolchain} + ) return TARGET_INFO_MAPPING + def get_mbed_clasic_target_info(mbed_classic_name, map_platform_to_yt_target=None, use_yotta_registry=False): """! Function resolves meta-data information about target given as mbed classic name. @param mbed_classic_name Mbed classic (mbed 2.0) name e.g. K64F, LPC1768 etc. @@ -366,6 +339,7 @@ def get_mbed_clasic_target_info(mbed_classic_name, map_platform_to_yt_target=Non TARGET_INFO_MAPPING = add_target_info_mapping(mbed_classic_name, map_platform_to_yt_target, use_yotta_registry) return TARGET_INFO_MAPPING[mbed_classic_name] if mbed_classic_name in TARGET_INFO_MAPPING else None + def get_binary_type_for_platform(platform): """ Gives binary type for the given platform. 
@@ -373,8 +347,9 @@ def get_binary_type_for_platform(platform): :param platform: :return: """ - #return TARGET_INFO_MAPPING[platform]['properties']["binary_type"] - return get_platform_property(platform, 'binary_type') + # return TARGET_INFO_MAPPING[platform]['properties']["binary_type"] + return get_platform_property(platform, "binary_type") + def get_platform_property(platform, property): """ @@ -385,8 +360,7 @@ def get_platform_property(platform, property): """ default = _get_platform_property_from_default(property) - from_targets_json = _get_platform_property_from_targets( - platform, property, default) + from_targets_json = _get_platform_property_from_targets(platform, property, default) if from_targets_json: return from_targets_json from_info_mapping = _get_platform_property_from_info_mapping(platform, property) @@ -394,13 +368,16 @@ def get_platform_property(platform, property): return from_info_mapping return default + def _get_platform_property_from_default(property): with suppress(KeyError): - return TARGET_INFO_MAPPING['default'][property] + return TARGET_INFO_MAPPING["default"][property] + def _get_platform_property_from_info_mapping(platform, property): with suppress(KeyError): - return TARGET_INFO_MAPPING[platform]['properties'][property] + return TARGET_INFO_MAPPING[platform]["properties"][property] + def _platform_property_from_targets_json(targets, platform, property, default): """! 
Get a platforms's property from the target data structure in @@ -415,22 +392,25 @@ def _platform_property_from_targets_json(targets, platform, property, default): with suppress(KeyError): return targets[platform][property] with suppress(KeyError): - for inherited_target in targets[platform]['inherits']: + for inherited_target in targets[platform]["inherits"]: result = _platform_property_from_targets_json(targets, inherited_target, property, None) if result: return result if platform in targets: return default -IGNORED_DIRS = ['.build', 'BUILD', 'tools'] + +IGNORED_DIRS = [".build", "BUILD", "tools"] + def _find_targets_json(path): for root, dirs, files in walk(path, followlinks=True): for ignored_dir in IGNORED_DIRS: if ignored_dir in dirs: dirs.remove(ignored_dir) - if 'targets.json' in files: - yield os.path.join(root, 'targets.json') + if "targets.json" in files: + yield os.path.join(root, "targets.json") + def _get_platform_property_from_targets(platform, property, default): """ @@ -441,7 +421,7 @@ def _get_platform_property_from_targets(platform, property, default): """ for targets_path in _find_targets_json(os.getcwd()): with suppress(IOError, ValueError): - with open(targets_path, 'r') as f: + with open(targets_path, "r") as f: targets = json.load(f) result = _platform_property_from_targets_json(targets, platform, property, default) if result: diff --git a/tools/python/mbed_os_tools/test/mbed_test_api.py b/tools/python/mbed_os_tools/test/mbed_test_api.py index a4204f2af76..412dc895f74 100644 --- a/tools/python/mbed_os_tools/test/mbed_test_api.py +++ b/tools/python/mbed_os_tools/test/mbed_test_api.py @@ -29,7 +29,6 @@ from .tests_spec import TestSpec - # Return codes for test script TEST_RESULT_OK = "OK" TEST_RESULT_FAIL = "FAIL" @@ -45,41 +44,44 @@ TEST_RESULT_BUILD_FAILED = "BUILD_FAILED" TEST_RESULT_SYNC_FAILED = "SYNC_FAILED" -TEST_RESULTS = [TEST_RESULT_OK, - TEST_RESULT_FAIL, - TEST_RESULT_ERROR, - TEST_RESULT_SKIPPED, - TEST_RESULT_UNDEF, - 
TEST_RESULT_IOERR_COPY, - TEST_RESULT_IOERR_DISK, - TEST_RESULT_IOERR_SERIAL, - TEST_RESULT_TIMEOUT, - TEST_RESULT_NO_IMAGE, - TEST_RESULT_MBED_ASSERT, - TEST_RESULT_BUILD_FAILED, - TEST_RESULT_SYNC_FAILED - ] - -TEST_RESULT_MAPPING = {"success" : TEST_RESULT_OK, - "failure" : TEST_RESULT_FAIL, - "error" : TEST_RESULT_ERROR, - "skipped" : TEST_RESULT_SKIPPED, - "end" : TEST_RESULT_UNDEF, - "ioerr_copy" : TEST_RESULT_IOERR_COPY, - "ioerr_disk" : TEST_RESULT_IOERR_DISK, - "ioerr_serial" : TEST_RESULT_IOERR_SERIAL, - "timeout" : TEST_RESULT_TIMEOUT, - "no_image" : TEST_RESULT_NO_IMAGE, - "mbed_assert" : TEST_RESULT_MBED_ASSERT, - "build_failed" : TEST_RESULT_BUILD_FAILED, - "sync_failed" : TEST_RESULT_SYNC_FAILED - } +TEST_RESULTS = [ + TEST_RESULT_OK, + TEST_RESULT_FAIL, + TEST_RESULT_ERROR, + TEST_RESULT_SKIPPED, + TEST_RESULT_UNDEF, + TEST_RESULT_IOERR_COPY, + TEST_RESULT_IOERR_DISK, + TEST_RESULT_IOERR_SERIAL, + TEST_RESULT_TIMEOUT, + TEST_RESULT_NO_IMAGE, + TEST_RESULT_MBED_ASSERT, + TEST_RESULT_BUILD_FAILED, + TEST_RESULT_SYNC_FAILED, +] + +TEST_RESULT_MAPPING = { + "success": TEST_RESULT_OK, + "failure": TEST_RESULT_FAIL, + "error": TEST_RESULT_ERROR, + "skipped": TEST_RESULT_SKIPPED, + "end": TEST_RESULT_UNDEF, + "ioerr_copy": TEST_RESULT_IOERR_COPY, + "ioerr_disk": TEST_RESULT_IOERR_DISK, + "ioerr_serial": TEST_RESULT_IOERR_SERIAL, + "timeout": TEST_RESULT_TIMEOUT, + "no_image": TEST_RESULT_NO_IMAGE, + "mbed_assert": TEST_RESULT_MBED_ASSERT, + "build_failed": TEST_RESULT_BUILD_FAILED, + "sync_failed": TEST_RESULT_SYNC_FAILED, +} # This value is used to tell caller than run_host_test function failed while invoking mbedhtrun # Just a value greater than zero RUN_HOST_TEST_POPEN_ERROR = 1729 + def get_test_result(output): """! 
Parse test 'output' data @details If test result not found returns by default TEST_RESULT_TIMEOUT value @@ -103,15 +105,14 @@ def run_command(cmd): @return Value returned by subprocess.Popen, if failed return None """ try: - p = Popen(cmd, - stdout=PIPE, - stderr=STDOUT) + p = Popen(cmd, stdout=PIPE, stderr=STDOUT) except OSError as e: gt_logger.gt_log_err("run_host_test.run_command(%s) failed!" % str(cmd)) gt_logger.gt_log_tab(str(e)) return None return p + def run_htrun(cmd, verbose): # detect overflow when running tests htrun_output = str() @@ -121,9 +122,9 @@ def run_htrun(cmd, verbose): # int value > 0 notifies caller that starting of host test process failed return RUN_HOST_TEST_POPEN_ERROR - htrun_failure_line = re.compile(r'\[RXD\] (:\d+::FAIL: .*)') + htrun_failure_line = re.compile(r"\[RXD\] (:\d+::FAIL: .*)") - for line in iter(p.stdout.readline, b''): + for line in iter(p.stdout.readline, b""): decoded_line = line.decode("utf-8", "replace") htrun_output += decoded_line # When dumping output to file both \r and \n will be a new line @@ -134,7 +135,7 @@ def run_htrun(cmd, verbose): gt_logger.gt_log_err(test_error.group(1)) if verbose: - output = decoded_line.rstrip() + '\n' + output = decoded_line.rstrip() + "\n" try: # Try to output decoded unicode. Should be fine in most Python 3 # environments. @@ -154,24 +155,25 @@ def run_htrun(cmd, verbose): returncode = p.wait() return returncode, htrun_output + def get_testcase_count_and_names(output): - """ Fetches from log utest events with test case count (__testcase_count) and test case names (__testcase_name)* - - @details - Example test case count + names prints - [1467197417.34][HTST][INF] host test detected: default_auto - [1467197417.36][CONN][RXD] {{__testcase_count;2}} - [1467197417.36][CONN][INF] found KV pair in stream: {{__testcase_count;2}}, queued... - [1467197417.39][CONN][RXD] >>> Running 2 test cases... 
- [1467197417.43][CONN][RXD] {{__testcase_name;C strings: strtok}} - [1467197417.43][CONN][INF] found KV pair in stream: {{__testcase_name;C strings: strtok}}, queued... - [1467197417.47][CONN][RXD] {{__testcase_name;C strings: strpbrk}} - [1467197417.47][CONN][INF] found KV pair in stream: {{__testcase_name;C strings: strpbrk}}, queued... - [1467197417.52][CONN][RXD] >>> Running case #1: 'C strings: strtok'... - [1467197417.56][CONN][RXD] {{__testcase_start;C strings: strtok}} - [1467197417.56][CONN][INF] found KV pair in stream: {{__testcase_start;C strings: strtok}}, queued... - - @return Tuple with (test case count, list of test case names in order of appearance) + """Fetches from log utest events with test case count (__testcase_count) and test case names (__testcase_name)* + + @details + Example test case count + names prints + [1467197417.34][HTST][INF] host test detected: default_auto + [1467197417.36][CONN][RXD] {{__testcase_count;2}} + [1467197417.36][CONN][INF] found KV pair in stream: {{__testcase_count;2}}, queued... + [1467197417.39][CONN][RXD] >>> Running 2 test cases... + [1467197417.43][CONN][RXD] {{__testcase_name;C strings: strtok}} + [1467197417.43][CONN][INF] found KV pair in stream: {{__testcase_name;C strings: strtok}}, queued... + [1467197417.47][CONN][RXD] {{__testcase_name;C strings: strpbrk}} + [1467197417.47][CONN][INF] found KV pair in stream: {{__testcase_name;C strings: strpbrk}}, queued... + [1467197417.52][CONN][RXD] >>> Running case #1: 'C strings: strtok'... + [1467197417.56][CONN][RXD] {{__testcase_start;C strings: strtok}} + [1467197417.56][CONN][INF] found KV pair in stream: {{__testcase_start;C strings: strtok}}, queued... 
+ + @return Tuple with (test case count, list of test case names in order of appearance) """ testcase_count = 0 testcase_names = [] @@ -180,7 +182,6 @@ def get_testcase_count_and_names(output): re_tc_names = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\].*\{\{(__testcase_name);([^;]+)\}\}") for line in output.splitlines(): - m = re_tc_names.search(line) if m: testcase_names.append(m.group(5)) @@ -192,20 +193,21 @@ def get_testcase_count_and_names(output): return (testcase_count, testcase_names) + def get_testcase_utest(output, test_case_name): - """ Fetches from log all prints for given utest test case (from being print to end print) - - @details - Example test case prints - [1455553765.52][CONN][RXD] >>> Running case #1: 'Simple Test'... - [1455553765.52][CONN][RXD] {{__testcase_start;Simple Test}} - [1455553765.52][CONN][INF] found KV pair in stream: {{__testcase_start;Simple Test}}, queued... - [1455553765.58][CONN][RXD] Simple test called - [1455553765.58][CONN][RXD] {{__testcase_finish;Simple Test;1;0}} - [1455553765.58][CONN][INF] found KV pair in stream: {{__testcase_finish;Simple Test;1;0}}, queued... - [1455553765.70][CONN][RXD] >>> 'Simple Test': 1 passed, 0 failed - - @return log lines between start and end test case print + """Fetches from log all prints for given utest test case (from being print to end print) + + @details + Example test case prints + [1455553765.52][CONN][RXD] >>> Running case #1: 'Simple Test'... + [1455553765.52][CONN][RXD] {{__testcase_start;Simple Test}} + [1455553765.52][CONN][INF] found KV pair in stream: {{__testcase_start;Simple Test}}, queued... + [1455553765.58][CONN][RXD] Simple test called + [1455553765.58][CONN][RXD] {{__testcase_finish;Simple Test;1;0}} + [1455553765.58][CONN][INF] found KV pair in stream: {{__testcase_finish;Simple Test;1;0}}, queued... 
+ [1455553765.70][CONN][RXD] >>> 'Simple Test': 1 passed, 0 failed + + @return log lines between start and end test case print """ # Return string with all non-alphanumerics backslashed; @@ -213,12 +215,15 @@ def get_testcase_utest(output, test_case_name): # string that may have regular expression metacharacters in it. escaped_test_case_name = re.escape(test_case_name) - re_tc_utest_log_start = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> Running case #(\d)+: '(%s)'"% escaped_test_case_name) - re_tc_utest_log_finish = re.compile(r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> '(%s)': (\d+) passed, (\d+) failed"% escaped_test_case_name) + re_tc_utest_log_start = re.compile( + r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> Running case #(\d)+: '(%s)'" % escaped_test_case_name + ) + re_tc_utest_log_finish = re.compile( + r"^\[(\d+\.\d+)\]\[(\w+)\]\[(\w+)\] >>> '(%s)': (\d+) passed, (\d+) failed" % escaped_test_case_name + ) tc_log_lines = [] for line in output.splitlines(): - # utest test case start string search m = re_tc_utest_log_start.search(line) if m: @@ -237,6 +242,7 @@ def get_testcase_utest(output, test_case_name): return tc_log_lines + def get_coverage_data(build_path, output): # Example GCOV output # [1456840876.73][CONN][RXD] {{__coverage_start;c:\Work\core-util/source/PoolAllocator.cpp.gcda;6164636772393034c2733f32...a33e...b9}} @@ -251,18 +257,20 @@ def get_coverage_data(build_path, output): coverage_dump_file(build_path, gcov_path, bin_gcov_payload) except Exception as e: gt_logger.gt_log_err("error while handling GCOV data: " + str(e)) - gt_logger.gt_log_tab("storing %d bytes in '%s'"% (len(bin_gcov_payload), gcov_path)) + gt_logger.gt_log_tab("storing %d bytes in '%s'" % (len(bin_gcov_payload), gcov_path)) + def get_printable_string(unprintable_string): return "".join(filter(lambda x: x in string.printable, unprintable_string)) + def get_testcase_summary(output): """! 
Searches for test case summary - String to find: - [1459246276.95][CONN][INF] found KV pair in stream: {{__testcase_summary;7;1}}, queued... + String to find: + [1459246276.95][CONN][INF] found KV pair in stream: {{__testcase_summary;7;1}}, queued... - @return Tuple of (passed, failed) or None if no summary found + @return Tuple of (passed, failed) or None if no summary found """ re_tc_summary = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_summary);(\d+);(\d+)\}\}") for line in output.splitlines(): @@ -272,6 +280,7 @@ def get_testcase_summary(output): return int(passes), int(failures) return None + def get_testcase_result(output): result_test_cases = {} # Test cases results re_tc_start = re.compile(r"^\[(\d+\.\d+)\][^\{]+\{\{(__testcase_start);([^;]+)\}\}") @@ -284,16 +293,16 @@ def get_testcase_result(output): result_test_cases[testcase_id] = {} # Data collected when __testcase_start is fetched - result_test_cases[testcase_id]['time_start'] = float(timestamp) - result_test_cases[testcase_id]['utest_log'] = get_testcase_utest(output, testcase_id) + result_test_cases[testcase_id]["time_start"] = float(timestamp) + result_test_cases[testcase_id]["utest_log"] = get_testcase_utest(output, testcase_id) # Data collected when __testcase_finish is fetched - result_test_cases[testcase_id]['duration'] = 0.0 - result_test_cases[testcase_id]['result_text'] = 'ERROR' - result_test_cases[testcase_id]['time_end'] = float(timestamp) - result_test_cases[testcase_id]['passed'] = 0 - result_test_cases[testcase_id]['failed'] = 0 - result_test_cases[testcase_id]['result'] = -4096 + result_test_cases[testcase_id]["duration"] = 0.0 + result_test_cases[testcase_id]["result_text"] = "ERROR" + result_test_cases[testcase_id]["time_end"] = float(timestamp) + result_test_cases[testcase_id]["passed"] = 0 + result_test_cases[testcase_id]["failed"] = 0 + result_test_cases[testcase_id]["result"] = -4096 continue m = re_tc_finish.search(line) @@ -303,32 +312,34 @@ def 
get_testcase_result(output): testcase_passed = int(testcase_passed) testcase_failed = int(testcase_failed) - testcase_result = 0 # OK case + testcase_result = 0 # OK case if testcase_failed != 0: - testcase_result = testcase_failed # testcase_result > 0 is FAILure + testcase_result = testcase_failed # testcase_result > 0 is FAILure if testcase_id not in result_test_cases: result_test_cases[testcase_id] = {} # Setting some info about test case itself - result_test_cases[testcase_id]['duration'] = 0.0 - result_test_cases[testcase_id]['result_text'] = 'OK' - result_test_cases[testcase_id]['time_end'] = float(timestamp) - result_test_cases[testcase_id]['passed'] = testcase_passed - result_test_cases[testcase_id]['failed'] = testcase_failed - result_test_cases[testcase_id]['result'] = testcase_result + result_test_cases[testcase_id]["duration"] = 0.0 + result_test_cases[testcase_id]["result_text"] = "OK" + result_test_cases[testcase_id]["time_end"] = float(timestamp) + result_test_cases[testcase_id]["passed"] = testcase_passed + result_test_cases[testcase_id]["failed"] = testcase_failed + result_test_cases[testcase_id]["result"] = testcase_result # Assign human readable test case result if testcase_result > 0: - result_test_cases[testcase_id]['result_text'] = 'FAIL' + result_test_cases[testcase_id]["result_text"] = "FAIL" elif testcase_result < 0: - result_test_cases[testcase_id]['result_text'] = 'ERROR' + result_test_cases[testcase_id]["result_text"] = "ERROR" - if 'time_start' in result_test_cases[testcase_id]: - result_test_cases[testcase_id]['duration'] = result_test_cases[testcase_id]['time_end'] - result_test_cases[testcase_id]['time_start'] + if "time_start" in result_test_cases[testcase_id]: + result_test_cases[testcase_id]["duration"] = ( + result_test_cases[testcase_id]["time_end"] - result_test_cases[testcase_id]["time_start"] + ) else: - result_test_cases[testcase_id]['duration'] = 0.0 + result_test_cases[testcase_id]["duration"] = 0.0 - if 'utest_log' not 
in result_test_cases[testcase_id]: - result_test_cases[testcase_id]['utest_log'] = "__testcase_start tag not found." + if "utest_log" not in result_test_cases[testcase_id]: + result_test_cases[testcase_id]["utest_log"] = "__testcase_start tag not found." ### Adding missing test cases which were defined with __testcase_name # Get test case names reported by utest + test case names @@ -339,26 +350,27 @@ def get_testcase_result(output): if testcase_id not in result_test_cases: result_test_cases[testcase_id] = {} # Data collected when __testcase_start is fetched - result_test_cases[testcase_id]['time_start'] = 0.0 - result_test_cases[testcase_id]['utest_log'] = [] + result_test_cases[testcase_id]["time_start"] = 0.0 + result_test_cases[testcase_id]["utest_log"] = [] # Data collected when __testcase_finish is fetched - result_test_cases[testcase_id]['duration'] = 0.0 - result_test_cases[testcase_id]['result_text'] = 'SKIPPED' - result_test_cases[testcase_id]['time_end'] = 0.0 - result_test_cases[testcase_id]['passed'] = 0 - result_test_cases[testcase_id]['failed'] = 0 - result_test_cases[testcase_id]['result'] = -8192 + result_test_cases[testcase_id]["duration"] = 0.0 + result_test_cases[testcase_id]["result_text"] = "SKIPPED" + result_test_cases[testcase_id]["time_end"] = 0.0 + result_test_cases[testcase_id]["passed"] = 0 + result_test_cases[testcase_id]["failed"] = 0 + result_test_cases[testcase_id]["result"] = -8192 return result_test_cases + def get_memory_metrics(output): """! Searches for test case memory metrics - String to find: - [1477505660.40][CONN][INF] found KV pair in stream: {{max_heap_usage;2284}}, queued... + String to find: + [1477505660.40][CONN][INF] found KV pair in stream: {{max_heap_usage;2284}}, queued... 
- @return Tuple of (max heap usage, thread info list), where thread info list - is a list of dictionaries with format {entry, arg, max_stack, stack_size} + @return Tuple of (max heap usage, thread info list), where thread info list + is a list of dictionaries with format {entry, arg, max_stack, stack_size} """ max_heap_usage = None reserved_heap = None @@ -382,69 +394,75 @@ def get_memory_metrics(output): _, _, thread_entry_arg, thread_max_stack, thread_stack_size = m.groups() thread_max_stack = int(thread_max_stack) thread_stack_size = int(thread_stack_size) - thread_entry_arg_split = thread_entry_arg.split('-') + thread_entry_arg_split = thread_entry_arg.split("-") thread_entry = thread_entry_arg_split[0] thread_info[thread_entry_arg] = { - 'entry': thread_entry, - 'max_stack': thread_max_stack, - 'stack_size': thread_stack_size + "entry": thread_entry, + "max_stack": thread_max_stack, + "stack_size": thread_stack_size, } if len(thread_entry_arg_split) > 1: thread_arg = thread_entry_arg_split[1] - thread_info[thread_entry_arg]['arg'] = thread_arg + thread_info[thread_entry_arg]["arg"] = thread_arg thread_info_list = list(thread_info.values()) return max_heap_usage, reserved_heap, thread_info_list + def get_thread_with_max_stack_size(thread_stack_info): max_thread_stack_size = 0 max_thread = None max_stack_usage_total = 0 reserved_stack_total = 0 for cur_thread_stack_info in thread_stack_info: - if cur_thread_stack_info['stack_size'] > max_thread_stack_size: - max_thread_stack_size = cur_thread_stack_info['stack_size'] + if cur_thread_stack_info["stack_size"] > max_thread_stack_size: + max_thread_stack_size = cur_thread_stack_info["stack_size"] max_thread = cur_thread_stack_info - max_stack_usage_total += cur_thread_stack_info['max_stack'] - reserved_stack_total += cur_thread_stack_info['stack_size'] - max_thread['max_stack_usage_total'] = max_stack_usage_total - max_thread['reserved_stack_total'] = reserved_stack_total + max_stack_usage_total += 
cur_thread_stack_info["max_stack"] + reserved_stack_total += cur_thread_stack_info["stack_size"] + max_thread["max_stack_usage_total"] = max_stack_usage_total + max_thread["reserved_stack_total"] = reserved_stack_total return max_thread -def get_thread_stack_info_summary(thread_stack_info): +def get_thread_stack_info_summary(thread_stack_info): max_thread_info = get_thread_with_max_stack_size(thread_stack_info) summary = { - 'max_stack_size': max_thread_info['stack_size'], - 'max_stack_usage': max_thread_info['max_stack'], - 'max_stack_usage_total': max_thread_info['max_stack_usage_total'], - 'reserved_stack_total': max_thread_info['reserved_stack_total'] + "max_stack_size": max_thread_info["stack_size"], + "max_stack_usage": max_thread_info["max_stack"], + "max_stack_usage_total": max_thread_info["max_stack_usage_total"], + "reserved_stack_total": max_thread_info["reserved_stack_total"], } return summary -def log_mbed_devices_in_table(muts, cols = ['platform_name', 'platform_name_unique', 'serial_port', 'mount_point', 'target_id']): + +def log_mbed_devices_in_table( + muts, cols=["platform_name", "platform_name_unique", "serial_port", "mount_point", "target_id"] +): """! Print table of muts using prettytable @param muts List of MUTs to print in table @param cols Columns used to for a table, required for each mut @return string with formatted prettytable """ from prettytable import PrettyTable, HEADER + pt = PrettyTable(cols, junction_char="|", hrules=HEADER) for col in cols: pt.align[col] = "l" - pt.padding_width = 1 # One space between column edges and contents (default) + pt.padding_width = 1 # One space between column edges and contents (default) row = [] for mut in muts: for col in cols: - cell_val = mut[col] if col in mut else 'not detected' + cell_val = mut[col] if col in mut else "not detected" row.append(cell_val) pt.add_row(row) row = [] return pt.get_string() + def get_test_spec(opts): """! 
Closure encapsulating how we get test specification and load it from file of from yotta module @return Returns tuple of (test specification, ret code). Test specification == None if test spec load was not successful @@ -464,7 +482,12 @@ def get_all_test_specs_from_build_dir(path_to_scan): @param path_to_scan Directory path used to recursively search for test_spec.json @result List of locations of test_spec.json """ - return [os.path.join(dp, f) for dp, dn, filenames in os.walk(path_to_scan) for f in filenames if f == 'test_spec.json'] + return [ + os.path.join(dp, f) + for dp, dn, filenames in os.walk(path_to_scan) + for f in filenames + if f == "test_spec.json" + ] def merge_multiple_test_specifications_from_file_list(test_spec_file_name_list): """! For each file in test_spec_file_name_list merge all test specifications into one @@ -474,25 +497,25 @@ def merge_multiple_test_specifications_from_file_list(test_spec_file_name_list): def copy_builds_between_test_specs(source, destination): """! 
Copies build key-value pairs between two test_spec dicts - @param source Source dictionary - @param destination Dictionary with will be applied with 'builds' key-values - @return Dictionary with merged source + @param source Source dictionary + @param destination Dictionary with will be applied with 'builds' key-values + @return Dictionary with merged source """ result = destination.copy() - if 'builds' in source and 'builds' in destination: - for k in source['builds']: - result['builds'][k] = source['builds'][k] + if "builds" in source and "builds" in destination: + for k in source["builds"]: + result["builds"][k] = source["builds"][k] return result merged_test_spec = {} for test_spec_file in test_spec_file_name_list: - gt_logger.gt_log_tab("using '%s'"% test_spec_file) + gt_logger.gt_log_tab("using '%s'" % test_spec_file) try: - with open(test_spec_file, 'r') as f: + with open(test_spec_file, "r") as f: test_spec_data = json.load(f) merged_test_spec = copy_builds_between_test_specs(merged_test_spec, test_spec_data) except Exception as e: - gt_logger.gt_log_err("Unexpected error while processing '%s' test specification file"% test_spec_file) + gt_logger.gt_log_err("Unexpected error while processing '%s' test specification file" % test_spec_file) gt_logger.gt_log_tab(str(e)) merged_test_spec = {} @@ -503,30 +526,30 @@ def copy_builds_between_test_specs(source, destination): # Test specification look-up if opts.test_spec: # Loading test specification from command line specified file - gt_logger.gt_log("test specification file '%s' (specified with --test-spec option)"% opts.test_spec) - elif os.path.exists('test_spec.json'): + gt_logger.gt_log("test specification file '%s' (specified with --test-spec option)" % opts.test_spec) + elif os.path.exists("test_spec.json"): # Test specification file exists in current directory gt_logger.gt_log("using 'test_spec.json' from current directory!") - test_spec_file_name = 'test_spec.json' - elif 'BUILD' in 
os.listdir(os.getcwd()): + test_spec_file_name = "test_spec.json" + elif "BUILD" in os.listdir(os.getcwd()): # Checking 'BUILD' directory for test specifications # Using `os.listdir()` since it preserves case - test_spec_file_name_list = get_all_test_specs_from_build_dir('BUILD') - elif os.path.exists('.build'): + test_spec_file_name_list = get_all_test_specs_from_build_dir("BUILD") + elif os.path.exists(".build"): # Checking .build directory for test specifications - test_spec_file_name_list = get_all_test_specs_from_build_dir('.build') - elif os.path.exists('mbed-os') and 'BUILD' in os.listdir('mbed-os'): + test_spec_file_name_list = get_all_test_specs_from_build_dir(".build") + elif os.path.exists("mbed-os") and "BUILD" in os.listdir("mbed-os"): # Checking mbed-os/.build directory for test specifications # Using `os.listdir()` since it preserves case - test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join(['mbed-os', 'BUILD'])) - elif os.path.exists(os.path.join('mbed-os', '.build')): + test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join(["mbed-os", "BUILD"])) + elif os.path.exists(os.path.join("mbed-os", ".build")): # Checking mbed-os/.build directory for test specifications - test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join(['mbed-os', '.build'])) + test_spec_file_name_list = get_all_test_specs_from_build_dir(os.path.join(["mbed-os", ".build"])) # Actual load and processing of test specification from sources if test_spec_file_name: # Test specification from command line (--test-spec) or default test_spec.json will be used - gt_logger.gt_log("using '%s' from current directory!"% test_spec_file_name) + gt_logger.gt_log("using '%s' from current directory!" 
% test_spec_file_name) test_spec = TestSpec(test_spec_file_name) if opts.list_binaries: list_binaries_for_builds(test_spec) @@ -538,7 +561,7 @@ def copy_builds_between_test_specs(source, destination): if opts.list_binaries: list_binaries_for_builds(test_spec) return None, 0 - elif os.path.exists('module.json'): + elif os.path.exists("module.json"): # If inside yotta module load module data and generate test spec gt_logger.gt_log("using 'module.json' from current directory!") if opts.list_binaries: @@ -552,26 +575,28 @@ def copy_builds_between_test_specs(source, destination): return None, -1 return test_spec, 0 + def get_test_build_properties(test_spec, test_build_name): result = dict() test_builds = test_spec.get_test_builds(filter_by_names=[test_build_name]) if test_builds: test_build = test_builds[0] - result['name'] = test_build.get_name() - result['toolchain'] = test_build.get_toolchain() - result['target'] = test_build.get_platform() + result["name"] = test_build.get_name() + result["toolchain"] = test_build.get_toolchain() + result["target"] = test_build.get_platform() return result else: return None + def parse_global_resource_mgr(global_resource_mgr): """! Parses --grm switch with global resource manager info @details K64F:module_name:10.2.123.43:3334 @return tuple wity four elements from GRM or None if error """ try: - platform_name, module_name, leftover = global_resource_mgr.split(':', 2) - parts = leftover.rsplit(':', 1) + platform_name, module_name, leftover = global_resource_mgr.split(":", 2) + parts = leftover.rsplit(":", 1) try: ip_name, port_name = parts @@ -585,12 +610,13 @@ def parse_global_resource_mgr(global_resource_mgr): return False return platform_name, module_name, ip_name, port_name + def parse_fast_model_connection(fast_model_connection): """! 
Parses --fm switch with simulator resource manager info @details FVP_MPS2_M3:DEFAULT """ try: - platform_name, config_name = fast_model_connection.split(':') + platform_name, config_name = fast_model_connection.split(":") except ValueError as e: return False return platform_name, config_name diff --git a/tools/python/mbed_os_tools/test/mbed_yotta_api.py b/tools/python/mbed_os_tools/test/mbed_yotta_api.py index fd2258d4686..7a571942be1 100644 --- a/tools/python/mbed_os_tools/test/mbed_yotta_api.py +++ b/tools/python/mbed_os_tools/test/mbed_yotta_api.py @@ -20,26 +20,24 @@ from .mbed_common_api import run_cli_command from .mbed_greentea_log import gt_logger from .mbed_yotta_module_parse import YottaModule, YottaConfig -from .mbed_target_info import ( - get_mbed_target_from_current_dir, get_binary_type_for_platform -) +from .mbed_target_info import get_mbed_target_from_current_dir, get_binary_type_for_platform from .cmake_handlers import load_ctest_testsuite from .tests_spec import TestSpec, TestBuild, Test, TestBinary - class YottaError(Exception): """ Exception raised by this module when it fails to gather test information. 
""" + pass def build_with_yotta(yotta_target_name, verbose=False, build_to_release=False, build_to_debug=False): - cmd = ["yotta"] # "yotta %s --target=%s,* build" + cmd = ["yotta"] # "yotta %s --target=%s,* build" if verbose: cmd.append("-v") - cmd.append("--target=%s,*"% yotta_target_name) + cmd.append("--target=%s,*" % yotta_target_name) cmd.append("build") if build_to_release: cmd.append("-r") @@ -47,10 +45,10 @@ def build_with_yotta(yotta_target_name, verbose=False, build_to_release=False, b cmd.append("-d") gt_logger.gt_log("building your sources and tests with yotta...") - gt_logger.gt_log_tab("calling yotta: %s"% (" ".join(cmd))) + gt_logger.gt_log_tab("calling yotta: %s" % (" ".join(cmd))) yotta_result, yotta_ret = run_cli_command(cmd, shell=False, verbose=verbose) if yotta_result: - gt_logger.gt_log("yotta build for target '%s' was successful"% gt_logger.gt_bright(yotta_target_name)) + gt_logger.gt_log("yotta build for target '%s' was successful" % gt_logger.gt_bright(yotta_target_name)) else: gt_logger.gt_log_err("yotta build failed!") return yotta_result, yotta_ret @@ -63,35 +61,41 @@ def get_platform_name_from_yotta_target(target): :param target: :return: """ - target_json_path = os.path.join('yotta_targets', target, 'target.json') + target_json_path = os.path.join("yotta_targets", target, "target.json") if not os.path.exists(target_json_path): - gt_logger.gt_log_err('Target json does not exist [%s].\n' % target_json_path + - 'mbed TAS Executor {greentea} must be run inside a pre built yotta module!') + gt_logger.gt_log_err( + "Target json does not exist [%s].\n" % target_json_path + + "mbed TAS Executor {greentea} must be run inside a pre built yotta module!" + ) return None - with open(target_json_path, 'r') as f: + with open(target_json_path, "r") as f: data = f.read() try: target_json = json.loads(data) except (TypeError, ValueError) as e: - gt_logger.gt_log_err('Failed to load json data from target.json! 
error [%s]\n' % str(e) + - 'Can not determine required mbed platform name!') + gt_logger.gt_log_err( + "Failed to load json data from target.json! error [%s]\n" % str(e) + + "Can not determine required mbed platform name!" + ) return None - if 'keywords' not in target_json: + if "keywords" not in target_json: gt_logger.gt_log_err("No 'keywords' in target.json! Can not determine required mbed platform name!") return None platform_name = None - for keyword in target_json['keywords']: - m = re.search('mbed-target:(.*)', keyword) + for keyword in target_json["keywords"]: + m = re.search("mbed-target:(.*)", keyword) if m is not None: platform_name = m.group(1).upper() if platform_name is None: - gt_logger.gt_log_err('No keyword with format "mbed-target:" found in target.json!\n' + - 'Can not determine required mbed platform name!') + gt_logger.gt_log_err( + 'No keyword with format "mbed-target:" found in target.json!\n' + + "Can not determine required mbed platform name!" + ) return None return platform_name @@ -104,7 +108,7 @@ def get_test_spec_from_yt_module(opts): """ ### Read yotta module basic information yotta_module = YottaModule() - yotta_module.init() # Read actual yotta module data + yotta_module.init() # Read actual yotta module data # Check if NO greentea-client is in module.json of repo to test, if so abort if not yotta_module.check_greentea_client(): @@ -126,42 +130,43 @@ def get_test_spec_from_yt_module(opts): test_spec = TestSpec() ### Selecting yotta targets to process - yt_targets = [] # List of yotta targets specified by user used to process during this run + yt_targets = [] # List of yotta targets specified by user used to process during this run if opts.list_of_targets: - yt_targets = opts.list_of_targets.split(',') + yt_targets = opts.list_of_targets.split(",") else: # Trying to use locally set yotta target gt_logger.gt_log("checking for yotta target in current directory") gt_logger.gt_log_tab("reason: no --target switch set") current_target = 
get_mbed_target_from_current_dir() if current_target: - gt_logger.gt_log("assuming default target as '%s'"% gt_logger.gt_bright(current_target)) + gt_logger.gt_log("assuming default target as '%s'" % gt_logger.gt_bright(current_target)) # Assuming first target printed by 'yotta search' will be used yt_targets = [current_target] else: gt_logger.gt_log_tab("yotta target in current directory is not set") - gt_logger.gt_log_err("yotta target is not specified. Use '%s' or '%s' command to set target"% - ( - gt_logger.gt_bright('mbedgt -t '), - gt_logger.gt_bright('yotta target ') - )) + gt_logger.gt_log_err( + "yotta target is not specified. Use '%s' or '%s' command to set target" + % (gt_logger.gt_bright("mbedgt -t "), gt_logger.gt_bright("yotta target ")) + ) raise YottaError("Yotta target not set in current directory!") ### Use yotta to search mapping between platform names and available platforms # Convert platform:target, ... mapping to data structure yt_target_to_map_platform = {} if opts.map_platform_to_yt_target: - gt_logger.gt_log("user defined platform -> target supported mapping definition (specified with --map-target switch)") - for mapping in opts.map_platform_to_yt_target.split(','): - if len(mapping.split(':')) == 2: - yt_target, platform = mapping.split(':') + gt_logger.gt_log( + "user defined platform -> target supported mapping definition (specified with --map-target switch)" + ) + for mapping in opts.map_platform_to_yt_target.split(","): + if len(mapping.split(":")) == 2: + yt_target, platform = mapping.split(":") yt_target_to_map_platform[yt_target] = platform - gt_logger.gt_log_tab("mapped yotta target '%s' to be compatible with platform '%s'"% ( - gt_logger.gt_bright(yt_target), - gt_logger.gt_bright(platform) - )) + gt_logger.gt_log_tab( + "mapped yotta target '%s' to be compatible with platform '%s'" + % (gt_logger.gt_bright(yt_target), gt_logger.gt_bright(platform)) + ) else: - gt_logger.gt_log_tab("unknown format '%s', use 'target:platform' 
format"% mapping) + gt_logger.gt_log_tab("unknown format '%s', use 'target:platform' format" % mapping) for yt_target in yt_targets: if yt_target in yt_target_to_map_platform: @@ -175,13 +180,12 @@ def get_test_spec_from_yt_module(opts): yotta_config = YottaConfig() yotta_config.init(yt_target) baud_rate = yotta_config.get_baudrate() - base_path = os.path.join('.', 'build', yt_target) + base_path = os.path.join(".", "build", yt_target) tb = TestBuild(yt_target, platform, toolchain, baud_rate, base_path) test_spec.add_test_builds(yt_target, tb) # Find tests - ctest_test_list = load_ctest_testsuite(base_path, - binary_type=get_binary_type_for_platform(platform)) + ctest_test_list = load_ctest_testsuite(base_path, binary_type=get_binary_type_for_platform(platform)) for name, path in ctest_test_list.items(): t = Test(name) t.add_binary(path, TestBinary.BIN_TYPE_BOOTABLE) diff --git a/tools/python/mbed_os_tools/test/mbed_yotta_module_parse.py b/tools/python/mbed_os_tools/test/mbed_yotta_module_parse.py index 55a936ea229..97c5acec106 100644 --- a/tools/python/mbed_os_tools/test/mbed_yotta_module_parse.py +++ b/tools/python/mbed_os_tools/test/mbed_yotta_module_parse.py @@ -18,12 +18,11 @@ class YottaConfig(object): - yotta_config = None def __init__(self): - self.BUILD_DIR = 'build' - self.YOTTA_CONFIG_NAME = 'yotta_config.json' + self.BUILD_DIR = "build" + self.YOTTA_CONFIG_NAME = "yotta_config.json" self.DEFAULT_BAUDRATE = 115200 def init(self, target_name): @@ -32,7 +31,7 @@ def init(self, target_name): """ try: path = os.path.join(self.BUILD_DIR, target_name, self.YOTTA_CONFIG_NAME) - with open(path, 'r') as data_file: + with open(path, "r") as data_file: self.yotta_config = json.load(data_file) except IOError as e: self.yotta_config = {} @@ -62,27 +61,26 @@ def get_baudrate(self): }, """ # Get default baudrate for this target - if self.yotta_config and 'mbed-os' in self.yotta_config: - if 'stdio' in self.yotta_config['mbed-os']: - if 'default-baud' in 
self.yotta_config['mbed-os']['stdio']: - return int(self.yotta_config['mbed-os']['stdio']['default-baud']) + if self.yotta_config and "mbed-os" in self.yotta_config: + if "stdio" in self.yotta_config["mbed-os"]: + if "default-baud" in self.yotta_config["mbed-os"]["stdio"]: + return int(self.yotta_config["mbed-os"]["stdio"]["default-baud"]) return self.DEFAULT_BAUDRATE def get_test_pins(self): - if self.yotta_config and 'hardware' in self.yotta_config: - if 'test-pins' in self.yotta_config['hardware']: - return self.yotta_config['hardware']['test-pins'] + if self.yotta_config and "hardware" in self.yotta_config: + if "test-pins" in self.yotta_config["hardware"]: + return self.yotta_config["hardware"]["test-pins"] return None class YottaModule(object): - __yotta_module = None - __greentea_client = 'greentea-client' + __greentea_client = "greentea-client" def __init__(self): - self.MODULE_PATH = '.' - self.YOTTA_CONFIG_NAME = 'module.json' + self.MODULE_PATH = "." + self.YOTTA_CONFIG_NAME = "module.json" def init(self): """! 
Loads yotta_module.json as an object from local yotta build directory @@ -94,11 +92,11 @@ def init(self): path = os.path.join(self.MODULE_PATH, self.YOTTA_CONFIG_NAME) if os.path.exists(path): # Load module.json only if it exists - with open(path, 'r') as data_file: + with open(path, "r") as data_file: self.__yotta_module = json.load(data_file) except IOError as e: print("YottaModule: error - %s" % str(e)) - return bool(self.__yotta_module) # bool({}) == False + return bool(self.__yotta_module) # bool({}) == False def set_yotta_module(self, yotta_module): self.__yotta_module = yotta_module @@ -107,14 +105,14 @@ def get_data(self): return self.__yotta_module def get_name(self): - return self.__yotta_module.get('name', 'unknown') + return self.__yotta_module.get("name", "unknown") def check_greentea_client(self): if self.get_name() == self.__greentea_client: return True - dependencies = self.__yotta_module.get('dependencies', False) - testDependencies = self.__yotta_module.get('testDependencies', False) + dependencies = self.__yotta_module.get("dependencies", False) + testDependencies = self.__yotta_module.get("testDependencies", False) if dependencies: if dependencies.get(self.__greentea_client, False): return True diff --git a/tools/python/mbed_os_tools/test/tests_spec.py b/tools/python/mbed_os_tools/test/tests_spec.py index 360fab025dc..0919bb34ad5 100644 --- a/tools/python/mbed_os_tools/test/tests_spec.py +++ b/tools/python/mbed_os_tools/test/tests_spec.py @@ -43,9 +43,9 @@ def __init__(self, path, binary_type, compare_log): :param binary_type: :return: """ - assert binary_type in TestBinary.SUPPORTED_BIN_TYPES, ( - "Binary type %s not supported. Supported types [%s]" - % (binary_type, ", ".join(TestBinary.SUPPORTED_BIN_TYPES)) + assert binary_type in TestBinary.SUPPORTED_BIN_TYPES, "Binary type %s not supported. 
Supported types [%s]" % ( + binary_type, + ", ".join(TestBinary.SUPPORTED_BIN_TYPES), ) self.__path = path self.__flash_method = binary_type @@ -113,14 +113,12 @@ def parse(self, test_json): assert Test.KW_TEST_BINS in test_json, "Test spec should contain key `binaries`" for binary in test_json[Test.KW_TEST_BINS]: mandatory_keys = [TestBinary.KW_BIN_PATH] - assert set(mandatory_keys).issubset( - set(binary.keys()) - ), "Binary spec should contain key [%s]" % ",".join(mandatory_keys) + assert set(mandatory_keys).issubset(set(binary.keys())), "Binary spec should contain key [%s]" % ",".join( + mandatory_keys + ) fm = binary.get(TestBinary.KW_BIN_TYPE, self.__default_flash_method) assert fm is not None, "Binary type not specified in build and binary spec." - tb = TestBinary(binary[TestBinary.KW_BIN_PATH], - fm, - binary.get(TestBinary.KW_COMP_LOG)) + tb = TestBinary(binary[TestBinary.KW_BIN_PATH], fm, binary.get(TestBinary.KW_COMP_LOG)) self.__binaries_by_flash_method[fm] = tb def add_binary(self, path, binary_type, compare_log=None): @@ -131,9 +129,7 @@ def add_binary(self, path, binary_type, compare_log=None): :param binary_type: :return: """ - self.__binaries_by_flash_method[binary_type] = TestBinary(path, - binary_type, - compare_log) + self.__binaries_by_flash_method[binary_type] = TestBinary(path, binary_type, compare_log) class TestBuild(object): @@ -149,9 +145,7 @@ class for Test build. KW_TESTS = "tests" KW_BIN_TYPE = "binary_type" - def __init__( - self, name, platform, toolchain, baud_rate, base_path, default_flash_method=None - ): + def __init__(self, name, platform, toolchain, baud_rate, base_path, default_flash_method=None): """ ctor. 
@@ -226,9 +220,7 @@ def parse(self, build_spec): :param build_spec: :return: """ - assert TestBuild.KW_TESTS in build_spec, ( - "Build spec should contain key '%s'" % TestBuild.KW_TESTS - ) + assert TestBuild.KW_TESTS in build_spec, "Build spec should contain key '%s'" % TestBuild.KW_TESTS for name, test_json in build_spec[TestBuild.KW_TESTS].items(): test = Test(name, default_flash_method=self.__default_flash_method) test.parse(test_json) @@ -288,9 +280,7 @@ def parse(self, spec): :param spec: :return: """ - assert TestSpec.KW_BUILDS, ( - "Test spec should contain key '%s'" % TestSpec.KW_BUILDS - ) + assert TestSpec.KW_BUILDS, "Test spec should contain key '%s'" % TestSpec.KW_BUILDS for build_name, build in spec[TestSpec.KW_BUILDS].items(): mandatory_keys = [ TestBuild.KW_PLATFORM, @@ -299,8 +289,7 @@ def parse(self, spec): TestBuild.KW_BUILD_BASE_PATH, ] assert set(mandatory_keys).issubset(set(build.keys())), ( - "Build spec should contain keys [%s]. It has [%s]" - % (",".join(mandatory_keys), ",".join(build.keys())) + "Build spec should contain keys [%s]. 
It has [%s]" % (",".join(mandatory_keys), ",".join(build.keys())) ) platform = build[TestBuild.KW_PLATFORM] toolchain = build[TestBuild.KW_TOOLCHAIN] diff --git a/tools/python/mbed_platformio/__init__.py b/tools/python/mbed_platformio/__init__.py index 8546dded510..a9d0ccc7eeb 100644 --- a/tools/python/mbed_platformio/__init__.py +++ b/tools/python/mbed_platformio/__init__.py @@ -1,4 +1,4 @@ """ Copyright (c) 2025 Jamie Smith SPDX-License-Identifier: Apache-2.0 -""" \ No newline at end of file +""" diff --git a/tools/python/mbed_platformio/build_mbed_ce.py b/tools/python/mbed_platformio/build_mbed_ce.py index 556c971078c..e8776e8a9fb 100644 --- a/tools/python/mbed_platformio/build_mbed_ce.py +++ b/tools/python/mbed_platformio/build_mbed_ce.py @@ -7,6 +7,7 @@ Copyright (c) 2025 Jamie Smith SPDX-License-Identifier: Apache-2.0 """ + from __future__ import annotations import pathlib @@ -32,7 +33,7 @@ CMAKE_API_QUERY_DIR = CMAKE_API_DIR / "query" CMAKE_API_REPLY_DIR = CMAKE_API_DIR / "reply" -PROJECT_CMAKELISTS_TXT = FRAMEWORK_DIR / "tools" / "python" / "mbed_platformio" / "CMakeLists.txt" +PROJECT_CMAKELISTS_TXT = FRAMEWORK_DIR / "tools" / "python" / "mbed_platformio" / "CMakeLists.txt" PROJECT_MBED_APP_JSON5 = PROJECT_DIR / "mbed_app.json5" PROJECT_TARGET_CONFIG_H = BUILD_DIR / "mbed-os" / "generated-headers" / "mbed-target-config.h" @@ -46,30 +47,31 @@ sys.path.append(str(FRAMEWORK_DIR / "tools" / "python")) from mbed_platformio.pio_variants import PIO_VARIANT_TO_MBED_TARGET -from mbed_platformio.cmake_to_scons_converter import build_library, extract_defines, extract_flags, extract_includes, extract_link_args, find_included_files +from mbed_platformio.cmake_to_scons_converter import ( + build_library, + extract_defines, + extract_flags, + extract_includes, + extract_link_args, + find_included_files, +) + def get_mbed_target(): board_type = env.subst("$BOARD") - variant = ( - PIO_VARIANT_TO_MBED_TARGET[board_type] - if board_type in PIO_VARIANT_TO_MBED_TARGET - else 
board_type.upper() - ) + variant = PIO_VARIANT_TO_MBED_TARGET[board_type] if board_type in PIO_VARIANT_TO_MBED_TARGET else board_type.upper() return board.get("build.mbed_variant", variant) + def is_proper_mbed_ce_project(): - return all( - path.is_file() - for path in ( - PROJECT_MBED_APP_JSON5, - ) - ) + return all(path.is_file() for path in (PROJECT_MBED_APP_JSON5,)) + def create_default_project_files(): print("Mbed CE: Creating default project files") if not PROJECT_MBED_APP_JSON5.exists(): PROJECT_MBED_APP_JSON5.write_text( -""" + """ { "target_overrides": { "*": { @@ -84,12 +86,10 @@ def create_default_project_files(): """ ) + def is_cmake_reconfigure_required(): cmake_cache_file = BUILD_DIR / "CMakeCache.txt" - cmake_config_files = [ - PROJECT_MBED_APP_JSON5, - PROJECT_CMAKELISTS_TXT - ] + cmake_config_files = [PROJECT_MBED_APP_JSON5, PROJECT_CMAKELISTS_TXT] ninja_buildfile = BUILD_DIR / "build.ninja" if not cmake_cache_file.exists(): @@ -129,7 +129,6 @@ def run_tool(command_and_args: list[str] | None = None): def get_cmake_code_model(cmake_args: list) -> dict: - query_file = CMAKE_API_QUERY_DIR / "codemodel-v2" if not query_file.exists(): @@ -161,6 +160,7 @@ def get_cmake_code_model(cmake_args: list) -> dict: assert codemodel["version"]["major"] == 2 return codemodel + def get_target_config(project_configs: dict, target_index): target_json = project_configs.get("targets")[target_index].get("jsonFile", "") target_config_file = CMAKE_API_REPLY_DIR / target_json @@ -177,15 +177,13 @@ def load_target_configurations(cmake_codemodel: dict) -> dict: project_configs = cmake_codemodel.get("configurations")[0] for config in project_configs.get("projects", []): for target_index in config.get("targetIndexes", []): - target_config = get_target_config( - project_configs, target_index - ) + target_config = get_target_config(project_configs, target_index) configs[target_config["name"]] = target_config return configs -def generate_project_ld_script() -> pathlib.Path: +def 
generate_project_ld_script() -> pathlib.Path: # Run Ninja to build the target which generates the linker script. # Note that we don't want to use CMake as running it has the side effect of redoing # the file API query. @@ -193,7 +191,7 @@ def generate_project_ld_script() -> pathlib.Path: str(pathlib.Path(platform.get_package_dir("tool-ninja")) / "ninja"), "-C", str(BUILD_DIR), - "mbed-linker-script" + "mbed-linker-script", ] run_tool(cmd) @@ -202,19 +200,19 @@ def generate_project_ld_script() -> pathlib.Path: return next(BUILD_DIR.glob("*.link_script.ld")) -def get_targets_by_type(target_configs: dict, target_types: list[str], ignore_targets: list[str] | None=None) -> list: +def get_targets_by_type(target_configs: dict, target_types: list[str], ignore_targets: list[str] | None = None) -> list: ignore_targets = ignore_targets or [] result = [] for target_config in target_configs.values(): - if ( - target_config["type"] in target_types - and target_config["name"] not in ignore_targets - ): + if target_config["type"] in target_types and target_config["name"] not in ignore_targets: result.append(target_config) return result -def get_components_map(target_configs: dict, target_types: list[str], ignore_components: list[str] | None=None) -> dict: + +def get_components_map( + target_configs: dict, target_types: list[str], ignore_components: list[str] | None = None +) -> dict: result = {} for config in get_targets_by_type(target_configs, target_types, ignore_components): if "nameOnDisk" not in config: @@ -224,17 +222,17 @@ def get_components_map(target_configs: dict, target_types: list[str], ignore_com return result -def build_components( - env: Environment, components_map: dict, project_src_dir: pathlib.Path -): +def build_components(env: Environment, components_map: dict, project_src_dir: pathlib.Path): for k, v in components_map.items(): components_map[k]["lib"] = build_library( env, v["config"], project_src_dir, FRAMEWORK_DIR, pathlib.Path("$BUILD_DIR/mbed-os") ) + def 
get_app_defines(app_config: dict): return extract_defines(app_config["compileGroups"][0]) + ## CMake configuration ------------------------------------------------------------------------------------------------- project_codemodel = get_cmake_code_model( @@ -245,15 +243,16 @@ def get_app_defines(app_config: dict): BUILD_DIR, "-G", "Ninja", - "-DCMAKE_MAKE_PROGRAM=" + str(NINJA_PATH.as_posix()), # Note: CMake prefers to be passed paths with forward slashes, so use as_posix() + "-DCMAKE_MAKE_PROGRAM=" + + str(NINJA_PATH.as_posix()), # Note: CMake prefers to be passed paths with forward slashes, so use as_posix() "-DCMAKE_BUILD_TYPE=" + CMAKE_BUILD_TYPE, "-DPLATFORMIO_MBED_OS_PATH=" + str(FRAMEWORK_DIR.as_posix()), "-DPLATFORMIO_PROJECT_PATH=" + str(PROJECT_DIR.as_posix()), "-DMBED_TARGET=" + get_mbed_target(), - "-DUPLOAD_METHOD=NONE", # Disable Mbed CE upload method system as PlatformIO has its own + "-DUPLOAD_METHOD=NONE", # Disable Mbed CE upload method system as PlatformIO has its own ] - + click.parser.split_arg_string(board.get("build.cmake_extra_args", "")), - ) + + click.parser.split_arg_string(board.get("build.cmake_extra_args", "")) +) if not project_codemodel: sys.stderr.write("Error: Couldn't find code model generated by CMake\n") @@ -262,11 +261,7 @@ def get_app_defines(app_config: dict): print("Mbed CE: Reading CMake configuration...") target_configs = load_target_configurations(project_codemodel) -framework_components_map = get_components_map( - target_configs, - ["STATIC_LIBRARY", "OBJECT_LIBRARY"], - [], -) +framework_components_map = get_components_map(target_configs, ["STATIC_LIBRARY", "OBJECT_LIBRARY"], []) ## Convert targets & flags from CMake to SCons ------------------------------------------------------------------------- @@ -292,16 +287,13 @@ def get_app_defines(app_config: dict): # The CMake build system adds a flag in mbed_set_post_build() to output a map file. # We need to do that here. 
-map_file = BUILD_DIR / 'firmware.map' +map_file = BUILD_DIR / "firmware.map" link_args.append(f"-Wl,-Map={str(map_file)}") ## Build environment configuration ------------------------------------------------------------------------------------- env.MergeFlags(project_flags) -env.Prepend( - CPPPATH=app_includes["plain_includes"], - CPPDEFINES=project_defines, -) +env.Prepend(CPPPATH=app_includes["plain_includes"], CPPDEFINES=project_defines) env.Append(_LIBFLAGS=link_args) # Set up a dependency between all application source files and mbed-target-config.h. @@ -318,4 +310,4 @@ def get_app_defines(app_config: dict): env.Depends("$BUILD_DIR/$PROGNAME$PROGSUFFIX", str(project_ld_script)) env.Append(LDSCRIPT_PATH=str(project_ld_script)) -print("Mbed CE: Build environment configured.") \ No newline at end of file +print("Mbed CE: Build environment configured.") diff --git a/tools/python/mbed_platformio/cmake_to_scons_converter.py b/tools/python/mbed_platformio/cmake_to_scons_converter.py index 19e7a02775a..cf920d5b825 100644 --- a/tools/python/mbed_platformio/cmake_to_scons_converter.py +++ b/tools/python/mbed_platformio/cmake_to_scons_converter.py @@ -13,22 +13,20 @@ import pathlib import click + def extract_defines(compile_group: dict) -> list[tuple[str, str]]: def _normalize_define(define_string): define_string = define_string.strip() if "=" in define_string: define, value = define_string.split("=", maxsplit=1) - if any(char in value for char in (' ', '<', '>')): + if any(char in value for char in (" ", "<", ">")): value = f'"{value}"' elif '"' in value and not value.startswith("\\"): value = value.replace('"', '\\"') return define, value return define_string - result = [ - _normalize_define(d.get("define", "")) - for d in compile_group.get("defines", []) if d - ] + result = [_normalize_define(d.get("define", "")) for d in compile_group.get("defines", []) if d] for f in compile_group.get("compileCommandFragments", []): fragment = f.get("fragment", "").strip() @@ -39,6 
+37,7 @@ def _normalize_define(define_string): return result + def prepare_build_envs(target_json: dict, default_env: Environment) -> list[Environment]: """ Creates the Scons Environment(s) needed to build the source files in a CMake target @@ -46,9 +45,10 @@ def prepare_build_envs(target_json: dict, default_env: Environment) -> list[Envi build_envs = [] target_compile_groups = target_json.get("compileGroups", []) if not target_compile_groups: - print("Warning! The `%s` component doesn't register any source files. " - "Check if sources are set in component's CMakeLists.txt!" % target_json["name"] - ) + print( + "Warning! The `%s` component doesn't register any source files. " + "Check if sources are set in component's CMakeLists.txt!" % target_json["name"] + ) for cg in target_compile_groups: includes = [] @@ -73,8 +73,14 @@ def prepare_build_envs(target_json: dict, default_env: Environment) -> list[Envi return build_envs + def compile_source_files( - config: dict, default_env: Environment, project_src_dir: pathlib.Path, framework_dir: pathlib.Path, framework_obj_dir: pathlib.Path) -> list: + config: dict, + default_env: Environment, + project_src_dir: pathlib.Path, + framework_dir: pathlib.Path, + framework_obj_dir: pathlib.Path, +) -> list: """ Generates SCons rules to compile the source files in a target. Returns list of object files to build. 
@@ -89,7 +95,6 @@ def compile_source_files( continue compile_group_idx = source.get("compileGroupIndex") if compile_group_idx is not None: - # Get absolute path to source, resolving relative to source dir if needed src_path = pathlib.Path(source.get("path")) if not src_path.is_absolute(): @@ -101,7 +106,9 @@ def compile_source_files( elif src_path.is_relative_to(framework_dir): obj_path = (framework_obj_dir / src_path.relative_to(framework_dir)).with_suffix(".o") else: - raise RuntimeError(f"Source path {src_path!s} outside of project source dir and framework dir, don't know where to save object file!") + raise RuntimeError( + f"Source path {src_path!s} outside of project source dir and framework dir, don't know where to save object file!" + ) env = build_envs[compile_group_idx] @@ -112,23 +119,24 @@ def compile_source_files( for included_file in find_included_files(env): env.Depends(str(obj_path), included_file) - return objects + def build_library( - default_env: Environment, lib_config: dict, project_src_dir: pathlib.Path, framework_dir: pathlib.Path, framework_obj_dir: pathlib.Path + default_env: Environment, + lib_config: dict, + project_src_dir: pathlib.Path, + framework_dir: pathlib.Path, + framework_obj_dir: pathlib.Path, ): lib_name = lib_config["nameOnDisk"] lib_path = lib_config["paths"]["build"] - lib_objects = compile_source_files( - lib_config, default_env, project_src_dir, framework_dir, framework_obj_dir - ) + lib_objects = compile_source_files(lib_config, default_env, project_src_dir, framework_dir, framework_obj_dir) - #print(f"Created build rule for " + str(pathlib.Path("$BUILD_DIR") / lib_path / lib_name)) + # print(f"Created build rule for " + str(pathlib.Path("$BUILD_DIR") / lib_path / lib_name)) + + return default_env.Library(target=str(pathlib.Path("$BUILD_DIR") / lib_path / lib_name), source=lib_objects) - return default_env.Library( - target=str(pathlib.Path("$BUILD_DIR") / lib_path / lib_name), source=lib_objects - ) def 
_get_flags_for_compile_group(compile_group_json: dict) -> list[str]: """ @@ -139,11 +147,10 @@ def _get_flags_for_compile_group(compile_group_json: dict) -> list[str]: fragment = ccfragment.get("fragment", "").strip() if not fragment or fragment.startswith("-D"): continue - flags.extend( - click.parser.split_arg_string(fragment) - ) + flags.extend(click.parser.split_arg_string(fragment)) return flags + def extract_flags(target_json: dict) -> dict[str, list[str]]: """ Returns a dictionary with flags for SCons based on a given CMake target @@ -159,6 +166,7 @@ def extract_flags(target_json: dict) -> dict[str, list[str]]: "CXXFLAGS": default_flags.get("CXX"), } + def find_included_files(environment: Environment) -> set[str]: """ Process a list of flags produced by extract_flags() to find files manually included by '-include' @@ -171,6 +179,7 @@ def find_included_files(environment: Environment) -> set[str]: result.add(language_flags[index + 1]) return result + def extract_includes(target_json: dict) -> dict[str, list[str]]: """ Extract the includes from a CMake target and return an SCons-style dict @@ -187,6 +196,7 @@ def extract_includes(target_json: dict) -> dict[str, list[str]]: return {"plain_includes": plain_includes, "sys_includes": sys_includes} + def extract_link_args(target_json: dict) -> list[str]: """ Extract the linker flags from a CMake target @@ -203,4 +213,4 @@ def extract_link_args(target_json: dict) -> list[str]: if fragment_role == "flags": result.extend(args) - return result \ No newline at end of file + return result diff --git a/tools/python/mbed_platformio/pio_variants.py b/tools/python/mbed_platformio/pio_variants.py index 76aa4b16c27..7d01713ef96 100644 --- a/tools/python/mbed_platformio/pio_variants.py +++ b/tools/python/mbed_platformio/pio_variants.py @@ -22,5 +22,5 @@ "cloud_jam_l4": "NUCLEO_L476RG", "nucleo_h743zi": "NUCLEO_H743ZI2", "genericSTM32F103RB": "NUCLEO_F103RB", - "disco_h747xi": "DISCO_H747I" + "disco_h747xi": "DISCO_H747I", } diff 
--git a/tools/python/mbed_tools/build/__init__.py b/tools/python/mbed_tools/build/__init__.py index 0ddf6a6dcac..40c5c22f8d4 100644 --- a/tools/python/mbed_tools/build/__init__.py +++ b/tools/python/mbed_tools/build/__init__.py @@ -10,6 +10,7 @@ - Invocation of the build process for the command line tools and online build service. - Export of build instructions to third party command line tools and IDEs. """ + from mbed_tools.build.build import build_project, generate_build_system from mbed_tools.build.config import generate_config from mbed_tools.build.flash import flash_binary diff --git a/tools/python/mbed_tools/build/_internal/cmake_file.py b/tools/python/mbed_tools/build/_internal/cmake_file.py index 90f5df98269..b86bcdcb62f 100644 --- a/tools/python/mbed_tools/build/_internal/cmake_file.py +++ b/tools/python/mbed_tools/build/_internal/cmake_file.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Module in charge of CMake file generation.""" + import pathlib from typing import Any @@ -26,15 +27,17 @@ def render_mbed_config_cmake_template(config: Config, toolchain_name: str, targe Returns: The rendered mbed_config template. 
""" - env = jinja2.Environment(loader=jinja2.PackageLoader("mbed_tools.build", str(TEMPLATES_DIRECTORY)),) + env = jinja2.Environment(loader=jinja2.PackageLoader("mbed_tools.build", str(TEMPLATES_DIRECTORY))) env.filters["to_hex"] = to_hex template = env.get_template(TEMPLATE_NAME) config["supported_c_libs"] = [x for x in config["supported_c_libs"][toolchain_name.lower()]] - context = {"target_name": target_name, - "toolchain_name": toolchain_name, - "json_sources": config.json_sources, - **config} + context = { + "target_name": target_name, + "toolchain_name": toolchain_name, + "json_sources": config.json_sources, + **config, + } return template.render(context) diff --git a/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py b/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py index 0bf995f7cf3..79e2bd360e1 100644 --- a/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py +++ b/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Configuration assembly algorithm.""" + import itertools from dataclasses import dataclass diff --git a/tools/python/mbed_tools/build/_internal/config/config.py b/tools/python/mbed_tools/build/_internal/config/config.py index ef10ee0fb9e..5fd5fcea6f0 100644 --- a/tools/python/mbed_tools/build/_internal/config/config.py +++ b/tools/python/mbed_tools/build/_internal/config/config.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Build configuration representation.""" + import logging from collections import UserDict @@ -68,15 +69,16 @@ def _handle_overrides(self, overrides: Iterable[Override]) -> None: f"The parameter `{override.namespace}.{override.name}` will not be added to the configuration." 
) - valid_params_in_namespace = list(filter( - lambda x: x.namespace == override.namespace, - self.data.get(CONFIG_SECTION, []), - )) + valid_params_in_namespace = list( + filter(lambda x: x.namespace == override.namespace, self.data.get(CONFIG_SECTION, [])) + ) valid_param_names = [f'"{param.namespace}.{param.name}"' for param in valid_params_in_namespace] if len(valid_param_names) > 0: - logger.warning(f'Valid config parameters in this namespace are: {", ".join(valid_param_names)}. ' - f'Maybe you meant one of those?') + logger.warning( + f"Valid config parameters in this namespace are: {', '.join(valid_param_names)}. " + f"Maybe you meant one of those?" + ) else: setting.value = override.value diff --git a/tools/python/mbed_tools/build/_internal/config/source.py b/tools/python/mbed_tools/build/_internal/config/source.py index 9cfbc194a5e..f0b538b8da5 100644 --- a/tools/python/mbed_tools/build/_internal/config/source.py +++ b/tools/python/mbed_tools/build/_internal/config/source.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Configuration source parser.""" + import logging import pathlib @@ -111,9 +112,7 @@ def _extract_config_settings(namespace: str, config_data: dict) -> List[ConfigSe help_text = None value = item - setting = ConfigSetting( - namespace=namespace, name=name, macro_name=macro_name, help_text=help_text, value=value, - ) + setting = ConfigSetting(namespace=namespace, name=name, macro_name=macro_name, help_text=help_text, value=value) # If the config item is about a certain component or feature # being present, avoid adding it to the mbed_config.cmake # configuration file. 
Instead, applications should depend on diff --git a/tools/python/mbed_tools/build/_internal/find_files.py b/tools/python/mbed_tools/build/_internal/find_files.py index 1dba384ab86..c7b931e1e8d 100644 --- a/tools/python/mbed_tools/build/_internal/find_files.py +++ b/tools/python/mbed_tools/build/_internal/find_files.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Find files in MbedOS program directory.""" + from pathlib import Path import fnmatch from typing import Callable, Iterable, Optional, List, Tuple diff --git a/tools/python/mbed_tools/build/_internal/memory_banks.py b/tools/python/mbed_tools/build/_internal/memory_banks.py index 1edeff223e6..b37afc22a70 100644 --- a/tools/python/mbed_tools/build/_internal/memory_banks.py +++ b/tools/python/mbed_tools/build/_internal/memory_banks.py @@ -6,6 +6,7 @@ from __future__ import annotations from typing import TYPE_CHECKING + if TYPE_CHECKING: from typing import Dict, Any, Set, TypedDict, NotRequired @@ -29,17 +30,18 @@ class MemoryBankInfo(TypedDict): """ Info about one memory bank """ + size: int start: int default: NotRequired[bool] startup: NotRequired[bool] access: Dict[str, bool] - class BanksByType(TypedDict): """ Info about all memory banks, ROM and RAM """ + ROM: Dict[str, MemoryBankInfo] RAM: Dict[str, MemoryBankInfo] @@ -60,8 +62,7 @@ class BanksByType(TypedDict): BANK_TYPES = ("RAM", "ROM") -def incorporate_memory_bank_data_from_cmsis(target_attributes: Dict[str, Any], - program: MbedProgram) -> None: +def incorporate_memory_bank_data_from_cmsis(target_attributes: Dict[str, Any], program: MbedProgram) -> None: """ Incorporate the memory bank information from the CMSIS JSON file into the target attributes. 
@@ -77,10 +78,11 @@ def incorporate_memory_bank_data_from_cmsis(target_attributes: Dict[str, Any], if target_attributes["device_name"] not in cmsis_mcu_descriptions: raise MbedBuildError( -f"""Target specifies device_name {target_attributes["device_name"]} but this device is not + f"""Target specifies device_name {target_attributes["device_name"]} but this device is not listed in {program.mbed_os.cmsis_mcu_descriptions_json_file}. Perhaps you need to use the 'python -m mbed_tools.cli.main cmsis-mcu-descr fetch-missing' command to download -the missing MCU description?""") +the missing MCU description?""" + ) mcu_description = cmsis_mcu_descriptions[target_attributes["device_name"]] mcu_memory_description: Dict[str, Dict[str, Any]] = mcu_description["memories"] @@ -95,7 +97,6 @@ def incorporate_memory_bank_data_from_cmsis(target_attributes: Dict[str, Any], def _apply_configured_overrides(banks_by_type: BanksByType, bank_config: Dict[str, Dict[str, int]]) -> BanksByType: - """ Apply overrides from configuration to the physical memory bank information, producing the configured memory bank information. 
@@ -106,14 +107,14 @@ def _apply_configured_overrides(banks_by_type: BanksByType, bank_config: Dict[st configured_memory_banks = copy.deepcopy(banks_by_type) for bank_name, bank_data in bank_config.items(): - if bank_name not in configured_memory_banks["RAM"] and bank_name not in configured_memory_banks["ROM"]: raise MbedBuildError(f"Attempt to configure memory bank {bank_name} which does not exist for this device.") bank_type = "RAM" if bank_name in configured_memory_banks["RAM"] else "ROM" if len(set(bank_data.keys()) - {"size", "start"}): - raise MbedBuildError("Only the size and start properties of a memory bank can be " - "configured in memory_bank_config") + raise MbedBuildError( + "Only the size and start properties of a memory bank can be configured in memory_bank_config" + ) for property_name, property_value in bank_data.items(): if not isinstance(property_value, int): @@ -125,7 +126,6 @@ def _apply_configured_overrides(banks_by_type: BanksByType, bank_config: Dict[st def _print_mem_bank_summary(banks_by_type: BanksByType, configured_banks_by_type: BanksByType) -> None: - """ Print a summary of the memory banks to the console :param banks_by_type: Physical memory bank information @@ -137,15 +137,17 @@ def _print_mem_bank_summary(banks_by_type: BanksByType, configured_banks_by_type banks = banks_by_type[bank_type] if len(banks) == 0: - logger.warning("No %s banks are known to the Mbed configuration system! This can cause problems with " - "features like Mbed Stats and FlashIAPBlockDevice! To fix this, define a 'device_name'" - " property or specify 'memory_banks' in your target JSON.", bank_type) + logger.warning( + "No %s banks are known to the Mbed configuration system! This can cause problems with " + "features like Mbed Stats and FlashIAPBlockDevice! 
To fix this, define a 'device_name'" + " property or specify 'memory_banks' in your target JSON.", + bank_type, + ) continue print(f"Target {bank_type} banks: -----------------------------------------------------------") for bank_index, (bank_name, bank_data) in enumerate(banks.items()): - bank_size = bank_data["size"] bank_start = bank_data["start"] @@ -160,16 +162,16 @@ def _print_mem_bank_summary(banks_by_type: BanksByType, configured_banks_by_type if configured_start_addr != bank_start: configured_start_addr_str = f" (configured to 0x{configured_start_addr:08x})" - print(f"{bank_index}. {bank_name}, " - f"start addr 0x{bank_start:08x}{configured_start_addr_str}, " - f"size {humanize.naturalsize(bank_size, binary=True)}{configured_size_str}") + print( + f"{bank_index}. {bank_name}, " + f"start addr 0x{bank_start:08x}{configured_start_addr_str}, " + f"size {humanize.naturalsize(bank_size, binary=True)}{configured_size_str}" + ) print() -def _generate_macros_for_memory_banks(banks_by_type: BanksByType, - configured_banks_by_type: BanksByType) -> Set[str]: - +def _generate_macros_for_memory_banks(banks_by_type: BanksByType, configured_banks_by_type: BanksByType) -> Set[str]: """ Generate a set of macros to define to pass the memory bank information into Mbed. :param banks_by_type: Physical memory bank information @@ -181,7 +183,6 @@ def _generate_macros_for_memory_banks(banks_by_type: BanksByType, banks = banks_by_type[bank_type] for bank_index, (bank_name, bank_data) in enumerate(banks.items()): - bank_number_str = "" if bank_index == 0 else str(bank_index) configured_bank_data = configured_banks_by_type[bank_type][bank_name] @@ -217,10 +218,13 @@ def process_memory_banks(config: Config) -> Dict[str, BanksByType]: # Check for deprecated properties for property_name in DEPRECATED_MEM_CONFIG_PROPERTIES: if property_name in config: - logger.warning("Configuration uses old-style memory bank configuration property %s. 
" - "This is deprecated and is not processed anymore, replace it with a " - "'memory_bank_config' section. See here for more: " - "https://github.com/mbed-ce/mbed-os/wiki/Mbed-Memory-Bank-Information", property_name) + logger.warning( + "Configuration uses old-style memory bank configuration property %s. " + "This is deprecated and is not processed anymore, replace it with a " + "'memory_bank_config' section. See here for more: " + "https://github.com/mbed-ce/mbed-os/wiki/Mbed-Memory-Bank-Information", + property_name, + ) # Check attributes, sort into rom and ram banks_by_type: BanksByType = {"ROM": {}, "RAM": {}} @@ -246,8 +250,4 @@ def process_memory_banks(config: Config) -> Dict[str, BanksByType]: config["memory_bank_macros"] = _generate_macros_for_memory_banks(banks_by_type, configured_banks_by_type) # Write out JSON file - return { - "memory_banks": banks_by_type, - "configured_memory_banks": configured_banks_by_type - } - + return {"memory_banks": banks_by_type, "configured_memory_banks": configured_banks_by_type} diff --git a/tools/python/mbed_tools/build/_internal/write_files.py b/tools/python/mbed_tools/build/_internal/write_files.py index c1e063fd0b0..d4e1d89efb8 100644 --- a/tools/python/mbed_tools/build/_internal/write_files.py +++ b/tools/python/mbed_tools/build/_internal/write_files.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Writes out files to specified locations.""" + import pathlib from mbed_tools.build.exceptions import InvalidExportOutputDirectory diff --git a/tools/python/mbed_tools/build/build.py b/tools/python/mbed_tools/build/build.py index 3fe17edae5d..4b4c08270b5 100644 --- a/tools/python/mbed_tools/build/build.py +++ b/tools/python/mbed_tools/build/build.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Configure and build a CMake project.""" + import logging import pathlib import subprocess diff --git a/tools/python/mbed_tools/build/config.py b/tools/python/mbed_tools/build/config.py index 
e0303c4b477..55db7fed93f 100644 --- a/tools/python/mbed_tools/build/config.py +++ b/tools/python/mbed_tools/build/config.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Parses the Mbed configuration system and generates a CMake config script.""" + import pathlib from typing import Any, Tuple @@ -37,9 +38,7 @@ def generate_config(target_name: str, toolchain: str, program: MbedProgram) -> T targets_data = _load_raw_targets_data(program) target_build_attributes = get_target_by_name(target_name, targets_data) incorporate_memory_bank_data_from_cmsis(target_build_attributes, program) - config = assemble_config( - target_build_attributes, program - ) + config = assemble_config(target_build_attributes, program) # Process memory banks and save JSON data for other tools (e.g. memap) to use memory_banks_json_content = process_memory_banks(config) @@ -47,7 +46,7 @@ def generate_config(target_name: str, toolchain: str, program: MbedProgram) -> T (program.files.cmake_build_dir / MEMORY_BANKS_JSON_FILE).write_text(json.dumps(memory_banks_json_content, indent=4)) cmake_file_contents = render_mbed_config_cmake_template( - target_name=target_name, config=config, toolchain_name=toolchain, + target_name=target_name, config=config, toolchain_name=toolchain ) cmake_config_file_path = program.files.cmake_build_dir / CMAKE_CONFIG_FILE write_file(cmake_config_file_path, cmake_file_contents) diff --git a/tools/python/mbed_tools/build/exceptions.py b/tools/python/mbed_tools/build/exceptions.py index a99be0cd516..12c3defdb7d 100644 --- a/tools/python/mbed_tools/build/exceptions.py +++ b/tools/python/mbed_tools/build/exceptions.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Public exceptions raised by the package.""" + from mbed_tools.lib.exceptions import ToolsError diff --git a/tools/python/mbed_tools/cli/cmsis_mcu_descr.py b/tools/python/mbed_tools/cli/cmsis_mcu_descr.py index 845f6d9d6be..13494e10f97 100644 --- a/tools/python/mbed_tools/cli/cmsis_mcu_descr.py 
+++ b/tools/python/mbed_tools/cli/cmsis_mcu_descr.py @@ -13,6 +13,7 @@ from the CMSIS pack index (a resource hosted by ARM), but can also be edited manually after being downloaded. This is needed since the index is missing certain MCUs and has wrong information about a few others. """ + from mbed_tools.lib.json_helpers import decode_json_file import click @@ -38,13 +39,10 @@ TARGETS_JSON5_PATH = MBED_OS_DIR / "targets" / "targets.json5" CMSIS_MCU_DESCRIPTIONS_JSON_PATH = MBED_OS_DIR / "targets" / "cmsis_mcu_descriptions.json5" + # Top-level command -@click.group( - name="cmsis-mcu-descr", - help="Manage CMSIS MCU description JSON file" -) +@click.group(name="cmsis-mcu-descr", help="Manage CMSIS MCU description JSON file") def cmsis_mcu_descr(): - # Set up logger defaults LOGGER.setLevel(logging.INFO) @@ -58,7 +56,9 @@ def open_cmsis_cache(*, must_exist: bool = True) -> cmsis_pack_manager.Cache: index_file_path = pathlib.Path(cmsis_cache.index_path) if not index_file_path.exists() and must_exist: - raise RuntimeError("CMSIS device descriptor cache does not exist! Run 'python -m mbed_tools.cli.main cmsis-mcu-descr reload-cache' to populate it!") + raise RuntimeError( + "CMSIS device descriptor cache does not exist! Run 'python -m mbed_tools.cli.main cmsis-mcu-descr reload-cache' to populate it!" + ) if index_file_path.exists(): # Check how old the index file is @@ -103,7 +103,7 @@ def get_mcu_names_used_by_targets_json5() -> Set[str]: # Search for files starting with "custom_targets" of type .json or .json5. 
Also exclude some folders like build and mbed-os exclude_dirs = ["build", "mbed-os", ".git"] - file_pattern = r"custom_targets\.(json|json5)" + file_pattern = r"custom_targets\.(json|json5)" custom_targets_file = find_json_files(PROJECT_ROOT, exclude_dirs, file_pattern) custom_targets_json_path = {} @@ -112,7 +112,6 @@ def get_mcu_names_used_by_targets_json5() -> Set[str]: custom_targets_json_path = file LOGGER.info(f"Custom_targets file detected - {custom_targets_json_path}") - used_mcu_names = set() LOGGER.info("Scanning targets.json5 for used MCU names...") json_contents = decode_json_file(TARGETS_JSON5_PATH) @@ -126,9 +125,7 @@ def get_mcu_names_used_by_targets_json5() -> Set[str]: return used_mcu_names -@cmsis_mcu_descr.command( - short_help="Reload the cache of CMSIS MCU descriptions. This can take several minutes." -) +@cmsis_mcu_descr.command(short_help="Reload the cache of CMSIS MCU descriptions. This can take several minutes.") def reload_cache(): """ Reload the cache of CMSIS MCU descriptions. This can take several minutes. @@ -145,10 +142,7 @@ def reload_cache(): cmsis_cache.cache_descriptors() -@cmsis_mcu_descr.command( - name="find-unused", - short_help="Find MCU descriptions that are not used by targets.json5." -) +@cmsis_mcu_descr.command(name="find-unused", short_help="Find MCU descriptions that are not used by targets.json5.") def find_unused(): """ Remove MCU descriptions that are not used by targets.json5. @@ -174,8 +168,7 @@ def find_unused(): @cmsis_mcu_descr.command( - name="check-missing", - short_help="Check if there are any missing MCU descriptions used by targets.json5." + name="check-missing", short_help="Check if there are any missing MCU descriptions used by targets.json5." 
) def check_missing(): used_mcu_names = get_mcu_names_used_by_targets_json5() @@ -191,19 +184,20 @@ def check_missing(): print("No missing MCUs, no work to do.") return - print("The following MCU descriptions are used by targets.json5 and need to be added to" - " cmsis_mcu_descriptions.json5:") + print( + "The following MCU descriptions are used by targets.json5 and need to be added to cmsis_mcu_descriptions.json5:" + ) print("\n".join(missing_mcu_names)) sys.exit(1) @cmsis_mcu_descr.command( name="fetch-missing", - short_help="Fetch any missing MCU descriptions used by targets.json5 or custom_targets.json/json5.." + short_help="Fetch any missing MCU descriptions used by targets.json5 or custom_targets.json/json5.", ) def fetch_missing(): """ - Scans through cmsis_mcu_descriptions.json5 for any missing MCU descriptions that are referenced by + Scans through cmsis_mcu_descriptions.json5 for any missing MCU descriptions that are referenced by targets.json5 or custom_targets.json/json5. If any are found, they are imported from the CMSIS cache. Note that downloaded descriptions should be checked for accuracy before they are committed. @@ -229,13 +223,17 @@ def fetch_missing(): for mcu in missing_mcu_names: if mcu not in cmsis_cache.index: - raise RuntimeError(f"MCU {mcu} is not present in the CMSIS MCU index ({cmsis_cache.index_path}). Maybe " - f"wrong part number, or this MCU simply doesn't exist in the CMSIS index and has " - f"to be added manually?") + raise RuntimeError( + f"MCU {mcu} is not present in the CMSIS MCU index ({cmsis_cache.index_path}). Maybe " + f"wrong part number, or this MCU simply doesn't exist in the CMSIS index and has " + f"to be added manually?"
+ ) missing_mcus_dict[mcu] = cmsis_cache.index[mcu] - - LOGGER.info("In case of Custom target remove 'device_name' from your custom_targets.json5 file and add\n" + - "just the 'memories' section as 'memory_banks' section from content below.\n" + - f"Otherwise add the whole following entries to {CMSIS_MCU_DESCRIPTIONS_JSON_PATH}:") + + LOGGER.info( + "In case of Custom target remove 'device_name' from your custom_targets.json5 file and add\n" + + "just the 'memories' section as 'memory_banks' section from content below.\n" + + f"Otherwise add the whole following entries to {CMSIS_MCU_DESCRIPTIONS_JSON_PATH}:" + ) print(json.dumps(missing_mcus_dict, indent=4, sort_keys=True)) - sys.exit(1) + sys.exit(1) diff --git a/tools/python/mbed_tools/cli/configure.py b/tools/python/mbed_tools/cli/configure.py index 016b7dbfcca..aaf39f08072 100644 --- a/tools/python/mbed_tools/cli/configure.py +++ b/tools/python/mbed_tools/cli/configure.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Command to generate the application CMake configuration script used by the build/compile system.""" + import pathlib import click @@ -14,9 +15,7 @@ @click.command( help="Generate an Mbed OS config CMake file and write it to a .mbedbuild folder in the program directory." ) -@click.option( - "--custom-targets-json", type=click.Path(), default=None, help="Path to custom_targets.json.", -) +@click.option("--custom-targets-json", type=click.Path(), default=None, help="Path to custom_targets.json.") @click.option( "-t", "--toolchain", @@ -25,10 +24,13 @@ help="The toolchain you are using to build your app.", ) @click.option("-m", "--mbed-target", required=True, help="A build target for an Mbed-enabled device, eg. 
K64F") -@click.option("-o", "--output-dir", - type=click.Path(path_type=pathlib.Path), - required=True, - help="Path to output directory (CMake binary dir)") +@click.option( + "-o", + "--output-dir", + type=click.Path(path_type=pathlib.Path), + required=True, + help="Path to output directory (CMake binary dir)", +) @click.option( "-p", "--program-path", @@ -36,12 +38,8 @@ default=".", help="Path to local Mbed program. By default is the current working directory.", ) -@click.option( - "--mbed-os-path", type=click.Path(), default=None, help="Path to local Mbed OS directory.", -) -@click.option( - "--app-config", type=click.Path(), default=None, help="Path to application configuration file.", -) +@click.option("--mbed-os-path", type=click.Path(), default=None, help="Path to local Mbed OS directory.") +@click.option("--app-config", type=click.Path(), default=None, help="Path to application configuration file.") def configure( toolchain: str, mbed_target: str, @@ -49,7 +47,7 @@ def configure( mbed_os_path: str, output_dir: pathlib.Path, custom_targets_json: str, - app_config: str + app_config: str, ) -> None: """Exports a mbed_config.cmake file to build directory in the program root. 
@@ -72,7 +70,9 @@ def configure( if mbed_os_path is None: program = MbedProgram.from_existing(pathlib.Path(program_path), output_dir) else: - program = MbedProgram.from_existing(pathlib.Path(program_path), output_dir, pathlib.Path(mbed_os_path).resolve()) + program = MbedProgram.from_existing( + pathlib.Path(program_path), output_dir, pathlib.Path(mbed_os_path).resolve() + ) if custom_targets_json is not None: program.files.custom_targets_json = pathlib.Path(custom_targets_json) if app_config is not None: diff --git a/tools/python/mbed_tools/cli/list_connected_devices.py b/tools/python/mbed_tools/cli/list_connected_devices.py index 1e74d51ea60..ae644c7b52c 100644 --- a/tools/python/mbed_tools/cli/list_connected_devices.py +++ b/tools/python/mbed_tools/cli/list_connected_devices.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Command to list all Mbed enabled devices connected to the host computer.""" + import click import json from operator import attrgetter @@ -33,10 +34,7 @@ def list_connected_devices(format: str, show_all: bool) -> None: else: devices = _sort_devices(connected_devices.identified_devices) - output_builders = { - "table": _build_tabular_output, - "json": _build_json_output, - } + output_builders = {"table": _build_tabular_output, "json": _build_json_output} if devices: output = output_builders[format](devices) click.echo(output) diff --git a/tools/python/mbed_tools/cli/main.py b/tools/python/mbed_tools/cli/main.py index 5ab21fc3291..01b755f3865 100644 --- a/tools/python/mbed_tools/cli/main.py +++ b/tools/python/mbed_tools/cli/main.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Main cli entry point.""" + import logging import sys @@ -80,5 +81,5 @@ def cli(verbose: int, traceback: bool) -> None: cli.add_command(sterm, "sterm") cli.add_command(cmsis_mcu_descr) -if __name__ == '__main__': +if __name__ == "__main__": cli() diff --git a/tools/python/mbed_tools/cli/project_management.py 
b/tools/python/mbed_tools/cli/project_management.py index a0b3b762fab..c2e2bbd9b2f 100644 --- a/tools/python/mbed_tools/cli/project_management.py +++ b/tools/python/mbed_tools/cli/project_management.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Project management commands: new, import_, deploy and libs.""" + import os import pathlib diff --git a/tools/python/mbed_tools/cli/sterm.py b/tools/python/mbed_tools/cli/sterm.py index ec40861b928..2b7050771da 100644 --- a/tools/python/mbed_tools/cli/sterm.py +++ b/tools/python/mbed_tools/cli/sterm.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Command to launch a serial terminal to a connected Mbed device.""" + from typing import Any, Optional, Tuple import click diff --git a/tools/python/mbed_tools/devices/__init__.py b/tools/python/mbed_tools/devices/__init__.py index b0e573ffe06..428683a4383 100644 --- a/tools/python/mbed_tools/devices/__init__.py +++ b/tools/python/mbed_tools/devices/__init__.py @@ -10,10 +10,7 @@ For the command line interface to the API see the package https://github.com/ARMmbed/mbed-tools """ -from mbed_tools.devices.devices import ( - get_connected_devices, - find_connected_device, - find_all_connected_devices, -) + +from mbed_tools.devices.devices import get_connected_devices, find_connected_device, find_all_connected_devices from mbed_tools.devices.device import Device from mbed_tools.devices import exceptions diff --git a/tools/python/mbed_tools/devices/_internal/base_detector.py b/tools/python/mbed_tools/devices/_internal/base_detector.py index 84d8882b89b..5adfeedee82 100644 --- a/tools/python/mbed_tools/devices/_internal/base_detector.py +++ b/tools/python/mbed_tools/devices/_internal/base_detector.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Interface for device detectors.""" + from abc import ABC, abstractmethod from typing import List diff --git a/tools/python/mbed_tools/devices/_internal/candidate_device.py 
b/tools/python/mbed_tools/devices/_internal/candidate_device.py index 9246479f17a..377798cbeb9 100644 --- a/tools/python/mbed_tools/devices/_internal/candidate_device.py +++ b/tools/python/mbed_tools/devices/_internal/candidate_device.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Defines CandidateDevice model used for device detection.""" + from dataclasses import dataclass from typing import Optional, Tuple, Any, Union, cast from pathlib import Path diff --git a/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py b/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py index 3fbdf1bc6ee..4eca620ea52 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Device detector for Darwin.""" + import logging import pathlib import re diff --git a/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py b/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py index e266570d73d..a0e6fab140e 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Interactions with `diskutil`.""" + import plistlib import subprocess from typing import Dict, Iterable, List, Optional, cast diff --git a/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py b/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py index afab905cdda..61d9d2fe337 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Interactions with `ioreg`.""" + import plistlib import subprocess from typing import Any, Dict, Iterable, List, Optional, cast diff --git a/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py 
b/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py index a0b99ddf6f5..633ebf68e26 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Interactions with `system_profiler`.""" + import plistlib import re import subprocess diff --git a/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py b/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py index 418cbc2b9b9..2302a616f42 100644 --- a/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py +++ b/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Detect Mbed devices connected to host computer.""" + import platform from typing import Iterable diff --git a/tools/python/mbed_tools/devices/_internal/file_parser.py b/tools/python/mbed_tools/devices/_internal/file_parser.py index b39622dc5d2..de0f9a4681d 100644 --- a/tools/python/mbed_tools/devices/_internal/file_parser.py +++ b/tools/python/mbed_tools/devices/_internal/file_parser.py @@ -89,6 +89,7 @@ """ + import logging import pathlib import re diff --git a/tools/python/mbed_tools/devices/_internal/linux/device_detector.py b/tools/python/mbed_tools/devices/_internal/linux/device_detector.py index 628a5e20b1d..c4623d87cbb 100644 --- a/tools/python/mbed_tools/devices/_internal/linux/device_detector.py +++ b/tools/python/mbed_tools/devices/_internal/linux/device_detector.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Defines a device detector for Linux.""" + import logging from pathlib import Path from typing import Tuple, List, Optional, cast diff --git a/tools/python/mbed_tools/devices/_internal/resolve_board.py b/tools/python/mbed_tools/devices/_internal/resolve_board.py index 5a8a12ce2d7..53919ada141 100644 --- 
a/tools/python/mbed_tools/devices/_internal/resolve_board.py +++ b/tools/python/mbed_tools/devices/_internal/resolve_board.py @@ -9,16 +9,12 @@ For more information on the mbed-targets package visit https://github.com/ARMmbed/mbed-targets """ + import logging from typing import Optional -from mbed_tools.targets import ( - Board, - get_board_by_product_code, - get_board_by_online_id, - get_board_by_jlink_slug, -) +from mbed_tools.targets import Board, get_board_by_product_code, get_board_by_online_id, get_board_by_jlink_slug from mbed_tools.targets.exceptions import UnknownBoard, MbedTargetsError from mbed_tools.devices._internal.exceptions import NoBoardForCandidate, ResolveBoardError diff --git a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py index facd11da22f..48d5cad78e6 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py +++ b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Defines a generic Win32 component.""" + import logging from abc import ABC, abstractmethod from typing import List, Any, Generator, Optional, NamedTuple, cast @@ -10,10 +11,7 @@ import pythoncom import win32com.client -from mbed_tools.devices._internal.windows.component_descriptor_utils import ( - UNKNOWN_VALUE, - is_undefined_data_object, -) +from mbed_tools.devices._internal.windows.component_descriptor_utils import UNKNOWN_VALUE, is_undefined_data_object NAMED_TUPLE_FIELDS_ATTRIBUTE = "_fields" diff --git a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py index 20436d00c10..c2468d831a3 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py +++ 
b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Utilities with regards to Win32 component descriptors.""" + from typing import Any, NamedTuple, Union from collections import OrderedDict diff --git a/tools/python/mbed_tools/devices/_internal/windows/device_detector.py b/tools/python/mbed_tools/devices/_internal/windows/device_detector.py index 338772db733..561d03b1405 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/device_detector.py +++ b/tools/python/mbed_tools/devices/_internal/windows/device_detector.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Defines a device detector for Windows.""" + from pathlib import Path from typing import List diff --git a/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py b/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py index 55beb5c839f..35a1687d0a3 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py +++ b/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Utility in charge of finding the instance ID of a device.""" + import win32con import win32api from mbed_tools.devices._internal.exceptions import SystemException diff --git a/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py b/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py index bbdb366d888..95c52b275f6 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py +++ b/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py @@ -9,6 +9,7 @@ This file tries to reconcile all these pieces of information so that it is presented as a single object: AggregatedDiskData. 
""" + from typing import List, Optional, Callable from typing import NamedTuple, cast diff --git a/tools/python/mbed_tools/devices/_internal/windows/serial_port.py b/tools/python/mbed_tools/devices/_internal/windows/serial_port.py index 2a77fe14215..c52aecf9d1f 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/serial_port.py +++ b/tools/python/mbed_tools/devices/_internal/windows/serial_port.py @@ -14,10 +14,7 @@ import re from typing import NamedTuple, cast -from mbed_tools.devices._internal.windows.component_descriptor import ( - ComponentDescriptor, - UNKNOWN_VALUE, -) +from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor, UNKNOWN_VALUE CAPTION_PATTERN = re.compile(r"^.* [(](.*)[)]$") diff --git a/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py b/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py index 3c9b34d2788..b517425b05d 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py +++ b/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Loads serial port data.""" + from typing import Optional, Generator, cast, List from mbed_tools.devices._internal.windows.system_data_loader import SystemDataLoader, ComponentsLoader diff --git a/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py b/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py index 73968e55b41..9cda64745b1 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py +++ b/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Loads system data in parallel and all at once in order to improve performance.""" + from concurrent.futures import ThreadPoolExecutor from typing import List, Tuple, Dict, Generator, Optional, cast diff --git 
a/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py b/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py index 7d66c76cbd6..b26a470f64d 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py +++ b/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Aggregation of all USB data given by Windows in various locations.""" + from typing import NamedTuple, List, cast from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor @@ -75,7 +76,7 @@ def aggregate(self, usb_id: UsbIdentifier) -> AggregatedUsbData: usb_data = self._usb_devices.get_usb_devices(usb_id) aggregated_data = AggregatedUsbData() aggregated_data.set_data_values( - dict(usb_identifier=usb_id, disks=disk_data, serial_port=serial_data, related_usb_interfaces=usb_data,) + dict(usb_identifier=usb_id, disks=disk_data, serial_port=serial_data, related_usb_interfaces=usb_data) ) return aggregated_data diff --git a/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py b/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py index d8e794876b5..cfb1517e85a 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py +++ b/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Defines a Windows identifier.""" + from functools import total_ordering from typing import Any, Optional diff --git a/tools/python/mbed_tools/devices/device.py b/tools/python/mbed_tools/devices/device.py index c7836ef9fca..7bcf955d3a4 100644 --- a/tools/python/mbed_tools/devices/device.py +++ b/tools/python/mbed_tools/devices/device.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Data model definition for Device and ConnectedDevices.""" + from dataclasses import dataclass, field from pathlib import Path from 
typing import Tuple, Optional, List diff --git a/tools/python/mbed_tools/devices/devices.py b/tools/python/mbed_tools/devices/devices.py index 5736535b8ca..60b2596118f 100644 --- a/tools/python/mbed_tools/devices/devices.py +++ b/tools/python/mbed_tools/devices/devices.py @@ -48,7 +48,7 @@ def find_connected_device(target_name: str, identifier: Optional[int] = None) -> return devices[identifier] detected_targets = "\n".join( - f"target: {dev.mbed_board.board_type}[{i}]," f" port: {dev.serial_port}, mount point(s): {dev.mount_points}" + f"target: {dev.mbed_board.board_type}[{i}], port: {dev.serial_port}, mount point(s): {dev.mount_points}" for i, dev in enumerate(devices) ) if identifier is None: diff --git a/tools/python/mbed_tools/devices/exceptions.py b/tools/python/mbed_tools/devices/exceptions.py index 570941d9356..ea453e850e9 100644 --- a/tools/python/mbed_tools/devices/exceptions.py +++ b/tools/python/mbed_tools/devices/exceptions.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Public exceptions raised by the package.""" + from mbed_tools.lib.exceptions import ToolsError diff --git a/tools/python/mbed_tools/lib/json_helpers.py b/tools/python/mbed_tools/lib/json_helpers.py index 740f81b92b2..ef6f9f85489 100644 --- a/tools/python/mbed_tools/lib/json_helpers.py +++ b/tools/python/mbed_tools/lib/json_helpers.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Helpers for json related functions.""" + import json import pyjson5 import logging @@ -15,14 +16,14 @@ def decode_json_file(path: Path) -> Any: """Return the contents of json file.""" - if path.suffix == '.json': + if path.suffix == ".json": try: logger.debug(f"Loading JSON file {path}") return json.loads(path.read_text()) except json.JSONDecodeError: logger.error(f"Failed to decode JSON data in the file located at '{path}'") raise - elif path.suffix == '.json5': + elif path.suffix == ".json5": try: logger.debug(f"Loading JSON file {path}") with path.open() as json_file: diff --git 
a/tools/python/mbed_tools/lib/logging.py b/tools/python/mbed_tools/lib/logging.py index 6dcf8841d46..923c5b129a1 100644 --- a/tools/python/mbed_tools/lib/logging.py +++ b/tools/python/mbed_tools/lib/logging.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Helpers for logging errors according to severity of the exception.""" + from typing import Type, Optional, cast from types import TracebackType import logging diff --git a/tools/python/mbed_tools/lib/python_helpers.py b/tools/python/mbed_tools/lib/python_helpers.py index 9dc236e834a..739b5fa95f7 100644 --- a/tools/python/mbed_tools/lib/python_helpers.py +++ b/tools/python/mbed_tools/lib/python_helpers.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Helpers for python language related functions.""" + from typing import Iterable, List diff --git a/tools/python/mbed_tools/project/_internal/git_utils.py b/tools/python/mbed_tools/project/_internal/git_utils.py index a84d5820627..5adb2d3eaf0 100644 --- a/tools/python/mbed_tools/project/_internal/git_utils.py +++ b/tools/python/mbed_tools/project/_internal/git_utils.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Wrappers for git operations.""" + from dataclasses import dataclass from pathlib import Path diff --git a/tools/python/mbed_tools/project/_internal/libraries.py b/tools/python/mbed_tools/project/_internal/libraries.py index 8c3f1a99445..84bbd101677 100644 --- a/tools/python/mbed_tools/project/_internal/libraries.py +++ b/tools/python/mbed_tools/project/_internal/libraries.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Objects for library reference handling.""" + import logging from dataclasses import dataclass diff --git a/tools/python/mbed_tools/project/_internal/progress.py b/tools/python/mbed_tools/project/_internal/progress.py index 7ad4d5de026..d9b81b1ee8a 100644 --- a/tools/python/mbed_tools/project/_internal/progress.py +++ b/tools/python/mbed_tools/project/_internal/progress.py @@ -3,6 +3,7 @@ # 
SPDX-License-Identifier: Apache-2.0 # """Progress bar for git operations.""" + import sys from typing import Optional, Any diff --git a/tools/python/mbed_tools/project/_internal/project_data.py b/tools/python/mbed_tools/project/_internal/project_data.py index 952521659cd..3be4b102f03 100644 --- a/tools/python/mbed_tools/project/_internal/project_data.py +++ b/tools/python/mbed_tools/project/_internal/project_data.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Objects representing Mbed program and library data.""" + import json import logging @@ -164,14 +165,19 @@ def from_existing(cls, root_path: Path, check_root_path_exists: bool = True) -> raise ValueError(f"This MbedOS copy does not contain a {TARGETS_JSON_FILE_PATH} file.") if root_path.exists() and not cmsis_mcu_descriptions_json_file.exists(): - raise ValueError(f"This MbedOS copy does not contain a " - f"{CMSIS_MCU_DESCRIPTIONS_JSON_FILE_PATH.name} file.") + raise ValueError(f"This MbedOS copy does not contain a {CMSIS_MCU_DESCRIPTIONS_JSON_FILE_PATH.name} file.") - return cls(root=root_path, targets_json_file=targets_json_file, - cmsis_mcu_descriptions_json_file=cmsis_mcu_descriptions_json_file) + return cls( + root=root_path, + targets_json_file=targets_json_file, + cmsis_mcu_descriptions_json_file=cmsis_mcu_descriptions_json_file, + ) @classmethod def from_new(cls, root_path: Path) -> "MbedOS": """Create MbedOS from an empty or new directory.""" - return cls(root=root_path, targets_json_file=root_path / TARGETS_JSON_FILE_PATH, - cmsis_mcu_descriptions_json_file=root_path / CMSIS_MCU_DESCRIPTIONS_JSON_FILE_PATH) + return cls( + root=root_path, + targets_json_file=root_path / TARGETS_JSON_FILE_PATH, + cmsis_mcu_descriptions_json_file=root_path / CMSIS_MCU_DESCRIPTIONS_JSON_FILE_PATH, + ) diff --git a/tools/python/mbed_tools/project/_internal/render_templates.py b/tools/python/mbed_tools/project/_internal/render_templates.py index cb8e1043a71..824b174cde6 100644 --- 
a/tools/python/mbed_tools/project/_internal/render_templates.py +++ b/tools/python/mbed_tools/project/_internal/render_templates.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Render jinja templates required by the project package.""" + import datetime from pathlib import Path diff --git a/tools/python/mbed_tools/project/mbed_program.py b/tools/python/mbed_tools/project/mbed_program.py index cca36f356fc..084d5338f49 100644 --- a/tools/python/mbed_tools/project/mbed_program.py +++ b/tools/python/mbed_tools/project/mbed_program.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Mbed Program abstraction layer.""" + import logging import pathlib @@ -68,7 +69,7 @@ def from_new(cls, dir_path: Path) -> "MbedProgram": @classmethod def from_existing( - cls, dir_path: Path, build_dir: Path, mbed_os_path: Path = None, check_mbed_os: bool = True, + cls, dir_path: Path, build_dir: Path, mbed_os_path: Path = None, check_mbed_os: bool = True ) -> "MbedProgram": """Create an MbedProgram from an existing program directory. diff --git a/tools/python/mbed_tools/project/project.py b/tools/python/mbed_tools/project/project.py index df7fedda850..8469dd2e2d7 100644 --- a/tools/python/mbed_tools/project/project.py +++ b/tools/python/mbed_tools/project/project.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Defines the public API of the package.""" + import pathlib import logging diff --git a/tools/python/mbed_tools/sterm/terminal.py b/tools/python/mbed_tools/sterm/terminal.py index cfe5e85c97b..85149d48e88 100644 --- a/tools/python/mbed_tools/sterm/terminal.py +++ b/tools/python/mbed_tools/sterm/terminal.py @@ -12,6 +12,7 @@ To start the terminal clients should call the "run" function, this is the entry point to the module. 
""" + from typing import Any from serial import Serial diff --git a/tools/python/mbed_tools/targets/__init__.py b/tools/python/mbed_tools/targets/__init__.py index e6ba80a75b4..44ec67ecc98 100644 --- a/tools/python/mbed_tools/targets/__init__.py +++ b/tools/python/mbed_tools/targets/__init__.py @@ -20,14 +20,8 @@ For details about configuration of this module, look at `mbed_tools.targets.config`. """ + from mbed_tools.targets import exceptions -from mbed_tools.targets.get_target import ( - get_target_by_name, - get_target_by_board_type, -) -from mbed_tools.targets.get_board import ( - get_board_by_product_code, - get_board_by_online_id, - get_board_by_jlink_slug, -) +from mbed_tools.targets.get_target import get_target_by_name, get_target_by_board_type +from mbed_tools.targets.get_board import get_board_by_product_code, get_board_by_online_id, get_board_by_jlink_slug from mbed_tools.targets.board import Board diff --git a/tools/python/mbed_tools/targets/_internal/target_attributes.py b/tools/python/mbed_tools/targets/_internal/target_attributes.py index f3b519bb43c..6cefdc5a067 100644 --- a/tools/python/mbed_tools/targets/_internal/target_attributes.py +++ b/tools/python/mbed_tools/targets/_internal/target_attributes.py @@ -7,6 +7,7 @@ This information is parsed from the targets.json configuration file found in the mbed-os repo. """ + import logging import pathlib from typing import Dict, Any, Set, Optional @@ -69,7 +70,9 @@ def get_target_attributes(targets_json_data: dict, target_name: str, allow_non_p return target_attributes -def _extract_target_attributes(all_targets_data: Dict[str, Any], target_name: str, allow_non_public_targets: bool) -> dict: +def _extract_target_attributes( + all_targets_data: Dict[str, Any], target_name: str, allow_non_public_targets: bool +) -> dict: """Extracts the definition for a particular target from all the targets in targets.json. 
Args: @@ -88,7 +91,9 @@ def _extract_target_attributes(all_targets_data: Dict[str, Any], target_name: st # All target definitions are assumed to be public unless specifically set as public=false if not all_targets_data[target_name].get("public", True) and not allow_non_public_targets: - raise TargetNotFoundError(f"Cannot get attributes for {target_name} because it is marked non-public in targets JSON. This likely means you set MBED_TARGET to the name of the MCU rather than the name of the board.") + raise TargetNotFoundError( + f"Cannot get attributes for {target_name} because it is marked non-public in targets JSON. This likely means you set MBED_TARGET to the name of the MCU rather than the name of the board." + ) target_attributes = get_overriding_attributes_for_target(all_targets_data, target_name) accumulated_attributes = get_accumulating_attributes_for_target(all_targets_data, target_name) diff --git a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py index 9b37cf41819..eadaa66a135 100644 --- a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py +++ b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py @@ -8,6 +8,7 @@ The hierarchy is also slightly different to the other fields as it is determined as 'breadth-first' in multiple inheritance, so targets at a lower level will always take precedence over targets at a higher level. 
""" + import itertools from collections import deque from typing import Dict, List, Any, Deque @@ -151,7 +152,6 @@ def _calculate_attribute_elements( """ accumulator = starting_state for target in reversed(applicable_accumulation_order): - add_modifier = f"{attribute_name}_add" if add_modifier in target: to_add = target[add_modifier] diff --git a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py index 158f0f71036..214ba3e01bb 100644 --- a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py +++ b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py @@ -18,6 +18,7 @@ This means a target on a higher level could potentially override one on a lower level. """ + from collections import deque from functools import reduce from typing import Dict, List, Any, Deque, Set diff --git a/tools/python/mbed_tools/targets/board.py b/tools/python/mbed_tools/targets/board.py index 775e0b03338..a4b8539e772 100644 --- a/tools/python/mbed_tools/targets/board.py +++ b/tools/python/mbed_tools/targets/board.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Representation of an Mbed-Enabled Development Board and related utilities.""" + from dataclasses import dataclass from typing import Tuple diff --git a/tools/python/mbed_tools/targets/boards.py b/tools/python/mbed_tools/targets/boards.py index ff9af22636f..3440af00258 100644 --- a/tools/python/mbed_tools/targets/boards.py +++ b/tools/python/mbed_tools/targets/boards.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Interface to the Board Database.""" + import json from dataclasses import asdict diff --git a/tools/python/mbed_tools/targets/env.py b/tools/python/mbed_tools/targets/env.py index ddf4afc9f8b..205b443f6f2 100644 --- a/tools/python/mbed_tools/targets/env.py +++ 
b/tools/python/mbed_tools/targets/env.py @@ -18,6 +18,7 @@ Do not upload `.env` files containing private tokens to version control! If you use this package as a dependency of your project, please ensure to include the `.env` in your `.gitignore`. """ + import os import dotenv diff --git a/tools/python/mbed_tools/targets/get_board.py b/tools/python/mbed_tools/targets/get_board.py index 65c8dc6ba2f..53df7719f94 100644 --- a/tools/python/mbed_tools/targets/get_board.py +++ b/tools/python/mbed_tools/targets/get_board.py @@ -6,6 +6,7 @@ An instance of `mbed_tools.targets.board.Board` can be retrieved by calling one of the public functions. """ + import logging from enum import Enum from typing import Callable diff --git a/tools/python/mbed_tools/targets/get_target.py b/tools/python/mbed_tools/targets/get_target.py index 3cc7bd1984e..9df9182b3ea 100644 --- a/tools/python/mbed_tools/targets/get_target.py +++ b/tools/python/mbed_tools/targets/get_target.py @@ -7,6 +7,7 @@ An instance of `mbed_tools.targets.target.Target` can be retrieved by calling one of the public functions. """ + from mbed_tools.targets.exceptions import TargetError from mbed_tools.targets._internal import target_attributes diff --git a/tools/python/memap/__init__.py b/tools/python/memap/__init__.py index 2bae17afc88..04d33f049d1 100644 --- a/tools/python/memap/__init__.py +++ b/tools/python/memap/__init__.py @@ -1,4 +1,4 @@ # # Copyright (c) 2020-2023 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 -# \ No newline at end of file +# diff --git a/tools/python/memap/memap.py b/tools/python/memap/memap.py index df86bf7b95e..55a95b340a8 100644 --- a/tools/python/memap/memap.py +++ b/tools/python/memap/memap.py @@ -17,6 +17,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + from __future__ import annotations """ @@ -53,8 +54,7 @@ from sys import stdout, exit, argv, path import sys from os import sep -from os.path import (basename, dirname, join, relpath, abspath, commonprefix, - splitext) +from os.path import basename, dirname, join, relpath, abspath, commonprefix, splitext import re import csv import json @@ -66,6 +66,7 @@ # prettytable moved this constant into an enum in the Python 3.9 release. if sys.version_info >= (3, 9): from prettytable import HRuleStyle + HEADER = HRuleStyle.HEADER else: from prettytable import HEADER @@ -78,11 +79,7 @@ ROOT = abspath(join(dirname(__file__), "..")) path.insert(0, ROOT) -from .utils import ( - argparse_filestring_type, - argparse_lowercase_hyphen_type, - argparse_uppercase_type -) # noqa: E402 +from .utils import argparse_filestring_type, argparse_lowercase_hyphen_type, argparse_uppercase_type # noqa: E402 @dataclasses.dataclass @@ -108,12 +105,24 @@ def contains_addr(self, addr: int) -> bool: class _Parser(ABC): """Internal interface for parsing""" - SECTIONS = ('.text', '.data', '.bss', '.heap', '.heap_0', '.stack') - MISC_FLASH_SECTIONS = ('.interrupts', '.flash_config') - OTHER_SECTIONS = ('.interrupts_ram', '.init', '.ARM.extab', - '.ARM.exidx', '.ARM.attributes', '.eh_frame', - '.init_array', '.fini_array', '.jcr', '.stab', - '.stabstr', '.ARM.exidx', '.ARM') + + SECTIONS = (".text", ".data", ".bss", ".heap", ".heap_0", ".stack") + MISC_FLASH_SECTIONS = (".interrupts", ".flash_config") + OTHER_SECTIONS = ( + ".interrupts_ram", + ".init", + ".ARM.extab", + ".ARM.exidx", + ".ARM.attributes", + ".eh_frame", + ".init_array", + ".fini_array", + ".jcr", + ".stab", + ".stabstr", + ".ARM.exidx", + ".ARM", + ) def __init__(self): self.modules: dict[str, dict[str, int]] = {} @@ -135,7 +144,9 @@ def _add_symbol_to_memory_banks(self, symbol_name: str, symbol_start_addr: int, for banks in self.memory_banks.values(): for bank_info in banks: if bank_info.contains_addr(symbol_start_addr): - if 
bank_info.contains_addr(end_addr - 1): # end_addr is the first address past the end of the symbol so we subtract 1 here + if bank_info.contains_addr( + end_addr - 1 + ): # end_addr is the first address past the end of the symbol so we subtract 1 here # Symbol fully inside this memory bank bank_info.used_size += size @@ -147,11 +158,15 @@ def _add_symbol_to_memory_banks(self, symbol_name: str, symbol_start_addr: int, first_addr_after_bank = bank_info.start_addr + bank_info.total_size bank_info.used_size += first_addr_after_bank - symbol_start_addr - print(f"Warning: Symbol {symbol_name} (at address 0x{symbol_start_addr:x}, size {size}) is not inside a " - f"defined memory bank for this target.") + print( + f"Warning: Symbol {symbol_name} (at address 0x{symbol_start_addr:x}, size {size}) is not inside a " + f"defined memory bank for this target." + ) - def add_symbol(self, symbol_name: str, object_name: str, start_addr: int, size: int, section: str, vma_lma_offset: int) -> None: - """ Adds information about a symbol (e.g. a function or global variable) to the data structures. + def add_symbol( + self, symbol_name: str, object_name: str, start_addr: int, size: int, section: str, vma_lma_offset: int + ) -> None: + """Adds information about a symbol (e.g. a function or global variable) to the data structures. Positional arguments: symbol_name - Descriptive name of the symbol, e.g. 
".text.some_function" or "*fill*" @@ -195,11 +210,9 @@ def load_memory_banks_info(self, memory_banks_json_file: TextIO) -> None: memory_banks_json = json.load(memory_banks_json_file) for bank_type, banks in memory_banks_json["configured_memory_banks"].items(): for bank_name, bank_data in banks.items(): - self.memory_banks[bank_type].append(MemoryBankInfo( - name=bank_name, - start_addr=bank_data["start"], - total_size=bank_data["size"] - )) + self.memory_banks[bank_type].append( + MemoryBankInfo(name=bank_name, start_addr=bank_data["start"], total_size=bank_data["size"]) + ) @abstractmethod def parse_mapfile(self, file_desc: TextIO) -> dict[str, dict[str, int]]: @@ -215,13 +228,11 @@ def parse_mapfile(self, file_desc: TextIO) -> dict[str, dict[str, int]]: class _GccParser(_Parser): - RE_OBJECT_FILE = re.compile(r'^(.+\/.+\.o(bj)?)$') - RE_LIBRARY_OBJECT = re.compile( - r'^.*' + r''.format(sep) + r'lib((.+\.a)\((.+\.o(bj)?)\))$' - ) - RE_STD_SECTION = re.compile(r'^\s+.*0x(\w{8,16})\s+0x(\w+)\s(.+)$') - RE_FILL_SECTION = re.compile(r'^\s*\*fill\*\s+0x(\w{8,16})\s+0x(\w+).*$') - RE_TRANS_FILE = re.compile(r'^(.+\/|.+\.ltrans.o(bj)?)$') + RE_OBJECT_FILE = re.compile(r"^(.+\/.+\.o(bj)?)$") + RE_LIBRARY_OBJECT = re.compile(r"^.*" + r"".format(sep) + r"lib((.+\.a)\((.+\.o(bj)?)\))$") + RE_STD_SECTION = re.compile(r"^\s+.*0x(\w{8,16})\s+0x(\w+)\s(.+)$") + RE_FILL_SECTION = re.compile(r"^\s*\*fill\*\s+0x(\w{8,16})\s+0x(\w+).*$") + RE_TRANS_FILE = re.compile(r"^(.+\/|.+\.ltrans.o(bj)?)$") OBJECT_EXTENSIONS = (".o", ".obj") # Parses a line beginning a new output section in the map file that has a load address @@ -230,7 +241,9 @@ class _GccParser(_Parser): # 2 = in-memory address, hex, no 0x # 3 = section size # 4 = load address, i.e. 
where is the data for this section stored in flash - RE_OUTPUT_SECTION_WITH_LOAD_ADDRESS = re.compile(r'^(.\w+) +0x([0-9a-f]+) +0x([0-9a-f]+) +load address +0x([0-9a-f]+)') + RE_OUTPUT_SECTION_WITH_LOAD_ADDRESS = re.compile( + r"^(.\w+) +0x([0-9a-f]+) +0x([0-9a-f]+) +load address +0x([0-9a-f]+)" + ) # Parses a line beginning a new output section in the map file does not have a load address # Groups: @@ -238,23 +251,18 @@ class _GccParser(_Parser): # 2 = in-memory address, hex, no 0x # 3 = section size # 4 = load address, i.e. where is the data for this section stored in flash - RE_OUTPUT_SECTION_NO_LOAD_ADDRESS = re.compile(r'^(.\w+) +0x([0-9a-f]+) +0x([0-9a-f]+)') + RE_OUTPUT_SECTION_NO_LOAD_ADDRESS = re.compile(r"^(.\w+) +0x([0-9a-f]+) +0x([0-9a-f]+)") # Gets the input section name from the line, if it exists. # Input section names are always indented 1 space. # Note: This allows up to 3 dots... hopefully that's enough... # It can also capture "*fill*" instead of something that looks like a section name. - RE_INPUT_SECTION_NAME = re.compile(r'^ ((?:\.\w+\.?\w*\.?\w*)|(?:\*fill\*))') + RE_INPUT_SECTION_NAME = re.compile(r"^ ((?:\.\w+\.?\w*\.?\w*)|(?:\*fill\*))") - ALL_SECTIONS = ( - _Parser.SECTIONS - + _Parser.OTHER_SECTIONS - + _Parser.MISC_FLASH_SECTIONS - + ('unknown', ) - ) + ALL_SECTIONS = _Parser.SECTIONS + _Parser.OTHER_SECTIONS + _Parser.MISC_FLASH_SECTIONS + ("unknown",) def check_new_output_section(self, line: str) -> tuple[str, int] | None: - """ Check whether a new output section in a map file has been detected + """Check whether a new output section in a map file has been detected Positional arguments: line - the line to check for a new section @@ -290,7 +298,7 @@ def check_new_output_section(self, line: str) -> tuple[str, int] | None: return section_name, load_addr_offset def check_input_section(self, line) -> Optional[str]: - """ Check whether a new input section in a map file has been detected. 
+ """Check whether a new input section in a map file has been detected. Positional arguments: line - the line to check for a new section @@ -304,7 +312,7 @@ def check_input_section(self, line) -> Optional[str]: return match.group(1) def parse_object_name(self, line: str) -> str: - """ Parse a path to object file + """Parse a path to object file Positional arguments: line - the path to parse the object and module name from @@ -312,7 +320,7 @@ def parse_object_name(self, line: str) -> str: return value - an object file name """ if re.match(self.RE_TRANS_FILE, line): - return '[misc]' + return "[misc]" test_re_mbed_os_name = re.match(self.RE_OBJECT_FILE, line) @@ -320,27 +328,22 @@ def parse_object_name(self, line: str) -> str: object_name = test_re_mbed_os_name.group(1) # corner case: certain objects are provided by the GCC toolchain - if 'arm-none-eabi' in line: - return join('[lib]', 'misc', basename(object_name)) + if "arm-none-eabi" in line: + return join("[lib]", "misc", basename(object_name)) return object_name else: test_re_obj_name = re.match(self.RE_LIBRARY_OBJECT, line) if test_re_obj_name: - return join('[lib]', test_re_obj_name.group(2), - test_re_obj_name.group(3)) + return join("[lib]", test_re_obj_name.group(2), test_re_obj_name.group(3)) else: - if ( - not line.startswith("LONG") and - not line.startswith("linker stubs") - ): - print("Unknown object name found in GCC map file: %s" - % line) - return '[misc]' + if not line.startswith("LONG") and not line.startswith("linker stubs"): + print("Unknown object name found in GCC map file: %s" % line) + return "[misc]" def parse_section(self, line: str) -> tuple[str, int, int]: - """ Parse data from a section of gcc map file describing one symbol in the code. + """Parse data from a section of gcc map file describing one symbol in the code. 
examples: 0x00004308 0x7c ./BUILD/K64F/GCC_ARM/spi_api.o @@ -353,7 +356,7 @@ def parse_section(self, line: str) -> tuple[str, int, int]: """ is_fill = re.match(self.RE_FILL_SECTION, line) if is_fill: - o_name = '[fill]' + o_name = "[fill]" o_start_addr = int(is_fill.group(1), 16) o_size = int(is_fill.group(2), 16) return o_name, o_start_addr, o_size @@ -369,7 +372,7 @@ def parse_section(self, line: str) -> tuple[str, int, int]: return "", 0, 0 def parse_mapfile(self, file_desc: TextIO) -> dict[str, dict[str, int]]: - """ Main logic to decode gcc map files + """Main logic to decode gcc map files Positional arguments: file_desc - a stream object to parse as a gcc map file @@ -378,13 +381,13 @@ def parse_mapfile(self, file_desc: TextIO) -> dict[str, dict[str, int]]: # GCC can put the section/symbol info on its own line or on the same line as the size and address. # So since this is a line oriented parser, we have to remember the most recently seen input & output # section name for later. - current_output_section = 'unknown' + current_output_section = "unknown" current_output_section_addr_offset = 0 - current_input_section = 'unknown' + current_input_section = "unknown" with file_desc as infile: for line in infile: - if line.startswith('Linker script and memory map'): + if line.startswith("Linker script and memory map"): break for line in infile: @@ -405,14 +408,20 @@ def parse_mapfile(self, file_desc: TextIO) -> dict[str, dict[str, int]]: # With GCC at least, the closest we can get to a descriptive symbol name is the input section # name. Thanks to the -ffunction-sections and -fdata-sections options, the section names should # be unique for each symbol. 
- self.add_symbol(current_input_section, symbol_name, symbol_start_addr, symbol_size, current_output_section, current_output_section_addr_offset) - - common_prefix = dirname(commonprefix([ - o for o in self.modules.keys() - if ( - o.endswith(self.OBJECT_EXTENSIONS) - and not o.startswith("[lib]") - )])) + self.add_symbol( + current_input_section, + symbol_name, + symbol_start_addr, + symbol_size, + current_output_section, + current_output_section_addr_offset, + ) + + common_prefix = dirname( + commonprefix( + [o for o in self.modules.keys() if (o.endswith(self.OBJECT_EXTENSIONS) and not o.startswith("[lib]"))] + ) + ) new_modules = {} for name, stats in self.modules.items(): if name.startswith("[lib]"): @@ -429,8 +438,8 @@ class MemapParser(object): and writes out different file types of memory results """ - print_sections = ('.text', '.data', '.bss') - delta_sections = ('.text-delta', '.data-delta', '.bss-delta') + print_sections = (".text", ".data", ".bss") + delta_sections = (".text-delta", ".data-delta", ".bss-delta") # sections to print info (generic for all toolchains) sections = _Parser.SECTIONS @@ -480,29 +489,29 @@ def reduce_depth(self, depth): self.short_modules = dict() for module_name, v in self.modules.items(): split_name = module_name.split(sep) - if split_name[0] == '': + if split_name[0] == "": split_name = split_name[1:] new_name = join(*split_name[:depth]) self.short_modules.setdefault(new_name, defaultdict(int)) for section_idx, value in v.items(): self.short_modules[new_name][section_idx] += value - delta_name = section_idx + '-delta' + delta_name = section_idx + "-delta" self.short_modules[new_name][delta_name] += value if self.old_modules: for module_name, v in self.old_modules.items(): split_name = module_name.split(sep) - if split_name[0] == '': + if split_name[0] == "": split_name = split_name[1:] new_name = join(*split_name[:depth]) self.short_modules.setdefault(new_name, defaultdict(int)) for section_idx, value in v.items(): - delta_name 
= section_idx + '-delta' + delta_name = section_idx + "-delta" self.short_modules[new_name][delta_name] -= value export_formats = ["json", "csv-ci", "html", "table"] def generate_output(self, export_format, depth, file_output=None): - """ Generates summary of memory map data + """Generates summary of memory map data Positional arguments: export_format - the format to dump @@ -518,17 +527,19 @@ def generate_output(self, export_format, depth, file_output=None): self.compute_report() try: if file_output: - file_desc = open(file_output, 'w') + file_desc = open(file_output, "w") else: file_desc = stdout except IOError as error: print("I/O error({0}): {1}".format(error.errno, error.strerror)) return False - to_call = {'json': self.generate_json, - 'html': self.generate_html, - 'csv-ci': self.generate_csv, - 'table': self.generate_table}[export_format] + to_call = { + "json": self.generate_json, + "html": self.generate_html, + "csv-ci": self.generate_csv, + "table": self.generate_table, + }[export_format] output = to_call(file_desc) if file_desc is not stdout: @@ -563,18 +574,18 @@ def generate_html(self, file_desc): modules = name.split(sep) while True: try: - cur_text["value"] += dct['.text'] - cur_text["delta"] += dct['.text'] + cur_text["value"] += dct[".text"] + cur_text["delta"] += dct[".text"] except KeyError: pass try: - cur_bss["value"] += dct['.bss'] - cur_bss["delta"] += dct['.bss'] + cur_bss["value"] += dct[".bss"] + cur_bss["delta"] += dct[".bss"] except KeyError: pass try: - cur_data["value"] += dct['.data'] - cur_data["delta"] += dct['.data'] + cur_data["value"] += dct[".data"] + cur_data["delta"] += dct[".data"] except KeyError: pass if not modules: @@ -591,24 +602,21 @@ def generate_html(self, file_desc): modules = name.split(sep) while True: try: - cur_text["delta"] -= dct['.text'] + cur_text["delta"] -= dct[".text"] except KeyError: pass try: - cur_bss["delta"] -= dct['.bss'] + cur_bss["delta"] -= dct[".bss"] except KeyError: pass try: - 
cur_data["delta"] -= dct['.data'] + cur_data["delta"] -= dct[".data"] except KeyError: pass if not modules: break next_module = modules.pop(0) - if not any( - cld['name'] == next_module - for cld in cur_text['children'] - ): + if not any(cld["name"] == next_module for cld in cur_text["children"]): break cur_text = self._move_up_tree(cur_text, next_module) cur_data = self._move_up_tree(cur_data, next_module) @@ -618,18 +626,17 @@ def generate_html(self, file_desc): "name": "ROM", "value": tree_text["value"] + tree_data["value"], "delta": tree_text["delta"] + tree_data["delta"], - "children": [tree_text, tree_data] + "children": [tree_text, tree_data], } tree_ram = { "name": "RAM", "value": tree_bss["value"] + tree_data["value"], "delta": tree_bss["delta"] + tree_data["delta"], - "children": [tree_bss, tree_data] + "children": [tree_bss, tree_data], } jinja_loader = FileSystemLoader(dirname(abspath(__file__))) - jinja_environment = Environment(loader=jinja_loader, - undefined=StrictUndefined) + jinja_environment = Environment(loader=jinja_loader, undefined=StrictUndefined) template = jinja_environment.get_template("memap_flamegraph.html") name, _ = splitext(basename(file_desc.name)) @@ -637,11 +644,7 @@ def generate_html(self, file_desc): name = name[:-4] if self.tc_name: name = "%s %s" % (name, self.tc_name) - data = { - "name": name, - "rom": json.dumps(tree_rom), - "ram": json.dumps(tree_ram), - } + data = {"name": name, "rom": json.dumps(tree_rom), "ram": json.dumps(tree_ram)} file_desc.write(template.render(data)) return None @@ -652,16 +655,12 @@ def generate_json(self, file_desc): file_desc - the file to write out the final report to """ file_desc.write(json.dumps(self.mem_report, indent=4)) - file_desc.write('\n') + file_desc.write("\n") return None - RAM_FORMAT_STR = ( - "Total Static RAM memory (data + bss): {}({:+}) bytes\n" - ) + RAM_FORMAT_STR = "Total Static RAM memory (data + bss): {}({:+}) bytes\n" - ROM_FORMAT_STR = ( - "Total Flash memory (text + 
data): {}({:+}) bytes\n" - ) + ROM_FORMAT_STR = "Total Flash memory (text + data): {}({:+}) bytes\n" def generate_csv(self, file_desc: TextIO) -> None: """Generate a CSV file from a memoy map @@ -669,8 +668,7 @@ def generate_csv(self, file_desc: TextIO) -> None: Positional arguments: file_desc - the file to write out the final report to """ - writer = csv.writer(file_desc, delimiter=',', - quoting=csv.QUOTE_MINIMAL) + writer = csv.writer(file_desc, delimiter=",", quoting=csv.QUOTE_MINIMAL) module_section = [] sizes = [] @@ -679,11 +677,11 @@ def generate_csv(self, file_desc: TextIO) -> None: module_section.append((i + k)) sizes += [self.short_modules[i][k]] - module_section.append('static_ram') - sizes.append(self.mem_summary['static_ram']) + module_section.append("static_ram") + sizes.append(self.mem_summary["static_ram"]) - module_section.append('total_flash') - sizes.append(self.mem_summary['total_flash']) + module_section.append("total_flash") + sizes.append(self.mem_summary["total_flash"]) writer.writerow(module_section) writer.writerow(sizes) @@ -695,54 +693,46 @@ def generate_table(self, file_desc): Returns: string of the generated table """ # Create table - columns = ['Module'] + columns = ["Module"] columns.extend(self.print_sections) table = PrettyTable(columns, junction_char="|", hrules=HEADER) table.align["Module"] = "l" for col in self.print_sections: - table.align[col] = 'r' + table.align[col] = "r" for i in list(self.print_sections): - table.align[i] = 'r' + table.align[i] = "r" for i in sorted(self.short_modules): row = [i] for k in self.print_sections: - row.append("{}({:+})".format( - self.short_modules[i][k], - self.short_modules[i][k + "-delta"] - )) + row.append("{}({:+})".format(self.short_modules[i][k], self.short_modules[i][k + "-delta"])) table.add_row(row) - subtotal_row = ['Subtotals'] + subtotal_row = ["Subtotals"] for k in self.print_sections: - subtotal_row.append("{}({:+})".format( - self.subtotal[k], self.subtotal[k + '-delta'])) + 
subtotal_row.append("{}({:+})".format(self.subtotal[k], self.subtotal[k + "-delta"])) table.add_row(subtotal_row) output = table.get_string() - output += '\n' + output += "\n" - output += self.RAM_FORMAT_STR.format( - self.mem_summary['static_ram'], - self.mem_summary['static_ram_delta'] - ) - output += self.ROM_FORMAT_STR.format( - self.mem_summary['total_flash'], - self.mem_summary['total_flash_delta'] - ) + output += self.RAM_FORMAT_STR.format(self.mem_summary["static_ram"], self.mem_summary["static_ram_delta"]) + output += self.ROM_FORMAT_STR.format(self.mem_summary["total_flash"], self.mem_summary["total_flash_delta"]) - output += '\n' + output += "\n" for bank_type, banks in self.memory_banks.items(): for bank_info in banks: this_bank_deltas = self.memory_bank_summary[bank_type][bank_info.name] - output += (f"{bank_type} Bank {bank_info.name}: {bank_info.used_size}({this_bank_deltas['delta_bytes_used']:+})/" - f"{bank_info.total_size} bytes used, " - f"{this_bank_deltas['percent_used']:.01f}% ({this_bank_deltas['delta_percent_used']:+.01f}%) used\n") + output += ( + f"{bank_type} Bank {bank_info.name}: {bank_info.used_size}({this_bank_deltas['delta_bytes_used']:+})/" + f"{bank_info.total_size} bytes used, " + f"{this_bank_deltas['percent_used']:.01f}% ({this_bank_deltas['delta_percent_used']:+.01f}%) used\n" + ) return output @@ -758,42 +748,35 @@ def compute_report(self): for mod in self.modules.values(): for k in self.sections: self.subtotal[k] += mod[k] - self.subtotal[k + '-delta'] += mod[k] + self.subtotal[k + "-delta"] += mod[k] if self.old_modules: for mod in self.old_modules.values(): for k in self.sections: - self.subtotal[k + '-delta'] -= mod[k] + self.subtotal[k + "-delta"] -= mod[k] self.mem_summary = { - 'static_ram': self.subtotal['.data'] + self.subtotal['.bss'], - 'static_ram_delta': - self.subtotal['.data-delta'] + self.subtotal['.bss-delta'], - 'total_flash': (self.subtotal['.text'] + self.subtotal['.data']), - 'total_flash_delta': - 
self.subtotal['.text-delta'] + self.subtotal['.data-delta'], + "static_ram": self.subtotal[".data"] + self.subtotal[".bss"], + "static_ram_delta": self.subtotal[".data-delta"] + self.subtotal[".bss-delta"], + "total_flash": (self.subtotal[".text"] + self.subtotal[".data"]), + "total_flash_delta": self.subtotal[".text-delta"] + self.subtotal[".data-delta"], } self.mem_report = {} modules = [] if self.short_modules: for name, sizes in sorted(self.short_modules.items()): - modules.append({ - "module": name, - "size": { - k: sizes.get(k, 0) for k in (self.print_sections + - self.delta_sections) - } - }) + modules.append( + {"module": name, "size": {k: sizes.get(k, 0) for k in (self.print_sections + self.delta_sections)}} + ) self.mem_report["modules"] = modules self.mem_report["summary"] = self.mem_summary # Calculate the delta sizes for each memory bank in a couple different formats - self.memory_bank_summary: dict[str, dict[str, dict[str, float|int]]] = {} + self.memory_bank_summary: dict[str, dict[str, dict[str, float | int]]] = {} for bank_type, banks in self.memory_banks.items(): self.memory_bank_summary[bank_type] = {} for bank_info in banks: - this_bank_info = {} # Find matching memory bank in old memory banks. 
Compare by name as it would be possible @@ -807,16 +790,18 @@ def compute_report(self): this_bank_info["bytes_used"] = bank_info.used_size this_bank_info["total_size"] = bank_info.total_size - this_bank_info["delta_bytes_used"] = 0 if old_bank_info is None else bank_info.used_size - old_bank_info.used_size - this_bank_info["percent_used"] = 100 * bank_info.used_size/bank_info.total_size - this_bank_info["delta_percent_used"] = 100 * this_bank_info["delta_bytes_used"]/bank_info.total_size + this_bank_info["delta_bytes_used"] = ( + 0 if old_bank_info is None else bank_info.used_size - old_bank_info.used_size + ) + this_bank_info["percent_used"] = 100 * bank_info.used_size / bank_info.total_size + this_bank_info["delta_percent_used"] = 100 * this_bank_info["delta_bytes_used"] / bank_info.total_size self.memory_bank_summary[bank_type][bank_info.name] = this_bank_info self.mem_report["memory_bank_usage"] = self.memory_bank_summary def parse(self, mapfile: str, toolchain: str, memory_banks_json_path: str | None) -> bool: - """ Parse and decode map file depending on the toolchain + """Parse and decode map file depending on the toolchain Positional arguments: mapfile - the file name of the memory map file @@ -831,15 +816,15 @@ def parse(self, mapfile: str, toolchain: str, memory_banks_json_path: str | None old_map_parser = parser_class() if memory_banks_json_path is not None: - with open(memory_banks_json_path, 'r') as memory_banks_json_file: + with open(memory_banks_json_path, "r") as memory_banks_json_file: parser.load_memory_banks_info(memory_banks_json_file) try: - with open(mapfile, 'r') as file_input: + with open(mapfile, "r") as file_input: self.modules = parser.parse_mapfile(file_input) self.memory_banks = parser.memory_banks try: - with open("%s.old" % mapfile, 'r') as old_input: + with open("%s.old" % mapfile, "r") as old_input: self.old_modules = old_map_parser.parse_mapfile(old_input) self.old_memory_banks = old_map_parser.memory_banks except IOError: @@ 
-854,43 +839,46 @@ def parse(self, mapfile: str, toolchain: str, memory_banks_json_path: str | None def main(): """Entry Point""" - version = '1.0.0' + version = "1.0.0" # Parser handling - parser = ArgumentParser( - description="Memory Map File Analyser for ARM mbed\nversion %s" % - version) + parser = ArgumentParser(description="Memory Map File Analyser for ARM mbed\nversion %s" % version) - parser.add_argument( - 'file', type=argparse_filestring_type, help='memory map file') + parser.add_argument("file", type=argparse_filestring_type, help="memory map file") parser.add_argument( - '-t', '--toolchain', dest='toolchain', - help='select a toolchain used to build the memory map file (%s)' % - ", ".join(MemapParser.toolchains), + "-t", + "--toolchain", + dest="toolchain", + help="select a toolchain used to build the memory map file (%s)" % ", ".join(MemapParser.toolchains), required=True, - type=argparse_uppercase_type(MemapParser.toolchains, "toolchain")) + type=argparse_uppercase_type(MemapParser.toolchains, "toolchain"), + ) parser.add_argument( - '-d', '--depth', dest='depth', type=int, - help='specify directory depth level to display report', required=False) + "-d", "--depth", dest="depth", type=int, help="specify directory depth level to display report", required=False + ) - parser.add_argument( - '-o', '--output', help='output file name', required=False) + parser.add_argument("-o", "--output", help="output file name", required=False) parser.add_argument( - '-e', '--export', dest='export', required=False, default='table', - type=argparse_lowercase_hyphen_type(MemapParser.export_formats, - 'export format'), - help="export format (examples: %s: default)" % - ", ".join(MemapParser.export_formats)) + "-e", + "--export", + dest="export", + required=False, + default="table", + type=argparse_lowercase_hyphen_type(MemapParser.export_formats, "export format"), + help="export format (examples: %s: default)" % ", ".join(MemapParser.export_formats), + ) - 
parser.add_argument('-v', '--version', action='version', version=version) + parser.add_argument("-v", "--version", action="version", version=version) parser.add_argument( - '-m', '--memory-banks-json', + "-m", + "--memory-banks-json", type=argparse_filestring_type, - help='Path to memory bank JSON file. If passed, memap will track the used space in each memory bank.') + help="Path to memory bank JSON file. If passed, memap will track the used space in each memory bank.", + ) # Parse/run command if len(argv) <= 1: @@ -915,15 +903,11 @@ def main(): returned_string = None # Write output in file if args.output is not None: - returned_string = memap.generate_output( - args.export, - depth, - args.output - ) + returned_string = memap.generate_output(args.export, depth, args.output) else: # Write output in screen returned_string = memap.generate_output(args.export, depth) - if args.export == 'table' and returned_string: + if args.export == "table" and returned_string: print(returned_string) exit(0) diff --git a/tools/python/memap/utils.py b/tools/python/memap/utils.py index 378b15b699c..6c0ee2f35f7 100644 --- a/tools/python/memap/utils.py +++ b/tools/python/memap/utils.py @@ -15,6 +15,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + from __future__ import print_function, division, absolute_import import sys import inspect @@ -38,6 +39,7 @@ def remove_if_in(lst, thing): if thing in lst: lst.remove(thing) + def compile_worker(job): """Standard task runner used for compiling @@ -46,29 +48,20 @@ def compile_worker(job): to run_cmd """ results = [] - for command in job['commands']: + for command in job["commands"]: try: - _, _stderr, _rc = run_cmd(command, work_dir=job['work_dir'], - chroot=job['chroot']) + _, _stderr, _rc = run_cmd(command, work_dir=job["work_dir"], chroot=job["chroot"]) except KeyboardInterrupt: raise ToolException - results.append({ - 'code': _rc, - 'output': _stderr, - 'command': command - }) + results.append({"code": _rc, "output": _stderr, "command": command}) + + return {"source": job["source"], "object": job["object"], "commands": job["commands"], "results": results} - return { - 'source': job['source'], - 'object': job['object'], - 'commands': job['commands'], - 'results': results - } def cmd(command, check=True, verbose=False, shell=False, cwd=None): """A wrapper to run a command as a blocking job""" - text = command if shell else ' '.join(command) + text = command if shell else " ".join(command) if verbose: print(text) return_code = call(command, shell=shell, cwd=cwd) @@ -89,30 +82,28 @@ def run_cmd(command, work_dir=None, chroot=None, redirect=False): """ if chroot: # Conventions managed by the web team for the mbed.org build system - chroot_cmd = [ - '/usr/sbin/chroot', '--userspec=33:33', chroot - ] + chroot_cmd = ["/usr/sbin/chroot", "--userspec=33:33", chroot] for element in command: - chroot_cmd += [element.replace(chroot, '')] + chroot_cmd += [element.replace(chroot, "")] - logging.debug("Running command %s", ' '.join(chroot_cmd)) + logging.debug("Running command %s", " ".join(chroot_cmd)) command = chroot_cmd work_dir = None try: - process = Popen(command, stdout=PIPE, - stderr=STDOUT if redirect else PIPE, cwd=work_dir, - universal_newlines=True) + process 
= Popen( + command, stdout=PIPE, stderr=STDOUT if redirect else PIPE, cwd=work_dir, universal_newlines=True + ) _stdout, _stderr = process.communicate() except OSError: - print("[OS ERROR] Command: "+(' '.join(command))) + print("[OS ERROR] Command: " + (" ".join(command))) raise return _stdout, _stderr, process.returncode def run_cmd_ext(command): - """ A version of run command that checks if the command exists befor running + """A version of run command that checks if the command exists befor running Positional arguments: command - the command line you are trying to invoke @@ -124,7 +115,7 @@ def run_cmd_ext(command): def is_cmd_valid(command): - """ Verify that a command exists and is executable + """Verify that a command exists and is executable Positional arguments: command - the command to check @@ -134,8 +125,7 @@ def is_cmd_valid(command): if not cmd_path: error("%s: Command '%s' can't be found" % (caller, command)) if not is_exec(cmd_path): - error("%s: Command '%s' resolves to file '%s' which is not executable" - % (caller, command, cmd_path)) + error("%s: Command '%s' resolves to file '%s' which is not executable" % (caller, command, cmd_path)) return True @@ -145,30 +135,29 @@ def is_exec(path): Positional arguments: path - the executable """ - return os.access(path, os.X_OK) or os.access(path+'.exe', os.X_OK) + return os.access(path, os.X_OK) or os.access(path + ".exe", os.X_OK) def find_cmd_abspath(command): - """ Returns the absolute path to a command. + """Returns the absolute path to a command. None is returned if no absolute path was found. 
Positional arguhments: command - the command to find the path of """ - if exists(command) or exists(command + '.exe'): + if exists(command) or exists(command + ".exe"): return os.path.abspath(command) - if not 'PATH' in os.environ: - raise Exception("Can't find command path for current platform ('%s')" - % sys.platform) - path_env = os.environ['PATH'] + if not "PATH" in os.environ: + raise Exception("Can't find command path for current platform ('%s')" % sys.platform) + path_env = os.environ["PATH"] for path in path_env.split(os.pathsep): - cmd_path = '%s/%s' % (path, command) - if exists(cmd_path) or exists(cmd_path + '.exe'): + cmd_path = "%s/%s" % (path, command) + if exists(cmd_path) or exists(cmd_path + ".exe"): return cmd_path def mkdir(path): - """ a wrapped makedirs that only tries to create a directory if it does not + """a wrapped makedirs that only tries to create a directory if it does not exist already Positional arguments: @@ -192,7 +181,7 @@ def write_json_to_file(json_data, file_name): mkdir(test_spec_dir) try: - with open(file_name, 'w') as f: + with open(file_name, "w") as f: f.write(json.dumps(json_data, indent=2)) except IOError as e: print("[ERROR] Error writing test spec to file") @@ -200,7 +189,7 @@ def write_json_to_file(json_data, file_name): def copy_file(src, dst): - """ Implement the behaviour of "shutil.copy(src, dst)" without copying the + """Implement the behaviour of "shutil.copy(src, dst)" without copying the permissions (this was causing errors with directories mounted with samba) Positional arguments: @@ -214,7 +203,7 @@ def copy_file(src, dst): def copy_when_different(src, dst): - """ Only copy the file when it's different from its destination. + """Only copy the file when it's different from its destination. 
Positional arguments: src - the source of the copy operation @@ -224,14 +213,14 @@ def copy_when_different(src, dst): _, base = split(src) dst = join(dst, base) if exists(dst): - with open(src, 'rb') as srcfd, open(dst, 'rb') as dstfd: + with open(src, "rb") as srcfd, open(dst, "rb") as dstfd: if srcfd.read() == dstfd.read(): return copyfile(src, dst) def delete_dir_files(directory): - """ A function that does rm -rf + """A function that does rm -rf Positional arguments: directory - the directory to remove @@ -277,27 +266,34 @@ def rel_path(path, base, dot=False): dot - if True, the path will always start with a './' """ final_path = relpath(path, base) - if dot and not final_path.startswith('.'): - final_path = './' + final_path + if dot and not final_path.startswith("."): + final_path = "./" + final_path return final_path class ToolException(Exception): """A class representing an exception throw by the tools""" + pass + class NotSupportedException(Exception): """A class a toolchain not supporting a particular target""" + pass + class InvalidReleaseTargetException(Exception): pass + class NoValidToolchainException(Exception): """A class representing no valid toolchain configurations found on the system""" + pass + def split_path(path): """spilt a file name into it's directory name, base name, and extension @@ -310,7 +306,7 @@ def split_path(path): def get_path_depth(path): - """ Given a path, return the number of directory levels present. + """Given a path, return the number of directory levels present. This roughly translates to the number of path separators (os.sep) + 1. Ex. Given "path/to/dir", this would return 3 Special cases: "." 
and "/" return 0 @@ -322,7 +318,7 @@ def get_path_depth(path): path_depth = 0 head, tail = split(normalized_path) - while tail and tail != '.': + while tail and tail != ".": path_depth += 1 head, tail = split(head) @@ -336,24 +332,25 @@ def args_error(parser, message): parser - the ArgumentParser object that parsed the command line message - what went wrong """ - parser.exit(status=2, message=message+'\n') + parser.exit(status=2, message=message + "\n") def construct_enum(**enums): - """ Create your own pseudo-enums + """Create your own pseudo-enums Keyword arguments: * - a member of the Enum you are creating and it's value """ - return type('Enum', (), enums) + return type("Enum", (), enums) def check_required_modules(required_modules, verbose=True): - """ Function checks for Python modules which should be "importable" - before test suite can be used. - @return returns True if all modules are installed already + """Function checks for Python modules which should be "importable" + before test suite can be used. + @return returns True if all modules are installed already """ import imp + not_installed_modules = [] for module_name in required_modules: try: @@ -369,9 +366,10 @@ def check_required_modules(required_modules, verbose=True): if verbose: if not_installed_modules: - print("Warning: Module(s) %s not installed. Please install " - "required module(s) before using this script." - % (', '.join(not_installed_modules))) + print( + "Warning: Module(s) %s not installed. Please install " + "required module(s) before using this script." 
% (", ".join(not_installed_modules)) + ) if not_installed_modules: return False @@ -388,16 +386,14 @@ def _ordered_dict_collapse_dups(pair_list): elif isinstance(to_ret[key], list): to_ret[key].extend(value) else: - raise ValueError( - "Key %s found twice and is not mergeable" % key - ) + raise ValueError("Key %s found twice and is not mergeable" % key) else: to_ret[key] = value return to_ret def json_file_to_dict(fname): - """ Read a JSON file and return its Python representation, transforming all + """Read a JSON file and return its Python representation, transforming all the strings from Unicode to ASCII. The order of keys in the JSON file is preserved. @@ -405,20 +401,18 @@ def json_file_to_dict(fname): fname - the name of the file to parse """ try: - with io.open(fname, encoding='ascii', - errors='ignore') as file_obj: - return json.load( - file_obj, object_pairs_hook=_ordered_dict_collapse_dups - ) + with io.open(fname, encoding="ascii", errors="ignore") as file_obj: + return json.load(file_obj, object_pairs_hook=_ordered_dict_collapse_dups) except (ValueError, IOError) as e: sys.stderr.write("Error parsing '%s': %s\n" % (fname, e)) raise + # Wowza, double closure def argparse_type(casedness, prefer_hyphen=False): def middle(lst, type_name): def parse_type(string): - """ validate that an argument passed in (as string) is a member of + """validate that an argument passed in (as string) is a member of the list of possible arguments. Offer a suggestion if the case of the string, or the hyphens/underscores do not match the expected style of the argument. @@ -433,66 +427,79 @@ def parse_type(string): return string elif string not in lst and newstring in lst: raise argparse.ArgumentTypeError( - "{0} is not a supported {1}. Did you mean {2}?".format( - string, type_name, newstring)) + "{0} is not a supported {1}. Did you mean {2}?".format(string, type_name, newstring) + ) else: raise argparse.ArgumentTypeError( - "{0} is not a supported {1}. 
Supported {1}s are:\n{2}". - format(string, type_name, columnate(lst))) + "{0} is not a supported {1}. Supported {1}s are:\n{2}".format(string, type_name, columnate(lst)) + ) + return parse_type + return middle + # short cuts for the argparse_type versions argparse_uppercase_type = argparse_type(str.upper, False) argparse_lowercase_type = argparse_type(str.lower, False) argparse_uppercase_hyphen_type = argparse_type(str.upper, True) argparse_lowercase_hyphen_type = argparse_type(str.lower, True) + def argparse_force_type(case): - """ validate that an argument passed in (as string) is a member of the list + """validate that an argument passed in (as string) is a member of the list of possible arguments after converting it's case. """ + def middle(lst, type_name): - """ The parser type generator""" + """The parser type generator""" if not isinstance(lst[0], str): lst = [o.decode() for o in lst] + def parse_type(string): - """ The parser type""" + """The parser type""" if not isinstance(string, str): string = string.decode() for option in lst: if case(string) == case(option): return option raise argparse.ArgumentTypeError( - "{0} is not a supported {1}. Supported {1}s are:\n{2}". - format(string, type_name, columnate(lst))) + "{0} is not a supported {1}. Supported {1}s are:\n{2}".format(string, type_name, columnate(lst)) + ) + return parse_type + return middle + # these two types convert the case of their arguments _before_ validation argparse_force_uppercase_type = argparse_force_type(str.upper) argparse_force_lowercase_type = argparse_force_type(str.lower) + def argparse_many(func): - """ An argument parser combinator that takes in an argument parser and + """An argument parser combinator that takes in an argument parser and creates a new parser that accepts a comma separated list of the same thing. 
""" + def wrap(string): - """ The actual parser""" + """The actual parser""" return [func(s) for s in string.split(",")] + return wrap + def argparse_filestring_type(string): - """ An argument parser that verifies that a string passed in corresponds + """An argument parser that verifies that a string passed in corresponds to a file""" if exists(string): return string else: - raise argparse.ArgumentTypeError( - "{0}"" does not exist in the filesystem.".format(string)) + raise argparse.ArgumentTypeError("{0} does not exist in the filesystem.".format(string)) + def argparse_profile_filestring_type(string): - """ An argument parser that verifies that a string passed in is either + """An argument parser that verifies that a string passed in is either absolute path or a file name (expanded to mbed-os/tools/profiles/.json) of a existing file""" fpath = join(dirname(__file__), "profiles/{}.json".format(string)) @@ -503,11 +510,11 @@ def argparse_profile_filestring_type(string): elif exists(string): return string else: - raise argparse.ArgumentTypeError( - "{0} does not exist in the filesystem.".format(string)) + raise argparse.ArgumentTypeError("{0} does not exist in the filesystem.".format(string)) + def columnate(strings, separator=", ", chars=80): - """ render a list of strings as a in a bunch of columns + """render a list of strings as a in a bunch of columns Positional arguments: strings - the strings to columnate @@ -531,29 +538,34 @@ def columnate(strings, separator=", ", chars=80): output += append return output + def argparse_dir_not_parent(other): """fail if argument provided is a parent of the specified directory""" + def parse_type(not_parent): """The parser type""" abs_other = abspath(other) abs_not_parent = abspath(not_parent) if abs_not_parent == commonprefix([abs_not_parent, abs_other]): - raise argparse.ArgumentTypeError( - "{0} may not be a parent directory of {1}".format( - not_parent, other)) + raise argparse.ArgumentTypeError("{0} may not be a parent 
directory of {1}".format(not_parent, other)) else: return not_parent + return parse_type + def argparse_deprecate(replacement_message): """fail if argument is provided with deprecation warning""" + def parse_type(_): """The parser type""" raise argparse.ArgumentTypeError("Deprecated." + replacement_message) + return parse_type + def print_large_string(large_string): - """ Breaks a string up into smaller pieces before print them + """Breaks a string up into smaller pieces before print them This is a limitation within Windows, as detailed here: https://bugs.python.org/issue11395 @@ -569,10 +581,10 @@ def print_large_string(large_string): if string_part == num_parts - 1: sys.stdout.write(large_string[start_index:]) else: - sys.stdout.write(large_string[start_index: - start_index + string_limit]) + sys.stdout.write(large_string[start_index : start_index + string_limit]) sys.stdout.write("\n") + def intelhex_offset(filename, offset): """Load a hex or bin file at a particular offset""" _, inteltype = splitext(filename) @@ -582,10 +594,10 @@ def intelhex_offset(filename, offset): elif inteltype == ".hex": ih.loadhex(filename) else: - raise ToolException("File %s does not have a known binary file type" - % filename) + raise ToolException("File %s does not have a known binary file type" % filename) return ih + def integer(maybe_string, base): """Make an integer of a number or a string""" if isinstance(maybe_string, int): @@ -593,14 +605,13 @@ def integer(maybe_string, base): else: return int(maybe_string, base) + def generate_update_filename(name, target): - return "%s_update.%s" % ( - name, - getattr(target, "OUTPUT_EXT_UPDATE", "bin") - ) + return "%s_update.%s" % (name, getattr(target, "OUTPUT_EXT_UPDATE", "bin")) + def print_end_warnings(end_warnings): - """ Print a formatted list of warnings + """Print a formatted list of warnings Positional arguments: end_warnings - A list of warnings (strings) to print diff --git a/tools/python/scancode_evaluate/__init__.py 
b/tools/python/scancode_evaluate/__init__.py index 2bae17afc88..04d33f049d1 100644 --- a/tools/python/scancode_evaluate/__init__.py +++ b/tools/python/scancode_evaluate/__init__.py @@ -1,4 +1,4 @@ # # Copyright (c) 2020-2023 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 -# \ No newline at end of file +# diff --git a/tools/python/scancode_evaluate/scancode_evaluate.py b/tools/python/scancode_evaluate/scancode_evaluate.py index ea581d05544..2c39b725ca4 100644 --- a/tools/python/scancode_evaluate/scancode_evaluate.py +++ b/tools/python/scancode_evaluate/scancode_evaluate.py @@ -13,7 +13,7 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and -limitations +limitations """ import argparse @@ -43,6 +43,7 @@ this_script_dir = pathlib.Path(__file__).parent mbed_os_root = this_script_dir.parent.parent.parent + class ReturnCode(Enum): """Return codes.""" @@ -53,11 +54,7 @@ class ReturnCode(Enum): def init_logger(): """Initialise the logger.""" userlog.setLevel(logging.INFO) - userlog.addHandler( - logging.FileHandler( - pathlib.Path.cwd() / 'scancode_evaluate.log', mode='w' - ) - ) + userlog.addHandler(logging.FileHandler(pathlib.Path.cwd() / "scancode_evaluate.log", mode="w")) def format_path_for_display(path: pathlib.Path) -> str: @@ -68,7 +65,7 @@ def format_path_for_display(path: pathlib.Path) -> str: def has_permissive_text_in_scancode_output(scancode_output_data_file_licenses): """Returns true if at least one license in the scancode output is permissive""" return any( - scancode_output_data_file_license['category'] == 'Permissive' + scancode_output_data_file_license["category"] == "Permissive" for scancode_output_data_file_license in scancode_output_data_file_licenses ) @@ -76,7 +73,7 @@ def has_permissive_text_in_scancode_output(scancode_output_data_file_licenses): def 
has_spdx_text_in_scancode_output(scancode_output_data_file_licenses): """Returns true if at least one license in the scancode output has the spdx identifier.""" return any( - 'spdx' in scancode_output_data_file_license['matched_rule']['identifier'] + "spdx" in scancode_output_data_file_license["matched_rule"]["identifier"] for scancode_output_data_file_license in scancode_output_data_file_licenses ) @@ -88,6 +85,7 @@ def has_spdx_text_in_analysed_file(scanned_file_content): """Returns true if the file analysed by ScanCode contains SPDX identifier.""" return bool(SPDX_LICENSE_REGEX.findall(scanned_file_content)) + def has_exempted_spdx_identifier(scanned_file_content): """ Returns true if the file analysed by scancode contains an exempted SPDX identifier. @@ -114,9 +112,9 @@ def get_file_text(scancode_output_data_file): Returns file text for scancode output file. File path is expected to be relative to mbed-os root. """ - file_path = mbed_os_root / scancode_output_data_file['path'] + file_path = mbed_os_root / scancode_output_data_file["path"] try: - with open(file_path, 'r') as read_file: + with open(file_path, "r") as read_file: return read_file.read() except UnicodeDecodeError: userlog.warning("Unable to decode file text in: %s" % file_path) @@ -141,45 +139,44 @@ def license_check(scancode_output_path): license_offenders = [] spdx_offenders = [] try: - with open(scancode_output_path, 'r') as read_file: + with open(scancode_output_path, "r") as read_file: scancode_output_data = json.load(read_file) except json.JSONDecodeError as jex: userlog.warning("JSON could not be decoded, Invalid JSON in body: %s", jex) return ReturnCode.ERROR.value - if 'files' not in scancode_output_data: + if "files" not in scancode_output_data: userlog.warning("Missing `files` attribute in %s" % (scancode_output_path)) return ReturnCode.ERROR.value - for scancode_output_data_file in scancode_output_data['files']: - if scancode_output_data_file['type'] != 'file': + for 
scancode_output_data_file in scancode_output_data["files"]: + if scancode_output_data_file["type"] != "file": continue is_ignored = False for regex in IGNORE_PATH_REGEXES: - if re.search(regex, scancode_output_data_file['path']) is not None: - userlog.info("Ignoring %s due to ignore rule." % (scancode_output_data_file['path'],)) + if re.search(regex, scancode_output_data_file["path"]) is not None: + userlog.info("Ignoring %s due to ignore rule." % (scancode_output_data_file["path"],)) is_ignored = True break if is_ignored: continue - if not scancode_output_data_file['licenses']: - scancode_output_data_file['fail_reason'] = MISSING_LICENSE_TEXT + if not scancode_output_data_file["licenses"]: + scancode_output_data_file["fail_reason"] = MISSING_LICENSE_TEXT license_offenders.append(scancode_output_data_file) # check the next file in the scancode output continue - if not has_permissive_text_in_scancode_output(scancode_output_data_file['licenses']): + if not has_permissive_text_in_scancode_output(scancode_output_data_file["licenses"]): scanned_file_content = get_file_text(scancode_output_data_file) - if (scanned_file_content is None - or has_exempted_spdx_identifier(scanned_file_content)): + if scanned_file_content is None or has_exempted_spdx_identifier(scanned_file_content): continue else: - scancode_output_data_file['fail_reason'] = MISSING_PERMISSIVE_LICENSE_TEXT + scancode_output_data_file["fail_reason"] = MISSING_PERMISSIVE_LICENSE_TEXT license_offenders.append(scancode_output_data_file) - if not has_spdx_text_in_scancode_output(scancode_output_data_file['licenses']): + if not has_spdx_text_in_scancode_output(scancode_output_data_file["licenses"]): # Scancode does not recognize license notice in Python file headers. # Issue: https://github.com/nexB/scancode-toolkit/issues/1913 # Therefore check if the file tested by ScanCode actually has a licence notice. 
@@ -188,29 +185,30 @@ def license_check(scancode_output_path): if not scanned_file_content: continue elif not has_spdx_text_in_analysed_file(scanned_file_content): - scancode_output_data_file['fail_reason'] = MISSING_SPDX_TEXT + scancode_output_data_file["fail_reason"] = MISSING_SPDX_TEXT spdx_offenders.append(scancode_output_data_file) if license_offenders: userlog.warning("Found files with missing license details, please review and fix") for offender in license_offenders: - userlog.warning("File: %s reason: %s" % - (format_path_for_display(pathlib.Path(offender['path'])), offender['fail_reason'])) + userlog.warning( + "File: %s reason: %s" + % (format_path_for_display(pathlib.Path(offender["path"])), offender["fail_reason"]) + ) if spdx_offenders: userlog.warning("Found files with missing SPDX identifier, please review and fix") for offender in spdx_offenders: - userlog.warning("File: %s reason: %s" % - (format_path_for_display(pathlib.Path(offender['path'])), offender['fail_reason'])) + userlog.warning( + "File: %s reason: %s" + % (format_path_for_display(pathlib.Path(offender["path"])), offender["fail_reason"]) + ) return len(license_offenders) def parse_args(): """Parse command line arguments.""" parser = argparse.ArgumentParser(description="License check.") - parser.add_argument( - 'scancode_output_path', - help="scancode-toolkit output json file" - ) + parser.add_argument("scancode_output_path", help="scancode-toolkit output json file") return parser.parse_args() @@ -218,14 +216,11 @@ def main(): init_logger() args = parse_args() if pathlib.Path(args.scancode_output_path).is_file(): - sys.exit( - ReturnCode.SUCCESS.value - if license_check(args.scancode_output_path) == 0 - else ReturnCode.ERROR.value - ) + sys.exit(ReturnCode.SUCCESS.value if license_check(args.scancode_output_path) == 0 else ReturnCode.ERROR.value) else: userlog.warning("Could not find the scancode json file") sys.exit(ReturnCode.ERROR.value) + if __name__ == "__main__": - main() \ No 
newline at end of file + main() diff --git a/tools/python_tests/__init__.py b/tools/python_tests/__init__.py index 2bae17afc88..04d33f049d1 100644 --- a/tools/python_tests/__init__.py +++ b/tools/python_tests/__init__.py @@ -1,4 +1,4 @@ # # Copyright (c) 2020-2023 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 -# \ No newline at end of file +# diff --git a/tools/python_tests/mbed_host_tests/basic.py b/tools/python_tests/mbed_host_tests/basic.py index 547a31bea3b..6320a05f270 100644 --- a/tools/python_tests/mbed_host_tests/basic.py +++ b/tools/python_tests/mbed_host_tests/basic.py @@ -17,8 +17,8 @@ import unittest -class BasicTestCase(unittest.TestCase): +class BasicTestCase(unittest.TestCase): def setUp(self): pass @@ -29,5 +29,6 @@ def test_example(self): self.assertEqual(True, True) self.assertNotEqual(True, False) -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/basic_ht.py b/tools/python_tests/mbed_host_tests/basic_ht.py index af94e3be670..87dae99ae92 100644 --- a/tools/python_tests/mbed_host_tests/basic_ht.py +++ b/tools/python_tests/mbed_host_tests/basic_ht.py @@ -19,8 +19,8 @@ from mbed_host_tests import get_plugin_caps -class BasicHostTestsTestCase(unittest.TestCase): +class BasicHostTestsTestCase(unittest.TestCase): def setUp(self): pass @@ -32,5 +32,5 @@ def test_get_plugin_caps(self): self.assertIs(type(d), dict) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/conn_primitive_remote.py b/tools/python_tests/mbed_host_tests/conn_primitive_remote.py index 87c468527f2..a5d2b63c1fe 100644 --- a/tools/python_tests/mbed_host_tests/conn_primitive_remote.py +++ b/tools/python_tests/mbed_host_tests/conn_primitive_remote.py @@ -65,7 +65,6 @@ def create(host, port): class ConnPrimitiveRemoteTestCase(unittest.TestCase): - def setUp(self): self.config = { "grm_module": 
"RemoteModuleMock", @@ -86,11 +85,14 @@ def test_constructor(self): self.assertIsInstance(self.remote.selected_resource, RemoteResourceMock) # allocate is called - self.remote.client.allocate.assert_called_once_with({ - 'platform_name': self.config.get('platform_name'), - 'power_on': True, - 'connected': True, - 'tags': {"a": True, "b": True}}) + self.remote.client.allocate.assert_called_once_with( + { + "platform_name": self.config.get("platform_name"), + "power_on": True, + "connected": True, + "tags": {"a": True, "b": True}, + } + ) # flash is called self.remote.selected_resource.flash.assert_called_once_with("test.bin", forceflash=True) @@ -124,5 +126,5 @@ def test_finish(self): resource.release.assert_called_once() -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/event_callback_decorator.py b/tools/python_tests/mbed_host_tests/event_callback_decorator.py index 35801fe71be..9b48f57af7e 100644 --- a/tools/python_tests/mbed_host_tests/event_callback_decorator.py +++ b/tools/python_tests/mbed_host_tests/event_callback_decorator.py @@ -26,16 +26,16 @@ def tearDown(self): def test_event_callback_decorator(self): class Ht(BaseHostTest): - - @event_callback('Hi') + @event_callback("Hi") def hi(self, key, value, timestamp): - print('hi') + print("hi") - @event_callback('Hello') + @event_callback("Hello") def hello(self, key, value, timestamp): - print('hello') + print("hello") + h = Ht() h.setup() callbacks = h.get_callbacks() - self.assertIn('Hi', callbacks) - self.assertIn('Hello', callbacks) + self.assertIn("Hi", callbacks) + self.assertIn("Hello", callbacks) diff --git a/tools/python_tests/mbed_host_tests/host_registry.py b/tools/python_tests/mbed_host_tests/host_registry.py index 794fab8fae3..2a71741e3a7 100644 --- a/tools/python_tests/mbed_host_tests/host_registry.py +++ b/tools/python_tests/mbed_host_tests/host_registry.py @@ -21,7 +21,6 @@ class HostRegistryTestCase(unittest.TestCase): - 
class HostTestClassMock(BaseHostTest): def setup(self): pass @@ -39,25 +38,25 @@ def tearDown(self): pass def test_register_host_test(self): - self.HOSTREGISTRY.register_host_test('host_test_mock_auto', self.HostTestClassMock()) - self.assertEqual(True, self.HOSTREGISTRY.is_host_test('host_test_mock_auto')) + self.HOSTREGISTRY.register_host_test("host_test_mock_auto", self.HostTestClassMock()) + self.assertEqual(True, self.HOSTREGISTRY.is_host_test("host_test_mock_auto")) def test_unregister_host_test(self): - self.HOSTREGISTRY.register_host_test('host_test_mock_2_auto', self.HostTestClassMock()) - self.assertEqual(True, self.HOSTREGISTRY.is_host_test('host_test_mock_2_auto')) - self.assertNotEqual(None, self.HOSTREGISTRY.get_host_test('host_test_mock_2_auto')) - self.HOSTREGISTRY.unregister_host_test('host_test_mock_2_auto') - self.assertEqual(False, self.HOSTREGISTRY.is_host_test('host_test_mock_2_auto')) + self.HOSTREGISTRY.register_host_test("host_test_mock_2_auto", self.HostTestClassMock()) + self.assertEqual(True, self.HOSTREGISTRY.is_host_test("host_test_mock_2_auto")) + self.assertNotEqual(None, self.HOSTREGISTRY.get_host_test("host_test_mock_2_auto")) + self.HOSTREGISTRY.unregister_host_test("host_test_mock_2_auto") + self.assertEqual(False, self.HOSTREGISTRY.is_host_test("host_test_mock_2_auto")) def test_get_host_test(self): - self.HOSTREGISTRY.register_host_test('host_test_mock_3_auto', self.HostTestClassMock()) - self.assertEqual(True, self.HOSTREGISTRY.is_host_test('host_test_mock_3_auto')) - self.assertNotEqual(None, self.HOSTREGISTRY.get_host_test('host_test_mock_3_auto')) + self.HOSTREGISTRY.register_host_test("host_test_mock_3_auto", self.HostTestClassMock()) + self.assertEqual(True, self.HOSTREGISTRY.is_host_test("host_test_mock_3_auto")) + self.assertNotEqual(None, self.HOSTREGISTRY.get_host_test("host_test_mock_3_auto")) def test_is_host_test(self): - self.assertEqual(False, self.HOSTREGISTRY.is_host_test('')) + self.assertEqual(False, 
self.HOSTREGISTRY.is_host_test("")) self.assertEqual(False, self.HOSTREGISTRY.is_host_test(None)) - self.assertEqual(False, self.HOSTREGISTRY.is_host_test('xyz')) + self.assertEqual(False, self.HOSTREGISTRY.is_host_test("xyz")) def test_host_test_str_not_empty(self): for ht_name in self.HOSTREGISTRY.HOST_TESTS: @@ -67,10 +66,10 @@ def test_host_test_str_not_empty(self): def test_host_test_has_name_attribute(self): for ht_name in self.HOSTREGISTRY.HOST_TESTS: ht = self.HOSTREGISTRY.HOST_TESTS[ht_name] - self.assertTrue(hasattr(ht, 'setup')) - self.assertTrue(hasattr(ht, 'result')) - self.assertTrue(hasattr(ht, 'teardown')) + self.assertTrue(hasattr(ht, "setup")) + self.assertTrue(hasattr(ht, "result")) + self.assertTrue(hasattr(ht, "teardown")) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/host_test_base.py b/tools/python_tests/mbed_host_tests/host_test_base.py index c2342583eae..a0db041cf70 100644 --- a/tools/python_tests/mbed_host_tests/host_test_base.py +++ b/tools/python_tests/mbed_host_tests/host_test_base.py @@ -19,8 +19,8 @@ from mbed_host_tests.host_tests_registry import HostRegistry -class BaseHostTestTestCase(unittest.TestCase): +class BaseHostTestTestCase(unittest.TestCase): def setUp(self): self.HOSTREGISTRY = HostRegistry() @@ -30,14 +30,15 @@ def tearDown(self): def test_host_test_has_setup_teardown_attribute(self): for ht_name in self.HOSTREGISTRY.HOST_TESTS: ht = self.HOSTREGISTRY.HOST_TESTS[ht_name] - self.assertTrue(hasattr(ht, 'setup')) - self.assertTrue(hasattr(ht, 'teardown')) + self.assertTrue(hasattr(ht, "setup")) + self.assertTrue(hasattr(ht, "teardown")) def test_host_test_has_no_rampUpDown_attribute(self): for ht_name in self.HOSTREGISTRY.HOST_TESTS: ht = self.HOSTREGISTRY.HOST_TESTS[ht_name] - self.assertFalse(hasattr(ht, 'rampUp')) - self.assertFalse(hasattr(ht, 'rampDown')) + self.assertFalse(hasattr(ht, "rampUp")) + self.assertFalse(hasattr(ht, "rampDown")) + -if 
__name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/host_test_os_detect.py b/tools/python_tests/mbed_host_tests/host_test_os_detect.py index 27753fd904f..eb39820d657 100644 --- a/tools/python_tests/mbed_host_tests/host_test_os_detect.py +++ b/tools/python_tests/mbed_host_tests/host_test_os_detect.py @@ -25,10 +25,9 @@ class HostOSDetectionTestCase(unittest.TestCase): - def setUp(self): self.plugin_base = HostTestPluginBase() - self.os_names = ['Windows7', 'Ubuntu', 'LinuxGeneric', 'Darwin'] + self.os_names = ["Windows7", "Ubuntu", "LinuxGeneric", "Darwin"] self.re_float = re.compile("^\d+\.\d+$") def tearDown(self): @@ -44,14 +43,10 @@ def test_supported_os_name(self): self.assertIn(self.plugin_base.mbed_os_support(), self.os_names) def test_detect_os_support_ext(self): - os_info = (os.name, - platform.system(), - platform.release(), - platform.version(), - sys.platform) + os_info = (os.name, platform.system(), platform.release(), platform.version(), sys.platform) self.assertEqual(os_info, self.plugin_base.mbed_os_info()) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/host_test_scheme.py b/tools/python_tests/mbed_host_tests/host_test_scheme.py index 63f29eb6f6b..ba63fc5b931 100644 --- a/tools/python_tests/mbed_host_tests/host_test_scheme.py +++ b/tools/python_tests/mbed_host_tests/host_test_scheme.py @@ -21,7 +21,6 @@ class HostRegistryTestCase(unittest.TestCase): - def setUp(self): self.HOSTREGISTRY = HostRegistry() @@ -29,33 +28,30 @@ def tearDown(self): pass def test_host_test_class_has_test_attr(self): - """ Check if host test has 'result' class member - """ + """Check if host test has 'result' class member""" for i, ht_name in enumerate(self.HOSTREGISTRY.HOST_TESTS): ht = self.HOSTREGISTRY.HOST_TESTS[ht_name] if ht is not None: - self.assertEqual(True, hasattr(ht, 'result')) + self.assertEqual(True, hasattr(ht, "result")) def 
test_host_test_class_test_attr_callable(self): - """ Check if host test has callable 'result' class member - """ + """Check if host test has callable 'result' class member""" for i, ht_name in enumerate(self.HOSTREGISTRY.HOST_TESTS): ht = self.HOSTREGISTRY.HOST_TESTS[ht_name] if ht: - self.assertEqual(True, hasattr(ht, 'result') and callable(getattr(ht, 'result'))) + self.assertEqual(True, hasattr(ht, "result") and callable(getattr(ht, "result"))) def test_host_test_class_test_attr_callable_args_num(self): - """ Check if host test has callable setup(), result() and teardown() class member has 2 arguments - """ + """Check if host test has callable setup(), result() and teardown() class member has 2 arguments""" for i, ht_name in enumerate(self.HOSTREGISTRY.HOST_TESTS): ht = self.HOSTREGISTRY.HOST_TESTS[ht_name] - if ht and hasattr(ht, 'setup') and callable(getattr(ht, 'setup')): + if ht and hasattr(ht, "setup") and callable(getattr(ht, "setup")): self.assertEqual(1, six.get_function_code(ht.setup).co_argcount) - if ht and hasattr(ht, 'result') and callable(getattr(ht, 'result')): + if ht and hasattr(ht, "result") and callable(getattr(ht, "result")): self.assertEqual(1, six.get_function_code(ht.result).co_argcount) - if ht and hasattr(ht, 'teardown') and callable(getattr(ht, 'teardown')): + if ht and hasattr(ht, "teardown") and callable(getattr(ht, "teardown")): self.assertEqual(1, six.get_function_code(ht.teardown).co_argcount) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/mps2_copy.py b/tools/python_tests/mbed_host_tests/mps2_copy.py index 6202ecbee58..12a07db66d6 100644 --- a/tools/python_tests/mbed_host_tests/mps2_copy.py +++ b/tools/python_tests/mbed_host_tests/mps2_copy.py @@ -20,8 +20,8 @@ from mbed_host_tests.host_tests_plugins.module_copy_mps2 import HostTestPluginCopyMethod_MPS2 -class MPS2CopyTestCase(unittest.TestCase): +class MPS2CopyTestCase(unittest.TestCase): def setUp(self): 
self.mps2_copy_plugin = HostTestPluginCopyMethod_MPS2() self.filename = "toto.bin" @@ -45,5 +45,6 @@ def test_copy_elf(self): self.assertTrue(os.path.isfile("mbed.elf")) os.remove("mbed.elf") -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_host_tests/mps2_reset.py b/tools/python_tests/mbed_host_tests/mps2_reset.py index 07a030dcde7..4c03223b3e1 100644 --- a/tools/python_tests/mbed_host_tests/mps2_reset.py +++ b/tools/python_tests/mbed_host_tests/mps2_reset.py @@ -22,8 +22,8 @@ from mbed_host_tests.host_tests_plugins.module_reset_mps2 import HostTestPluginResetMethod_MPS2 -class MPS2ResetTestCase(unittest.TestCase): +class MPS2ResetTestCase(unittest.TestCase): def setUp(self): self.mps2_reset_plugin = HostTestPluginResetMethod_MPS2() @@ -40,5 +40,6 @@ def test_check_sync(self, run_command_function, sleep_function): self.assertTrue("sync" in args[0]) os.remove("reboot.txt") -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_lstools/__init__.py b/tools/python_tests/mbed_lstools/__init__.py index 57f0caca492..bfa700b946a 100644 --- a/tools/python_tests/mbed_lstools/__init__.py +++ b/tools/python_tests/mbed_lstools/__init__.py @@ -19,4 +19,4 @@ Unit tests for mbed-ls package -""" \ No newline at end of file +""" diff --git a/tools/python_tests/mbed_lstools/base.py b/tools/python_tests/mbed_lstools/base.py index 4df14081c6c..94b0ed99fd5 100644 --- a/tools/python_tests/mbed_lstools/base.py +++ b/tools/python_tests/mbed_lstools/base.py @@ -34,19 +34,23 @@ # Python 3 basestring = str + class CLIComands(unittest.TestCase): - """ Test the CLI - """ + """Test the CLI""" def setUp(self): - self._stdout = patch('sys.stdout', new_callable=StringIO) + self._stdout = patch("sys.stdout", new_callable=StringIO) self.stdout = self._stdout.start() self.mbeds = MagicMock() self.args = MagicMock() self.mbeds.list_mbeds.return_value = [ - {'platform_name': 'foo', 
'platform_name_unique': 'foo[0]', - 'mount_point': 'a mount point', 'serial_port': 'a serial port', - 'target_id': 'DEADBEEF', 'daplink_version': 'v12345' + { + "platform_name": "foo", + "platform_name_unique": "foo[0]", + "mount_point": "a mount point", + "serial_port": "a serial port", + "target_id": "DEADBEEF", + "daplink_version": "v12345", } ] @@ -55,8 +59,7 @@ def tearDown(self): def test_print_version(self): cli.print_version(self.mbeds, self.args) - self.assertIn(mbed_os_tools.VERSION, - self.stdout.getvalue()) + self.assertIn(mbed_os_tools.VERSION, self.stdout.getvalue()) def test_print_table(self): cli.print_table(self.mbeds, self.args) @@ -72,8 +75,7 @@ def test_print_simple(self): def test_mbeds_as_json(self): cli.mbeds_as_json(self.mbeds, self.args) - self.assertEqual(self.mbeds.list_mbeds.return_value, - json.loads(self.stdout.getvalue())) + self.assertEqual(self.mbeds.list_mbeds.return_value, json.loads(self.stdout.getvalue())) def test_json_by_target_id(self): cli.json_by_target_id(self.mbeds, self.args) @@ -83,30 +85,27 @@ def test_json_by_target_id(self): def test_json_platforms(self): cli.json_platforms(self.mbeds, self.args) - platform_names = [d['platform_name'] for d - in self.mbeds.list_mbeds.return_value] + platform_names = [d["platform_name"] for d in self.mbeds.list_mbeds.return_value] for name in json.loads(self.stdout.getvalue()): self.assertIn(name, platform_names) def test_json_platforms_ext(self): cli.json_platforms_ext(self.mbeds, self.args) - platform_names = [d['platform_name'] for d - in self.mbeds.list_mbeds.return_value] + platform_names = [d["platform_name"] for d in self.mbeds.list_mbeds.return_value] for name in json.loads(self.stdout.getvalue()).keys(): self.assertIn(name, platform_names) def test_list_platform(self): - self.mbeds.list_manufacture_ids.return_value =""" + self.mbeds.list_manufacture_ids.return_value = """ foo bar baz """ cli.list_platforms(self.mbeds, self.args) - 
self.assertIn(self.mbeds.list_manufacture_ids.return_value, - self.stdout.getvalue()) + self.assertIn(self.mbeds.list_manufacture_ids.return_value, self.stdout.getvalue()) -class CLIParser(unittest.TestCase): +class CLIParser(unittest.TestCase): def setUp(self): pass @@ -125,10 +124,11 @@ def test_parse_cli_conflict(self): pass def test_parse_cli_single_param(self): - for p in ['j', 'J', 'p', 'P', '-version', 'd', 'u']: - args = cli.parse_cli(['-' + p]) + for p in ["j", "J", "p", "P", "-version", "d", "u"]: + args = cli.parse_cli(["-" + p]) assert callable(args.command) + class CLISetup(unittest.TestCase): def test_start_logging(self): cli.start_logging() diff --git a/tools/python_tests/mbed_lstools/details_txt.py b/tools/python_tests/mbed_lstools/details_txt.py index 22ec0af9711..549bb38e7b0 100644 --- a/tools/python_tests/mbed_lstools/details_txt.py +++ b/tools/python_tests/mbed_lstools/details_txt.py @@ -23,10 +23,8 @@ from mbed_lstools.main import create - class ParseMbedHTMTestCase(unittest.TestCase): - """ Unit tests checking HTML parsing code for 'mbed.htm' files - """ + """Unit tests checking HTML parsing code for 'mbed.htm' files""" details_txt_0226 = """Version: 0226 Build: Aug 24 2015 17:06:30 @@ -61,13 +59,13 @@ def test_simplified_daplink_txt_content(self): # Check parsing content result = self.mbeds._parse_details(lines) self.assertEqual(4, len(result)) - self.assertIn('Version', result) - self.assertIn('Build', result) - self.assertIn('Git Commit SHA', result) - self.assertIn('Git Local mods', result) + self.assertIn("Version", result) + self.assertIn("Build", result) + self.assertIn("Git Commit SHA", result) + self.assertIn("Git Local mods", result) # Check for daplink_version - self.assertEqual(result['Version'], "0226") + self.assertEqual(result["Version"], "0226") def test_extended_daplink_txt_content(self): # Fetch lines from DETAILS.TXT @@ -76,30 +74,30 @@ def test_extended_daplink_txt_content(self): # Check parsing content result = 
self.mbeds._parse_details(lines) - self.assertEqual(11, len(result)) # 12th would be comment - self.assertIn('Unique ID', result) - self.assertIn('HIF ID', result) - self.assertIn('Auto Reset', result) - self.assertIn('Automation allowed', result) - self.assertIn('Daplink Mode', result) - self.assertIn('Interface Version', result) - self.assertIn('Git SHA', result) - self.assertIn('Local Mods', result) - self.assertIn('USB Interfaces', result) - self.assertIn('Interface CRC', result) + self.assertEqual(11, len(result)) # 12th would be comment + self.assertIn("Unique ID", result) + self.assertIn("HIF ID", result) + self.assertIn("Auto Reset", result) + self.assertIn("Automation allowed", result) + self.assertIn("Daplink Mode", result) + self.assertIn("Interface Version", result) + self.assertIn("Git SHA", result) + self.assertIn("Local Mods", result) + self.assertIn("USB Interfaces", result) + self.assertIn("Interface CRC", result) # Check if we parsed comment line: # "# DAPLink Firmware - see https://mbed.com/daplink" for key in result: # Check if we parsed comment - self.assertFalse(key.startswith('#')) + self.assertFalse(key.startswith("#")) # Check if we parsed - self.assertFalse('https://mbed.com/daplink' in result[key]) + self.assertFalse("https://mbed.com/daplink" in result[key]) # Check for daplink_version # DAPlink <240 compatibility - self.assertEqual(result['Interface Version'], "0240") - self.assertEqual(result['Version'], "0240") + self.assertEqual(result["Interface Version"], "0240") + self.assertEqual(result["Version"], "0240") def test_(self): pass @@ -107,5 +105,6 @@ def test_(self): def test_(self): pass -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_lstools/detect_os.py b/tools/python_tests/mbed_lstools/detect_os.py index 74fa7d61603..0e2ed90c170 100644 --- a/tools/python_tests/mbed_lstools/detect_os.py +++ b/tools/python_tests/mbed_lstools/detect_os.py @@ -28,8 +28,7 @@ class 
DetectOSTestCase(unittest.TestCase): - """ Test cases for host OS related functionality. Helpful during porting - """ + """Test cases for host OS related functionality. Helpful during porting""" def setUp(self): pass @@ -47,18 +46,14 @@ def test_porting_create(self): self.assertNotEqual(None, create()) def test_supported_os_name(self): - os_names = ['Windows7', 'Ubuntu', 'LinuxGeneric', 'Darwin'] + os_names = ["Windows7", "Ubuntu", "LinuxGeneric", "Darwin"] self.assertIn(mbed_os_support(), os_names) def test_detect_os_support_ext(self): - os_info = (os.name, - platform.system(), - platform.release(), - platform.version(), - sys.platform) + os_info = (os.name, platform.system(), platform.release(), platform.version(), sys.platform) self.assertEqual(os_info, mbed_lstools_os_info()) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_lstools/mbed_htm.py b/tools/python_tests/mbed_lstools/mbed_htm.py index b06f618dab0..bf56705936a 100644 --- a/tools/python_tests/mbed_lstools/mbed_htm.py +++ b/tools/python_tests/mbed_lstools/mbed_htm.py @@ -23,17 +23,20 @@ from mbed_lstools.main import create - - class ParseMbedHTMTestCase(unittest.TestCase): - """ Unit tests checking HTML parsing code for 'mbed.htm' files - """ + """Unit tests checking HTML parsing code for 'mbed.htm' files""" # DAPlink <0240 - test_mbed_htm_k64f_url_str = '' - test_mbed_htm_l152re_url_str = '' + test_mbed_htm_k64f_url_str = ( + '' + ) + test_mbed_htm_l152re_url_str = ( + '' + ) test_mbed_htm_lpc1768_url_str = '' - test_mbed_htm_nucleo_l031k6_str = '' + test_mbed_htm_nucleo_l031k6_str = ( + '' + ) test_mbed_htm_nrf51_url_str = '' # DAPLink 0240 @@ -47,58 +50,63 @@ def tearDown(self): def test_mbed_htm_k64f_url(self): target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_k64f_url_str) - self.assertEqual('02400203D94B0E7724B7F3CF', target_id) + self.assertEqual("02400203D94B0E7724B7F3CF", target_id) def test_mbed_htm_l152re_url(self): 
target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_l152re_url_str) - self.assertEqual('07100200656A9A955A0F0CB8', target_id) + self.assertEqual("07100200656A9A955A0F0CB8", target_id) def test_mbed_htm_lpc1768_url(self): target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_lpc1768_url_str) - self.assertEqual('101000000000000000000002F7F1869557200730298d254d3ff3509e3fe4722d', target_id) + self.assertEqual("101000000000000000000002F7F1869557200730298d254d3ff3509e3fe4722d", target_id) def test_daplink_nucleo_l031k6_url(self): target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_nucleo_l031k6_str) - self.assertEqual('07900221461663077952F5AA', target_id) + self.assertEqual("07900221461663077952F5AA", target_id) def test_daplink_240_mbed_html(self): target_id = self.mbeds._target_id_from_htm(self.test_daplink_240_mbed_html_str) - self.assertEqual('0240000029164e45002f0012706e0006f301000097969900', target_id) + self.assertEqual("0240000029164e45002f0012706e0006f301000097969900", target_id) def test_mbed_htm_nrf51_url(self): target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_nrf51_url_str) - self.assertEqual('1100021952333120353935373130313232323032AFD5DFD8', target_id) + self.assertEqual("1100021952333120353935373130313232323032AFD5DFD8", target_id) def get_mbed_htm_comment_section_ver_build(self): # Incorrect data - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNone(ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build( + "" + ) self.assertIsNone(ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNone(ver_bld) # Correct data - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") 
self.assertIsNotNone(ver_bld) - self.assertEqual(('0200', 'Mar 26 2014 13:22:20'), ver_bld) + self.assertEqual(("0200", "Mar 26 2014 13:22:20"), ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNotNone(ver_bld) - self.assertEqual(('0200', 'Aug 27 2014 13:29:28'), ver_bld) + self.assertEqual(("0200", "Aug 27 2014 13:29:28"), ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build( + "" + ) self.assertIsNotNone(ver_bld) - self.assertEqual(('0219', 'Feb 2 2016 15:20:54'), ver_bld) + self.assertEqual(("0219", "Feb 2 2016 15:20:54"), ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNotNone(ver_bld) - self.assertEqual(('0.14.3', '471'), ver_bld) + self.assertEqual(("0.14.3", "471"), ver_bld) def test_(self): pass -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_lstools/mbedls_toolsbase.py b/tools/python_tests/mbed_lstools/mbedls_toolsbase.py index 0f5f028be12..6bb52e5c585 100644 --- a/tools/python_tests/mbed_lstools/mbedls_toolsbase.py +++ b/tools/python_tests/mbed_lstools/mbedls_toolsbase.py @@ -27,20 +27,23 @@ from mbed_lstools.lstools_base import MbedLsToolsBase, FSInteraction + class DummyLsTools(MbedLsToolsBase): return_value = [] + def find_candidates(self): return self.return_value + try: basestring except NameError: # Python 3 basestring = str + class BasicTestCase(unittest.TestCase): - """ Basic test cases checking trivial asserts - """ + """Basic test cases checking trivial asserts""" def setUp(self): self.base = DummyLsTools(force_mock=True) @@ -49,80 +52,88 @@ def tearDown(self): pass def test_list_mbeds_valid_platform(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 
'serial_port': "dummy_serial_port"}, - {'mount_point': None, - 'target_id_usb_id': '00000000000', - 'serial_port': 'not_valid'}] - with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + }, + {"mount_point": None, "target_id_usb_id": "00000000000", "serial_port": "not_valid"}, + ] + with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True - _read_htm.return_value = (u'0241BEEFDEAD', {}) - _get.return_value = { - 'platform_name': 'foo_target' - } - _listdir.return_value = ['mbed.htm'] + _read_htm.return_value = ("0241BEEFDEAD", {}) + _get.return_value = {"platform_name": "foo_target"} + _listdir.return_value = ["mbed.htm"] to_check = self.base.list_mbeds() - _read_htm.assert_called_once_with('dummy_mount_point') - _get.assert_any_call('0241', device_type='daplink', verbose_data=True) + _read_htm.assert_called_once_with("dummy_mount_point") + _get.assert_any_call("0241", device_type="daplink", verbose_data=True) self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['target_id'], "0241BEEFDEAD") - self.assertEqual(to_check[0]['platform_name'], 'foo_target') + self.assertEqual(to_check[0]["target_id"], "0241BEEFDEAD") + self.assertEqual(to_check[0]["platform_name"], "foo_target") def test_list_mbeds_invalid_tid(self): - self.base.return_value = [{'mount_point': 
'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}, - {'mount_point': 'dummy_mount_point', - 'target_id_usb_id': "", - 'serial_port': 'not_valid'}] - with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + }, + {"mount_point": "dummy_mount_point", "target_id_usb_id": "", "serial_port": "not_valid"}, + ] + with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True - _read_htm.side_effect = [(u'0241BEEFDEAD', {}), (None, {})] - _get.return_value = { - 'platform_name': 'foo_target' - } - _listdir.return_value = ['mbed.htm'] + _read_htm.side_effect = [("0241BEEFDEAD", {}), (None, {})] + _get.return_value = {"platform_name": "foo_target"} + _listdir.return_value = ["mbed.htm"] to_check = self.base.list_mbeds() - _get.assert_any_call('0241', device_type='daplink', verbose_data=True) + _get.assert_any_call("0241", device_type="daplink", verbose_data=True) self.assertEqual(len(to_check), 2) - self.assertEqual(to_check[0]['target_id'], "0241BEEFDEAD") - self.assertEqual(to_check[0]['platform_name'], 'foo_target') - self.assertEqual(to_check[1]['target_id'], "") - self.assertEqual(to_check[1]['platform_name'], None) + self.assertEqual(to_check[0]["target_id"], "0241BEEFDEAD") + self.assertEqual(to_check[0]["platform_name"], "foo_target") + 
self.assertEqual(to_check[1]["target_id"], "") + self.assertEqual(to_check[1]["platform_name"], None) def test_list_mbeds_invalid_platform(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'not_in_target_db', - 'serial_port': "dummy_serial_port"}] + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "not_in_target_db", + "serial_port": "dummy_serial_port", + } + ] for qos in [FSInteraction.BeforeFilter, FSInteraction.AfterFilter]: - with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True - _read_htm.return_value = (u'not_in_target_db', {}) + _read_htm.return_value = ("not_in_target_db", {}) _get.return_value = None - _listdir.return_value = ['MBED.HTM'] + _listdir.return_value = ["MBED.HTM"] to_check = self.base.list_mbeds() - _read_htm.assert_called_once_with('dummy_mount_point') - _get.assert_any_call('not_', device_type='daplink', verbose_data=True) + _read_htm.assert_called_once_with("dummy_mount_point") + _get.assert_any_call("not_", device_type="daplink", verbose_data=True) self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['target_id'], "not_in_target_db") - self.assertEqual(to_check[0]['platform_name'], None) + self.assertEqual(to_check[0]["target_id"], "not_in_target_db") + self.assertEqual(to_check[0]["platform_name"], None) def test_list_mbeds_unmount_mid_read(self): 
- self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + self.base.return_value = [ + {"mount_point": "dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": "dummy_serial_port"} + ] + with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True _listdir.side_effect = OSError to_check = self.base.list_mbeds() @@ -130,16 +141,20 @@ def test_list_mbeds_unmount_mid_read(self): def test_list_mbeds_read_mbed_htm_failure(self): def _test(mock): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir,\ - patch('mbed_os_tools.detect.lstools_base.open', mock, create=True): + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + } + ] + with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir, patch("mbed_os_tools.detect.lstools_base.open", mock, create=True): _mpr.return_value = True - _listdir.return_value = ['MBED.HTM', 'DETAILS.TXT'] + _listdir.return_value = ["MBED.HTM", "DETAILS.TXT"] to_check = self.base.list_mbeds() - mock.assert_called_once_with(os.path.join('dummy_mount_point', 'mbed.htm'), 'r') + mock.assert_called_once_with(os.path.join("dummy_mount_point", "mbed.htm"), "r") self.assertEqual(len(to_check), 0) m = mock_open() @@ -151,11 +166,11 @@ def _test(mock): _test(m) def 
test_list_mbeds_read_no_mbed_htm(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] + self.base.return_value = [ + {"mount_point": "dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": "dummy_serial_port"} + ] - details_txt_contents = '''\ + details_txt_contents = """\ # DAPLink Firmware - see https://mbed.com/daplink Unique ID: 0240000032044e4500257009997b00386781000097969900 HIC ID: 97969900 @@ -171,39 +186,45 @@ def test_list_mbeds_read_no_mbed_htm(self): Bootloader CRC: 0xb92403e6 Interface CRC: 0x434eddd1 Remount count: 0 -''' +""" + def _handle_open(*args, **kwargs): - if args[0].lower() == os.path.join('dummy_mount_point', 'mbed.htm'): + if args[0].lower() == os.path.join("dummy_mount_point", "mbed.htm"): raise OSError("(mocked open) No such file or directory: 'mbed.htm'") else: return DEFAULT m = mock_open(read_data=details_txt_contents) - with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir,\ - patch('mbed_os_tools.detect.lstools_base.open', m, create=True) as mocked_open: + with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir, patch("mbed_os_tools.detect.lstools_base.open", m, create=True) as mocked_open: mocked_open.side_effect = _handle_open _mpr.return_value = True - _listdir.return_value = ['PRODINFO.HTM', 'DETAILS.TXT'] + _listdir.return_value = ["PRODINFO.HTM", "DETAILS.TXT"] to_check = self.base.list_mbeds() self.assertEqual(len(to_check), 1) - m.assert_called_once_with(os.path.join('dummy_mount_point', 'DETAILS.TXT'), 'r') - self.assertEqual(to_check[0]['target_id'], '0240000032044e4500257009997b00386781000097969900') + m.assert_called_once_with(os.path.join("dummy_mount_point", "DETAILS.TXT"), "r") + 
self.assertEqual(to_check[0]["target_id"], "0240000032044e4500257009997b00386781000097969900") def test_list_mbeds_read_details_txt_failure(self): def _test(mock): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase._update_device_from_htm") as _htm,\ - patch('mbed_os_tools.detect.lstools_base.open', mock, create=True): + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + } + ] + with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir, patch("mbed_lstools.lstools_base.MbedLsToolsBase._update_device_from_htm") as _htm, patch( + "mbed_os_tools.detect.lstools_base.open", mock, create=True + ): _mpr.return_value = True _htm.side_effect = None - _listdir.return_value = ['MBED.HTM', 'DETAILS.TXT'] + _listdir.return_value = ["MBED.HTM", "DETAILS.TXT"] to_check = self.base.list_mbeds(read_details_txt=True) - mock.assert_called_once_with(os.path.join('dummy_mount_point', 'DETAILS.TXT'), 'r') + mock.assert_called_once_with(os.path.join("dummy_mount_point", "DETAILS.TXT"), "r") self.assertEqual(len(to_check), 0) m = mock_open() @@ -216,18 +237,19 @@ def _test(mock): def test_list_mbeds_unmount_mid_read_list_unmounted(self): self.base.list_unmounted = True - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + self.base.return_value = [ + {"mount_point": 
"dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": "dummy_serial_port"} + ] + with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True _listdir.side_effect = OSError to_check = self.base.list_mbeds() self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['mount_point'], None) - self.assertEqual(to_check[0]['device_type'], 'unknown') - self.assertEqual(to_check[0]['platform_name'], 'K64F') + self.assertEqual(to_check[0]["mount_point"], None) + self.assertEqual(to_check[0]["device_type"], "unknown") + self.assertEqual(to_check[0]["platform_name"], "K64F") def test_list_manufacture_ids(self): table_str = self.base.list_manufacture_ids() @@ -235,130 +257,132 @@ def test_list_manufacture_ids(self): def test_mock_manufacture_ids_default_multiple(self): # oper='+' - for mid, platform_name in [('0341', 'TEST_PLATFORM_NAME_1'), - ('0342', 'TEST_PLATFORM_NAME_2'), - ('0343', 'TEST_PLATFORM_NAME_3')]: + for mid, platform_name in [ + ("0341", "TEST_PLATFORM_NAME_1"), + ("0342", "TEST_PLATFORM_NAME_2"), + ("0343", "TEST_PLATFORM_NAME_3"), + ]: self.base.mock_manufacture_id(mid, platform_name) self.assertEqual(platform_name, self.base.plat_db.get(mid)) def test_mock_manufacture_ids_minus(self): # oper='+' - for mid, platform_name in [('0341', 'TEST_PLATFORM_NAME_1'), - ('0342', 'TEST_PLATFORM_NAME_2'), - ('0343', 'TEST_PLATFORM_NAME_3')]: + for mid, platform_name in [ + ("0341", "TEST_PLATFORM_NAME_1"), + ("0342", "TEST_PLATFORM_NAME_2"), + ("0343", "TEST_PLATFORM_NAME_3"), + ]: self.base.mock_manufacture_id(mid, platform_name) self.assertEqual(platform_name, self.base.plat_db.get(mid)) # oper='-' - mock_ids = self.base.mock_manufacture_id('0342', '', oper='-') - self.assertEqual('TEST_PLATFORM_NAME_1', self.base.plat_db.get("0341")) + mock_ids = self.base.mock_manufacture_id("0342", "", oper="-") + 
self.assertEqual("TEST_PLATFORM_NAME_1", self.base.plat_db.get("0341")) self.assertEqual(None, self.base.plat_db.get("0342")) - self.assertEqual('TEST_PLATFORM_NAME_3', self.base.plat_db.get("0343")) + self.assertEqual("TEST_PLATFORM_NAME_3", self.base.plat_db.get("0343")) def test_mock_manufacture_ids_star(self): # oper='+' - for mid, platform_name in [('0341', 'TEST_PLATFORM_NAME_1'), - ('0342', 'TEST_PLATFORM_NAME_2'), - ('0343', 'TEST_PLATFORM_NAME_3')]: + for mid, platform_name in [ + ("0341", "TEST_PLATFORM_NAME_1"), + ("0342", "TEST_PLATFORM_NAME_2"), + ("0343", "TEST_PLATFORM_NAME_3"), + ]: self.base.mock_manufacture_id(mid, platform_name) self.assertEqual(platform_name, self.base.plat_db.get(mid)) # oper='-' - self.base.mock_manufacture_id('*', '', oper='-') + self.base.mock_manufacture_id("*", "", oper="-") self.assertEqual(None, self.base.plat_db.get("0341")) self.assertEqual(None, self.base.plat_db.get("0342")) self.assertEqual(None, self.base.plat_db.get("0343")) def test_update_device_from_fs_mid_unmount(self): - dummy_mount = 'dummy_mount' - device = { - 'mount_point': dummy_mount - } + dummy_mount = "dummy_mount" + device = {"mount_point": dummy_mount} with patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: _listdir.side_effect = OSError self.base._update_device_from_fs(device, False) - self.assertEqual(device['mount_point'], None) + self.assertEqual(device["mount_point"], None) def test_detect_device_test(self): - device_type = self.base._detect_device_type({ - 'vendor_id': '0483' - }) - self.assertEqual(device_type, 'stlink') + device_type = self.base._detect_device_type({"vendor_id": "0483"}) + self.assertEqual(device_type, "stlink") - device_type = self.base._detect_device_type({ - 'vendor_id': '0d28' - }) - self.assertEqual(device_type, 'daplink') + device_type = self.base._detect_device_type({"vendor_id": "0d28"}) + self.assertEqual(device_type, "daplink") - device_type = self.base._detect_device_type({ - 'vendor_id': '1366' - }) 
- self.assertEqual(device_type, 'jlink') + device_type = self.base._detect_device_type({"vendor_id": "1366"}) + self.assertEqual(device_type, "jlink") def test_device_type_unmounted(self): self.base.list_unmounted = True - self.base.return_value = [{'mount_point': None, - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port", - 'vendor_id': '0d28', - 'product_id': '0204'}] - with patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: - _get.return_value = { - 'platform_name': 'foo_target' + self.base.return_value = [ + { + "mount_point": None, + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + "vendor_id": "0d28", + "product_id": "0204", } + ] + with patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: + _get.return_value = {"platform_name": "foo_target"} to_check = self.base.list_mbeds() - #_get.assert_any_call('0240', device_type='daplink', verbose_data=True) + # _get.assert_any_call('0240', device_type='daplink', verbose_data=True) self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['target_id'], "0240DEADBEEF") - self.assertEqual(to_check[0]['platform_name'], 'foo_target') - self.assertEqual(to_check[0]['device_type'], 'daplink') + self.assertEqual(to_check[0]["target_id"], "0240DEADBEEF") + self.assertEqual(to_check[0]["platform_name"], "foo_target") + self.assertEqual(to_check[0]["device_type"], "daplink") def test_update_device_details_jlink(self): - jlink_html_contents = ('' - 'NXP Product Page') + jlink_html_contents = ( + '' + "NXP Product Page" + ) _open = mock_open(read_data=jlink_html_contents) - dummy_mount_point = 'dummy' - base_device = { - 'mount_point': dummy_mount_point - } + dummy_mount_point = "dummy" + base_device = {"mount_point": dummy_mount_point} - with 
patch('mbed_os_tools.detect.lstools_base.open', _open, create=True): + with patch("mbed_os_tools.detect.lstools_base.open", _open, create=True): device = deepcopy(base_device) - device['directory_entries'] = ['Board.html', 'User Guide.html'] + device["directory_entries"] = ["Board.html", "User Guide.html"] self.base._update_device_details_jlink(device, False) - self.assertEqual(device['url'], 'http://www.nxp.com/FRDM-KL27Z') - self.assertEqual(device['platform_name'], 'KL27Z') - _open.assert_called_once_with(os.path.join(dummy_mount_point, 'Board.html'), 'r') + self.assertEqual(device["url"], "http://www.nxp.com/FRDM-KL27Z") + self.assertEqual(device["platform_name"], "KL27Z") + _open.assert_called_once_with(os.path.join(dummy_mount_point, "Board.html"), "r") _open.reset_mock() device = deepcopy(base_device) - device['directory_entries'] = ['User Guide.html'] + device["directory_entries"] = ["User Guide.html"] self.base._update_device_details_jlink(device, False) - self.assertEqual(device['url'], 'http://www.nxp.com/FRDM-KL27Z') - self.assertEqual(device['platform_name'], 'KL27Z') - _open.assert_called_once_with(os.path.join(dummy_mount_point, 'User Guide.html'), 'r') + self.assertEqual(device["url"], "http://www.nxp.com/FRDM-KL27Z") + self.assertEqual(device["platform_name"], "KL27Z") + _open.assert_called_once_with(os.path.join(dummy_mount_point, "User Guide.html"), "r") _open.reset_mock() device = deepcopy(base_device) - device['directory_entries'] = ['unhelpful_file.html'] + device["directory_entries"] = ["unhelpful_file.html"] self.base._update_device_details_jlink(device, False) _open.assert_not_called() def test_fs_never(self): device = { - 'target_id_usb_id': '024075309420ABCE', - 'mount_point': 'invalid_mount_point', - 'serial_port': 'invalid_serial_port' + "target_id_usb_id": "024075309420ABCE", + "mount_point": "invalid_mount_point", + "serial_port": "invalid_serial_port", } self.base.return_value = [device] - with 
patch("mbed_lstools.lstools_base.MbedLsToolsBase._update_device_from_fs") as _up_fs,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as mount_point_ready: + with patch("mbed_lstools.lstools_base.MbedLsToolsBase._update_device_from_fs") as _up_fs, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as mount_point_ready: mount_point_ready.return_value = True filter = None @@ -366,22 +390,22 @@ def test_fs_never(self): ret_with_details = self.base.list_mbeds(FSInteraction.Never, filter, read_details_txt=True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], ret[0]['target_id_usb_id']) - self.assertEqual(ret[0]['platform_name'], "K64F") + self.assertEqual(ret[0]["target_id"], ret[0]["target_id_usb_id"]) + self.assertEqual(ret[0]["platform_name"], "K64F") self.assertEqual(ret[0], ret_with_details[0]) _up_fs.assert_not_called() - filter_in = lambda m: m['platform_name'] == 'K64F' + filter_in = lambda m: m["platform_name"] == "K64F" ret = self.base.list_mbeds(FSInteraction.Never, filter_in, read_details_txt=False) ret_with_details = self.base.list_mbeds(FSInteraction.Never, filter_in, read_details_txt=True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], ret[0]['target_id_usb_id']) - self.assertEqual(ret[0]['platform_name'], "K64F") + self.assertEqual(ret[0]["target_id"], ret[0]["target_id_usb_id"]) + self.assertEqual(ret[0]["platform_name"], "K64F") self.assertEqual(ret[0], ret_with_details[0]) _up_fs.assert_not_called() - filter_out = lambda m: m['platform_name'] != 'K64F' + filter_out = lambda m: m["platform_name"] != "K64F" ret = self.base.list_mbeds(FSInteraction.Never, filter_out, read_details_txt=False) ret_with_details = self.base.list_mbeds(FSInteraction.Never, filter_out, read_details_txt=True) _up_fs.assert_not_called() @@ -391,20 +415,19 @@ def test_fs_never(self): def test_fs_after(self): 
device = { - 'target_id_usb_id': '024075309420ABCE', - 'mount_point': 'invalid_mount_point', - 'serial_port': 'invalid_serial_port' + "target_id_usb_id": "024075309420ABCE", + "mount_point": "invalid_mount_point", + "serial_port": "invalid_serial_port", } - with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase._details_txt") as _up_details,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as mount_point_ready,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase._details_txt" + ) as _up_details, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as mount_point_ready, patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: new_device_id = "00017531642046" _read_htm.return_value = (new_device_id, {}) - _listdir.return_value = ['mbed.htm', 'details.txt'] - _up_details.return_value = { - 'automation_allowed': '0' - } + _listdir.return_value = ["mbed.htm", "details.txt"] + _up_details.return_value = {"automation_allowed": "0"} mount_point_ready.return_value = True filter = None @@ -416,46 +439,42 @@ def test_fs_after(self): self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], new_device_id) - self.assertEqual(ret_with_details[0]['daplink_automation_allowed'], '0') + self.assertEqual(ret[0]["target_id"], new_device_id) + self.assertEqual(ret_with_details[0]["daplink_automation_allowed"], "0") # Below is the recommended replacement for assertDictContainsSubset(). 
# See: https://stackoverflow.com/a/59777678/7083698 self.assertEqual(ret_with_details[0], {**ret_with_details[0], **ret[0]}) - _read_htm.assert_called_with(device['mount_point']) - _up_details.assert_called_with(device['mount_point']) + _read_htm.assert_called_with(device["mount_point"]) + _up_details.assert_called_with(device["mount_point"]) _read_htm.reset_mock() _up_details.reset_mock() - filter_in = lambda m: m['target_id'] == device['target_id_usb_id'] - filter_details = lambda m: m.get('daplink_automation_allowed', None) == '0' + filter_in = lambda m: m["target_id"] == device["target_id_usb_id"] + filter_details = lambda m: m.get("daplink_automation_allowed", None) == "0" self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_in, False, False) + ret = self.base.list_mbeds(FSInteraction.AfterFilter, filter_in, False, False) self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_details, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.AfterFilter, filter_details, False, True) self.assertIsNotNone(ret[0]) self.assertEqual(ret_with_details, []) - self.assertEqual(ret[0]['target_id'], new_device_id) - _read_htm.assert_called_with(device['mount_point']) + self.assertEqual(ret[0]["target_id"], new_device_id) + _read_htm.assert_called_with(device["mount_point"]) _up_details.assert_not_called() _read_htm.reset_mock() _up_details.reset_mock() - filter_out = lambda m: m['target_id'] == new_device_id + filter_out = lambda m: m["target_id"] == new_device_id self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_out, False, False) + ret = self.base.list_mbeds(FSInteraction.AfterFilter, filter_out, False, False) self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_out, False, True) + ret_with_details = 
self.base.list_mbeds(FSInteraction.AfterFilter, filter_out, False, True) self.assertEqual(ret, []) self.assertEqual(ret_with_details, []) @@ -465,98 +484,87 @@ def test_fs_after(self): def test_get_supported_platforms(self): supported_platforms = self.base.get_supported_platforms() self.assertTrue(isinstance(supported_platforms, dict)) - self.assertEqual(supported_platforms['0240'], 'K64F') + self.assertEqual(supported_platforms["0240"], "K64F") def test_fs_before(self): device = { - 'target_id_usb_id': '024075309420ABCE', - 'mount_point': 'invalid_mount_point', - 'serial_port': 'invalid_serial_port' + "target_id_usb_id": "024075309420ABCE", + "mount_point": "invalid_mount_point", + "serial_port": "invalid_serial_port", } - with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase._details_txt") as _up_details,\ - patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as mount_point_ready,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: - new_device_id = u'00017575430420' + with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase._details_txt" + ) as _up_details, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as mount_point_ready, patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + new_device_id = "00017575430420" _read_htm.return_value = (new_device_id, {}) - _listdir.return_value = ['mbed.htm', 'details.txt'] - _up_details.return_value = { - 'automation_allowed': '0' - } + _listdir.return_value = ["mbed.htm", "details.txt"] + _up_details.return_value = {"automation_allowed": "0"} mount_point_ready.return_value = True filter = None self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter, False, False) + ret = self.base.list_mbeds(FSInteraction.BeforeFilter, filter, False, False) 
_up_details.assert_not_called() self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.BeforeFilter, filter, False, True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], new_device_id) - self.assertEqual(ret_with_details[0]['daplink_automation_allowed'], '0') + self.assertEqual(ret[0]["target_id"], new_device_id) + self.assertEqual(ret_with_details[0]["daplink_automation_allowed"], "0") self.assertEqual(ret_with_details[0], {**ret_with_details[0], **ret[0]}) - _read_htm.assert_called_with(device['mount_point']) - _up_details.assert_called_with(device['mount_point']) + _read_htm.assert_called_with(device["mount_point"]) + _up_details.assert_called_with(device["mount_point"]) _read_htm.reset_mock() _up_details.reset_mock() - filter_in = lambda m: m['target_id'] == '00017575430420' - filter_in_details = lambda m: m['daplink_automation_allowed'] == '0' + filter_in = lambda m: m["target_id"] == "00017575430420" + filter_in_details = lambda m: m["daplink_automation_allowed"] == "0" self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_in, False, False) + ret = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_in, False, False) _up_details.assert_not_called() self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_in_details, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_in_details, False, True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], new_device_id) - self.assertEqual(ret_with_details[0]['daplink_automation_allowed'], '0') + self.assertEqual(ret[0]["target_id"], new_device_id) + 
self.assertEqual(ret_with_details[0]["daplink_automation_allowed"], "0") self.assertEqual(ret_with_details[0], {**ret_with_details[0], **ret[0]}) - _read_htm.assert_called_with(device['mount_point']) - _up_details.assert_called_with(device['mount_point']) + _read_htm.assert_called_with(device["mount_point"]) + _up_details.assert_called_with(device["mount_point"]) _read_htm.reset_mock() _up_details.reset_mock() - filter_out = lambda m: m['target_id'] == '024075309420ABCE' - filter_out_details = lambda m: m['daplink_automation_allowed'] == '1' - ret = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_out, False, False) + filter_out = lambda m: m["target_id"] == "024075309420ABCE" + filter_out_details = lambda m: m["daplink_automation_allowed"] == "1" + ret = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_out, False, False) _up_details.assert_not_called() self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_out_details, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_out_details, False, True) self.assertEqual(ret, []) self.assertEqual(ret_with_details, []) - _read_htm.assert_called_with(device['mount_point']) + _read_htm.assert_called_with(device["mount_point"]) + class RetargetTestCase(unittest.TestCase): - """ Test cases that makes use of retargetting - """ + """Test cases that makes use of retargetting""" def setUp(self): - retarget_data = { - '0240DEADBEEF': { - 'serial_port' : 'valid' - } - } + retarget_data = {"0240DEADBEEF": {"serial_port": "valid"}} _open = mock_open(read_data=json.dumps(retarget_data)) - with patch('os.path.isfile') as _isfile,\ - patch('mbed_os_tools.detect.lstools_base.isfile') as _isfile,\ - patch('mbed_os_tools.detect.lstools_base.open', _open, create=True): + with patch("os.path.isfile") as _isfile, patch("mbed_os_tools.detect.lstools_base.isfile") as _isfile, patch( + 
"mbed_os_tools.detect.lstools_base.open", _open, create=True + ): _isfile.return_value = True self.base = DummyLsTools() _open.assert_called() @@ -565,22 +573,22 @@ def tearDown(self): pass def test_list_mbeds_valid_platform(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': None}] - with patch('mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids') as _read_htm,\ - patch('mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready') as _mpr,\ - patch('mbed_os_tools.detect.platform_database.PlatformDatabase.get') as _get,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + self.base.return_value = [ + {"mount_point": "dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": None} + ] + with patch("mbed_lstools.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.platform_database.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True - _read_htm.return_value = (u'0240DEADBEEF', {}) - _get.return_value = { - 'platform_name': 'foo_target' - } - _listdir.return_value = ['mbed.htm'] + _read_htm.return_value = ("0240DEADBEEF", {}) + _get.return_value = {"platform_name": "foo_target"} + _listdir.return_value = ["mbed.htm"] to_check = self.base.list_mbeds() self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['serial_port'], 'valid') + self.assertEqual(to_check[0]["serial_port"], "valid") + -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_lstools/os_darwin.py b/tools/python_tests/mbed_lstools/os_darwin.py index d112243b957..6be67bf2707 100644 --- a/tools/python_tests/mbed_lstools/os_darwin.py +++ b/tools/python_tests/mbed_lstools/os_darwin.py @@ -23,9 +23,9 @@ from mbed_lstools.darwin import 
MbedLsToolsDarwin + class DarwinTestCase(unittest.TestCase): - """Tests for the Darwin port - """ + """Tests for the Darwin port""" def setUp(self): with patch("platform.mac_ver") as _pv: @@ -37,135 +37,187 @@ def tearDown(self): def test_a_k64f(self): disks = { - 'AllDisks': ['disk0', 'disk0s1', 'disk0s2', 'disk0s3', 'disk1', 'disk2'], - 'AllDisksAndPartitions': [{ 'Content': 'GUID_partition_scheme', - 'DeviceIdentifier': 'disk0', - 'Partitions': [ - { 'Content': 'EFI', - 'DeviceIdentifier': 'disk0s1', - 'DiskUUID': 'nope', - 'Size': 209715200, - 'VolumeName': 'EFI', - 'VolumeUUID': 'nu-uh'}, - { 'Content': 'Apple_CoreStorage', - 'DeviceIdentifier': 'disk0s2', - 'DiskUUID': 'nodda', - 'Size': 250006216704}, - { 'Content': 'Apple_Boot', - 'DeviceIdentifier': 'disk0s3', - 'DiskUUID': 'no soup for you!', - 'Size': 650002432, - 'VolumeName': 'Recovery HD', - 'VolumeUUID': 'Id rather not'}], - 'Size': 251000193024}, - { 'Content': 'Apple_HFS', - 'DeviceIdentifier': 'disk1', - 'MountPoint': '/', - 'Size': 249653772288, - 'VolumeName': 'Mac HD'}, - { 'Content': '', - 'DeviceIdentifier': 'disk2', - 'MountPoint': '/Volumes/DAPLINK', - 'Size': 67174400, - 'VolumeName': 'DAPLINK'}], - 'VolumesFromDisks': ['Mac HD', 'DAPLINK'], - 'WholeDisks': ['disk0', 'disk1', 'disk2'] - } - usb_tree = [{ - 'IORegistryEntryName': 'DAPLink CMSIS-DAP', - 'USB Serial Number': '0240000034544e45003a00048e3800525a91000097969900', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'AppleUSBHostLegacyClient'}, - {'IORegistryEntryName': 'AppleUSBHostCompositeDevice'}, - {'IORegistryEntryName': 'USB_MSC', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOUSBMassStorageInterfaceNub', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOUSBMassStorageDriverNub', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 
'IOUSBMassStorageDriver', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOSCSILogicalUnitNub', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOSCSIPeripheralDeviceType00', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOBlockStorageServices', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOBlockStorageDriver', - 'IORegistryEntryChildren': [ - {'BSD Name': 'disk2', - 'IORegistryEntryName': 'MBED VFS Media', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOMediaBSDClient'}], - }], - }], - }], - }], - }], - }], - }], - }], - }, - {'IORegistryEntryName': 'CMSIS-DAP', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOUSBHostHIDDevice', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOHIDInterface'}, - {'IORegistryEntryName': 'IOHIDLibUserClient'}, - {'IORegistryEntryName': 'IOHIDLibUserClient'}], - }], - }, - {'IORegistryEntryName': 'mbed Serial Port', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - { 'IORegistryEntryName': 'AppleUSBACMControl'}], + "AllDisks": ["disk0", "disk0s1", "disk0s2", "disk0s3", "disk1", "disk2"], + "AllDisksAndPartitions": [ + { + "Content": "GUID_partition_scheme", + "DeviceIdentifier": "disk0", + "Partitions": [ + { + "Content": "EFI", + "DeviceIdentifier": "disk0s1", + "DiskUUID": "nope", + "Size": 209715200, + "VolumeName": "EFI", + "VolumeUUID": "nu-uh", + }, + { + "Content": "Apple_CoreStorage", + "DeviceIdentifier": "disk0s2", + "DiskUUID": "nodda", + "Size": 250006216704, + }, + { + "Content": "Apple_Boot", + "DeviceIdentifier": "disk0s3", + "DiskUUID": "no soup for you!", + "Size": 650002432, + "VolumeName": "Recovery HD", + "VolumeUUID": "Id rather not", + }, + ], + "Size": 251000193024, + }, + { + "Content": "Apple_HFS", + "DeviceIdentifier": "disk1", + "MountPoint": "/", + "Size": 249653772288, + "VolumeName": "Mac HD", + }, + { + "Content": "", + "DeviceIdentifier": "disk2", + 
"MountPoint": "/Volumes/DAPLINK", + "Size": 67174400, + "VolumeName": "DAPLINK", }, - {'IORegistryEntryName': 'mbed Serial Port', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'AppleUSBACMData', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOModemSerialStreamSync', - 'IORegistryEntryChildren': [ - {'IODialinDevice': '/dev/tty.usbmodem1422', - 'IORegistryEntryName': 'IOSerialBSDClient'}], - }], - }], - }], + ], + "VolumesFromDisks": ["Mac HD", "DAPLINK"], + "WholeDisks": ["disk0", "disk1", "disk2"], + } + usb_tree = [ + { + "IORegistryEntryName": "DAPLink CMSIS-DAP", + "USB Serial Number": "0240000034544e45003a00048e3800525a91000097969900", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + {"IORegistryEntryName": "AppleUSBHostLegacyClient"}, + {"IORegistryEntryName": "AppleUSBHostCompositeDevice"}, + { + "IORegistryEntryName": "USB_MSC", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOUSBMassStorageInterfaceNub", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOUSBMassStorageDriverNub", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOUSBMassStorageDriver", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOSCSILogicalUnitNub", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOSCSIPeripheralDeviceType00", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOBlockStorageServices", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOBlockStorageDriver", + "IORegistryEntryChildren": [ + { + "BSD Name": "disk2", + "IORegistryEntryName": "MBED VFS Media", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOMediaBSDClient" + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + }, + { + 
"IORegistryEntryName": "CMSIS-DAP", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOUSBHostHIDDevice", + "IORegistryEntryChildren": [ + {"IORegistryEntryName": "IOHIDInterface"}, + {"IORegistryEntryName": "IOHIDLibUserClient"}, + {"IORegistryEntryName": "IOHIDLibUserClient"}, + ], + } + ], + }, + { + "IORegistryEntryName": "mbed Serial Port", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [{"IORegistryEntryName": "AppleUSBACMControl"}], + }, + { + "IORegistryEntryName": "mbed Serial Port", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "AppleUSBACMData", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOModemSerialStreamSync", + "IORegistryEntryChildren": [ + { + "IODialinDevice": "/dev/tty.usbmodem1422", + "IORegistryEntryName": "IOSerialBSDClient", + } + ], + } + ], + } + ], + }, + ], } ] with patch("subprocess.Popen") as _popen: + def do_popen(command, *args, **kwargs): to_ret = MagicMock() to_ret.wait.return_value = 0 to_ret.stdout = BytesIO() - plistlib.dump( - {'diskutil': disks, - 'ioreg': usb_tree}[command[0]], - to_ret.stdout) + plistlib.dump({"diskutil": disks, "ioreg": usb_tree}[command[0]], to_ret.stdout) to_ret.stdout.seek(0) to_ret.communicate.return_value = (to_ret.stdout.getvalue(), "") return to_ret + _popen.side_effect = do_popen candidates = self.darwin.find_candidates() - self.assertIn({'mount_point': '/Volumes/DAPLINK', - 'serial_port': '/dev/tty.usbmodem1422', - 'target_id_usb_id': '0240000034544e45003a00048e3800525a91000097969900', - 'vendor_id': '0d28', - 'product_id': '0204'}, - candidates) + self.assertIn( + { + "mount_point": "/Volumes/DAPLINK", + "serial_port": "/dev/tty.usbmodem1422", + "target_id_usb_id": "0240000034544e45003a00048e3800525a91000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + candidates, + ) diff --git 
a/tools/python_tests/mbed_lstools/os_linux_generic.py b/tools/python_tests/mbed_lstools/os_linux_generic.py index 53c7fa4499a..4e7034b39f9 100644 --- a/tools/python_tests/mbed_lstools/os_linux_generic.py +++ b/tools/python_tests/mbed_lstools/os_linux_generic.py @@ -1,4 +1,4 @@ -''' +""" mbed SDK Copyright (c) 2011-2018 ARM Limited @@ -13,7 +13,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -''' +""" import unittest import sys @@ -23,8 +23,7 @@ class LinuxPortTestCase(unittest.TestCase): - ''' Basic test cases checking trivial asserts - ''' + """Basic test cases checking trivial asserts""" def setUp(self): self.linux_generic = MbedLsToolsLinuxGeneric() @@ -33,513 +32,486 @@ def tearDown(self): pass vfat_devices = [ - b'/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] def test_get_mount_point_basic(self): - with patch('mbed_lstools.linux.MbedLsToolsLinuxGeneric._run_cli_process') as _cliproc: - _cliproc.return_value = (b'\n'.join(self.vfat_devices), 
None, 0) + with patch("mbed_lstools.linux.MbedLsToolsLinuxGeneric._run_cli_process") as _cliproc: + _cliproc.return_value = (b"\n".join(self.vfat_devices), None, 0) mount_dict = dict(self.linux_generic._fat_mounts()) - _cliproc.assert_called_once_with('mount') - self.assertEqual('/media/usb0', mount_dict['/dev/sdb']) - self.assertEqual('/media/usb2', mount_dict['/dev/sdd']) - self.assertEqual('/media/usb3', mount_dict['/dev/sde']) - self.assertEqual('/media/usb1', mount_dict['/dev/sdc']) - + _cliproc.assert_called_once_with("mount") + self.assertEqual("/media/usb0", mount_dict["/dev/sdb"]) + self.assertEqual("/media/usb2", mount_dict["/dev/sdd"]) + self.assertEqual("/media/usb3", mount_dict["/dev/sde"]) + self.assertEqual("/media/usb1", mount_dict["/dev/sdc"]) vfat_devices_ext = [ - b'/dev/sdb on /media/MBED_xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/MBED___x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/MBED-xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/MBED_x-x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - - b'/dev/sda on /mnt/NUCLEO type vfat (rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)', - b'/dev/sdf on /mnt/NUCLEO_ type vfat (rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)', - b'/dev/sdg on /mnt/DAPLINK type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)', - b'/dev/sdh on /mnt/DAPLINK_ type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)', - b'/dev/sdi on /mnt/DAPLINK__ type 
vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)', + b"/dev/sdb on /media/MBED_xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/MBED___x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/MBED-xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/MBED_x-x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sda on /mnt/NUCLEO type vfat (rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)", + b"/dev/sdf on /mnt/NUCLEO_ type vfat (rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)", + b"/dev/sdg on /mnt/DAPLINK type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)", + b"/dev/sdh on /mnt/DAPLINK_ type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)", + b"/dev/sdi on /mnt/DAPLINK__ type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)", ] def test_get_mount_point_ext(self): - with patch('mbed_lstools.linux.MbedLsToolsLinuxGeneric._run_cli_process') as _cliproc: - _cliproc.return_value = (b'\n'.join(self.vfat_devices_ext), None, 0) + with patch("mbed_lstools.linux.MbedLsToolsLinuxGeneric._run_cli_process") as _cliproc: + _cliproc.return_value = (b"\n".join(self.vfat_devices_ext), None, 0) mount_dict = dict(self.linux_generic._fat_mounts()) - _cliproc.assert_called_once_with('mount') - self.assertEqual('/media/MBED_xxx', mount_dict['/dev/sdb']) - 
self.assertEqual('/media/MBED___x', mount_dict['/dev/sdd']) - self.assertEqual('/media/MBED-xxx', mount_dict['/dev/sde']) - self.assertEqual('/media/MBED_x-x', mount_dict['/dev/sdc']) - - self.assertEqual('/mnt/NUCLEO', mount_dict['/dev/sda']) - self.assertEqual('/mnt/NUCLEO_', mount_dict['/dev/sdf']) - self.assertEqual('/mnt/DAPLINK', mount_dict['/dev/sdg']) - self.assertEqual('/mnt/DAPLINK_', mount_dict['/dev/sdh']) - self.assertEqual('/mnt/DAPLINK__', mount_dict['/dev/sdi']) + _cliproc.assert_called_once_with("mount") + self.assertEqual("/media/MBED_xxx", mount_dict["/dev/sdb"]) + self.assertEqual("/media/MBED___x", mount_dict["/dev/sdd"]) + self.assertEqual("/media/MBED-xxx", mount_dict["/dev/sde"]) + self.assertEqual("/media/MBED_x-x", mount_dict["/dev/sdc"]) + + self.assertEqual("/mnt/NUCLEO", mount_dict["/dev/sda"]) + self.assertEqual("/mnt/NUCLEO_", mount_dict["/dev/sdf"]) + self.assertEqual("/mnt/DAPLINK", mount_dict["/dev/sdg"]) + self.assertEqual("/mnt/DAPLINK_", mount_dict["/dev/sdh"]) + self.assertEqual("/mnt/DAPLINK__", mount_dict["/dev/sdi"]) def find_candidates_with_patch(self, mount_list, link_dict, listdir_dict, open_dict): - if not getattr(sys.modules['os'], 'readlink', None): - sys.modules['os'].readlink = None + if not getattr(sys.modules["os"], "readlink", None): + sys.modules["os"].readlink = None - def do_open(path, mode='r'): - path = path.replace('\\', '/') + def do_open(path, mode="r"): + path = path.replace("\\", "/") file_object = mock_open(read_data=open_dict[path]).return_value file_object.__iter__.return_value = open_dict[path].splitlines(True) return file_object - with patch('mbed_lstools.linux.MbedLsToolsLinuxGeneric._run_cli_process') as _cliproc,\ - patch('os.readlink') as _readlink,\ - patch('os.listdir') as _listdir,\ - patch('os.path.abspath') as _abspath,\ - patch('mbed_os_tools.detect.linux.open', do_open) as _,\ - patch('os.path.isdir') as _isdir: + with patch("mbed_lstools.linux.MbedLsToolsLinuxGeneric._run_cli_process") 
as _cliproc, patch( + "os.readlink" + ) as _readlink, patch("os.listdir") as _listdir, patch("os.path.abspath") as _abspath, patch( + "mbed_os_tools.detect.linux.open", do_open + ) as _, patch("os.path.isdir") as _isdir: _isdir.return_value = True - _cliproc.return_value = (b'\n'.join(mount_list), None, 0) + _cliproc.return_value = (b"\n".join(mount_list), None, 0) + def do_readlink(link): # Fix for testing on Windows - link = link.replace('\\', '/') + link = link.replace("\\", "/") return link_dict[link] + _readlink.side_effect = do_readlink + def do_listdir(dir): # Fix for testing on Windows - dir = dir.replace('\\', '/') + dir = dir.replace("\\", "/") return listdir_dict[dir] + _listdir.side_effect = do_listdir + def do_abspath(dir): - _, path = os.path.splitdrive( - os.path.normpath(os.path.join(os.getcwd(), dir))) - path = path.replace('\\', '/') + _, path = os.path.splitdrive(os.path.normpath(os.path.join(os.getcwd(), dir))) + path = path.replace("\\", "/") return path + _abspath.side_effect = do_abspath ret_val = self.linux_generic.find_candidates() - _cliproc.assert_called_once_with('mount') + _cliproc.assert_called_once_with("mount") return ret_val - listdir_dict_rpi = { - '/dev/disk/by-id': [ - 'usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0', - 'usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0', - 'usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0', - 'usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0', - 'usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0', - 'usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0', - 'usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0' - ], - '/dev/serial/by-id': [ - 'usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01', - 'usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01', - 
'usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01', - 'usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01', - 'usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01' + "/dev/disk/by-id": [ + "usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0", + "usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0", + "usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0", + "usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0", + "usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0", + "usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0", + "usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0", ], - '/sys/class/block': [ - 'sdb', - 'sdc', - 'sdd', - 'sde', - 'sdf', - 'sdg', - 'sdh', + "/dev/serial/by-id": [ + "usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01", + "usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01", + "usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01", + "usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01", + "usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7': [ - 'idVendor', - 'idProduct' - ], - 
'/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8': [ - 'idVendor', - 'idProduct' - ] + "/sys/class/block": ["sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8": ["idVendor", "idProduct"], } open_dict_rpi = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idProduct': '0204\n', - 
'/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idProduct": "0204\n", } link_dict_rpi = { - '/dev/disk/by-id/usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0': '../../sdb', - '/dev/disk/by-id/usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0': '../../sdc', - '/dev/disk/by-id/usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0': '../../sdd', - '/dev/disk/by-id/usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0': '../../sde', - '/dev/disk/by-id/usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0': '../../sdf', - 
'/dev/disk/by-id/usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0': '../../sdg', - '/dev/disk/by-id/usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0': '../../sdh', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01': '../../ttyACM0', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01': '../../ttyACM1', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01': '../../ttyACM3', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01': '../../ttyACM2', - '/dev/serial/by-id/usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01': '../../ttyACM4', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/1-1.2.6:1.0/host8568/target8568:0:0/8568:0:0:0/block/sdb', - '/sys/class/block/sdc': '../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc', - '/sys/class/block/sdd': '../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd', - '/sys/class/block/sde': '../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde', - '/sys/class/block/sdf': '../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf', - '/sys/class/block/sdg': '../../devices/pci0000:00/0000:00:06.0/usb1/1-7/1-7:1.0/host8/target8:0:0/8:0:0:0/block/sdg', - '/sys/class/block/sdh': '../../devices/pci0000:00/0000:00:06.0/usb1/1-8/1-7:1.0/host9/target9:0:0/9:0:0:0/block/sdh' + "/dev/disk/by-id/usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0": "../../sdb", + "/dev/disk/by-id/usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0": "../../sdc", + "/dev/disk/by-id/usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0": "../../sdd", + 
"/dev/disk/by-id/usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0": "../../sde", + "/dev/disk/by-id/usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0": "../../sdf", + "/dev/disk/by-id/usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0": "../../sdg", + "/dev/disk/by-id/usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0": "../../sdh", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01": "../../ttyACM0", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01": "../../ttyACM1", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01": "../../ttyACM3", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01": "../../ttyACM2", + "/dev/serial/by-id/usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01": "../../ttyACM4", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/1-1.2.6:1.0/host8568/target8568:0:0/8568:0:0:0/block/sdb", + "/sys/class/block/sdc": "../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc", + "/sys/class/block/sdd": "../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd", + "/sys/class/block/sde": "../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde", + "/sys/class/block/sdf": "../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf", + "/sys/class/block/sdg": "../../devices/pci0000:00/0000:00:06.0/usb1/1-7/1-7:1.0/host8/target8:0:0/8:0:0:0/block/sdg", + "/sys/class/block/sdh": "../../devices/pci0000:00/0000:00:06.0/usb1/1-8/1-7:1.0/host9/target9:0:0/9:0:0:0/block/sdh", } mount_list_rpi = [ - b'/dev/sdb on /media/usb0 type vfat 
(rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdg on /media/usb5 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdh on /media/usb6 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdg on /media/usb5 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdh on /media/usb6 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] + def test_get_detected_rpi(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_rpi, self.link_dict_rpi, self.listdir_dict_rpi, self.open_dict_rpi) - - self.assertIn({ - 'mount_point': '/media/usb0', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': '0240000028634e4500135006691700105f21000097969900', - 'vendor_id': '0d28', - 
'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb1', - 'serial_port': '/dev/ttyACM1', - 'target_id_usb_id': '0240000028884e450018700f6bf000338021000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb4', - 'serial_port': '/dev/ttyACM2', - 'target_id_usb_id': '0240000029164e45001b0012706e000df301000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb3', - 'serial_port': '/dev/ttyACM3', - 'target_id_usb_id': '0240000028884e450036700f6bf000118021000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb6', - 'serial_port': '/dev/ttyACM4', - 'target_id_usb_id': '9900000031864e45000a100e0000003c0000000097969901', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - + self.mount_list_rpi, self.link_dict_rpi, self.listdir_dict_rpi, self.open_dict_rpi + ) + + self.assertIn( + { + "mount_point": "/media/usb0", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "0240000028634e4500135006691700105f21000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb1", + "serial_port": "/dev/ttyACM1", + "target_id_usb_id": "0240000028884e450018700f6bf000338021000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb4", + "serial_port": "/dev/ttyACM2", + "target_id_usb_id": "0240000029164e45001b0012706e000df301000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb3", + "serial_port": "/dev/ttyACM3", + "target_id_usb_id": "0240000028884e450036700f6bf000118021000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb6", + "serial_port": "/dev/ttyACM4", 
+ "target_id_usb_id": "9900000031864e45000a100e0000003c0000000097969901", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) listdir_dict_1 = { - '/dev/disk/by-id': [ - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5', - '/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C', - '/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5', - ], - '/dev/serial/by-id': [ - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01', + "/dev/disk/by-id": [ + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5", + "/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C", + "/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5", ], - '/sys/class/block': [ - 'sdb', - 'sdc' + "/dev/serial/by-id": [ + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2': [ - 'idVendor', - 
'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3': [ - 'idVendor', - 'idProduct' - ] + "/sys/class/block": ["sdb", "sdc"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3": ["idVendor", "idProduct"], } link_dict_1 = { - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM': '../../sda', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1': '../../sda1', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2': '../../sda2', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5': '../../sda5', - '/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C': '../../sr0', - '/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0': '../../sdc', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0': '../../sdb', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77': '../../sda', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1': '../../sda1', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2': '../../sda2', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5': '../../sda5', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01': '../../ttyACM1', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01': '../../ttyACM0', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb', - '/sys/class/block/sdc': '../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc' + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM": "../../sda", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1": "../../sda1", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2": "../../sda2", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5": "../../sda5", + "/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C": "../../sr0", + 
"/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0": "../../sdc", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0": "../../sdb", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77": "../../sda", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1": "../../sda1", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2": "../../sda2", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5": "../../sda5", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01": "../../ttyACM1", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01": "../../ttyACM0", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb", + "/sys/class/block/sdc": "../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc", } open_dict_1 = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct": "0204\n", } mount_list_1 = [ - b'/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat 
(rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] + def test_get_detected_1_k64f(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_1, self.link_dict_1, self.listdir_dict_1, self.open_dict_1) - self.assertIn({ - 'mount_point': '/media/usb0', - 'serial_port': '/dev/ttyACM1', - 'target_id_usb_id': '0240020152986E5EAF6693E6', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb1', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': 'A000000001', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - + self.mount_list_1, self.link_dict_1, self.listdir_dict_1, self.open_dict_1 + ) + self.assertIn( + { + "mount_point": "/media/usb0", + "serial_port": "/dev/ttyACM1", + "target_id_usb_id": "0240020152986E5EAF6693E6", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb1", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "A000000001", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) listdir_dict_2 = { - '/dev/disk/by-id': [ - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM', - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1', - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2', - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5', - 'ata-TSSTcorpDVD-ROM_TS-H352C', - 'usb-MBED_FDi_sk_A000000001-0:0', - 'usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0', - 'usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0', - 'usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0', - 'usb-MBED_microcontroller_0672FF485649785087171742-0:0', - 'wwn-0x5000cca30ccffb77', - 'wwn-0x5000cca30ccffb77-part1', - 'wwn-0x5000cca30ccffb77-part2', - 'wwn-0x5000cca30ccffb77-part5' - ], - '/dev/serial/by-id': [ - 'usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01', - 'usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01', - 
'usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01', - 'usb-MBED_MBED_CMSIS-DAP_A000000001-if01', - 'usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02' - ], - '/sys/class/block': [ - 'sdb', - 'sdc', - 'sdd', - 'sde', - 'sdf' + "/dev/disk/by-id": [ + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM", + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1", + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2", + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5", + "ata-TSSTcorpDVD-ROM_TS-H352C", + "usb-MBED_FDi_sk_A000000001-0:0", + "usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0", + "usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0", + "usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0", + "usb-MBED_microcontroller_0672FF485649785087171742-0:0", + "wwn-0x5000cca30ccffb77", + "wwn-0x5000cca30ccffb77-part1", + "wwn-0x5000cca30ccffb77-part2", + "wwn-0x5000cca30ccffb77-part5", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2': [ - 'idVendor', - 'idProduct' + "/dev/serial/by-id": [ + "usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01", + "usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01", + "usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01", + "usb-MBED_MBED_CMSIS-DAP_A000000001-if01", + "usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6': [ - 'idVendor', - 'idProduct' - ] + "/sys/class/block": ["sdb", "sdc", "sdd", "sde", "sdf"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3": 
["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6": ["idVendor", "idProduct"], } open_dict_2 = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct": "0204\n", + 
"/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct": "0204\n", } link_dict_2 = { - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM': '../../sda', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1': '../../sda1', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2': '../../sda2', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5': '../../sda5', - '/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C': '../../sr0', - '/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0': '../../sdc', - '/dev/disk/by-id/usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0': '../../sde', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0': '../../sdb', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0': '../../sdf', - '/dev/disk/by-id/usb-MBED_microcontroller_0672FF485649785087171742-0:0': '../../sdd', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77': '../../sda', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1': '../../sda1', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2': '../../sda2', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5': '../../sda5', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01': '../../ttyACM3', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01': '../../ttyACM1', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01': '../../ttyACM4', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01': '../../ttyACM0', - '/dev/serial/by-id/usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02': '../../ttyACM2', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb', - '/sys/class/block/sdc': '../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc', - '/sys/class/block/sdd': 
'../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd', - '/sys/class/block/sde': '../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde', - '/sys/class/block/sdf': '../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf' + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM": "../../sda", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1": "../../sda1", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2": "../../sda2", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5": "../../sda5", + "/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C": "../../sr0", + "/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0": "../../sdc", + "/dev/disk/by-id/usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0": "../../sde", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0": "../../sdb", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0": "../../sdf", + "/dev/disk/by-id/usb-MBED_microcontroller_0672FF485649785087171742-0:0": "../../sdd", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77": "../../sda", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1": "../../sda1", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2": "../../sda2", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5": "../../sda5", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01": "../../ttyACM3", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01": "../../ttyACM1", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01": "../../ttyACM4", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01": "../../ttyACM0", + "/dev/serial/by-id/usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02": "../../ttyACM2", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb", + 
"/sys/class/block/sdc": "../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc", + "/sys/class/block/sdd": "../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd", + "/sys/class/block/sde": "../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde", + "/sys/class/block/sdf": "../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf", } mount_list_2 = [ - b'/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] + def test_get_detected_2_k64f(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_2, self.link_dict_2, self.listdir_dict_2, self.open_dict_2) - - self.assertIn({ - 'mount_point': '/media/usb1', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': 'A000000001', - 'vendor_id': 
'0d28', - 'product_id': '0204' - }, - mbed_det) - self.assertIn({ - 'mount_point': '/media/usb2', - 'serial_port': '/dev/ttyACM2', - 'target_id_usb_id': '0672FF485649785087171742', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb4', - 'serial_port': '/dev/ttyACM4', - 'target_id_usb_id': '0240020152A06E54AF5E93EC', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb3', - 'serial_port': '/dev/ttyACM3', - 'target_id_usb_id': '02400201489A1E6CB564E3D4', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb0', - 'serial_port': '/dev/ttyACM1', - 'target_id_usb_id': '0240020152986E5EAF6693E6', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - + self.mount_list_2, self.link_dict_2, self.listdir_dict_2, self.open_dict_2 + ) + + self.assertIn( + { + "mount_point": "/media/usb1", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "A000000001", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb2", + "serial_port": "/dev/ttyACM2", + "target_id_usb_id": "0672FF485649785087171742", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb4", + "serial_port": "/dev/ttyACM4", + "target_id_usb_id": "0240020152A06E54AF5E93EC", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb3", + "serial_port": "/dev/ttyACM3", + "target_id_usb_id": "02400201489A1E6CB564E3D4", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb0", + "serial_port": "/dev/ttyACM1", + "target_id_usb_id": "0240020152986E5EAF6693E6", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) listdir_dict_4 = { - '/dev/disk/by-id': [ - 
'ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001', - 'ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001', - 'usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0' - ], - '/dev/serial/by-id': [ - 'pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01' - ], - '/sys/class/block': [ - 'sdb' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2': [ - 'idVendor', - 'idProduct' + "/dev/disk/by-id": [ + "ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001", + "ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001", + "usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0", ], + "/dev/serial/by-id": ["pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01"], + "/sys/class/block": ["sdb"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2": ["idVendor", "idProduct"], } open_dict_4 = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct": "0204\n", } link_dict_4 = { - '/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001': '../../sr0', - '/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001': '../../sr1', - '/dev/disk/by-id/usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0': '../../sdb', - '/dev/serial/by-id/pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01': '../../ttyACM0', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb' + "/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001": "../../sr0", + "/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001": "../../sr1", + 
"/dev/disk/by-id/usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0": "../../sdb", + "/dev/serial/by-id/pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01": "../../ttyACM0", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb", } mount_list_4 = [ - b'/dev/sdb on /media/przemek/DAPLINK type vfat (rw,nosuid,nodev,relatime,uid=1000,gid=1000,fmask=0022,dmask=0022,codepage=437,iocharset=iso8859-1,shortname=mixed,showexec,utf8,flush,errors=remount-ro,uhelper=udisks2)' + b"/dev/sdb on /media/przemek/DAPLINK type vfat (rw,nosuid,nodev,relatime,uid=1000,gid=1000,fmask=0022,dmask=0022,codepage=437,iocharset=iso8859-1,shortname=mixed,showexec,utf8,flush,errors=remount-ro,uhelper=udisks2)" ] + def test_get_detected_3_k64f(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_4, self.link_dict_4, self.listdir_dict_4, self.open_dict_4) - - self.assertIn({ - 'mount_point': '/media/przemek/DAPLINK', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': '0240000033514e45001f500585d40014e981000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - -if __name__ == '__main__': + self.mount_list_4, self.link_dict_4, self.listdir_dict_4, self.open_dict_4 + ) + + self.assertIn( + { + "mount_point": "/media/przemek/DAPLINK", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "0240000033514e45001f500585d40014e981000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_lstools/os_win7.py b/tools/python_tests/mbed_lstools/os_win7.py index f2224c8f6bb..3b1389a8305 100644 --- a/tools/python_tests/mbed_lstools/os_win7.py +++ b/tools/python_tests/mbed_lstools/os_win7.py @@ -23,18 +23,19 @@ # Mock the winreg and _winreg module for non-windows python _winreg = MagicMock() -sys.modules['_winreg'] = _winreg -sys.modules['winreg'] = _winreg 
+sys.modules["_winreg"] = _winreg +sys.modules["winreg"] = _winreg from mbed_lstools.windows import MbedLsToolsWin7, CompatibleIDsNotFoundException + class Win7TestCase(unittest.TestCase): - """ Basic test cases checking trivial asserts - """ + """Basic test cases checking trivial asserts""" def setUp(self): self.lstool = MbedLsToolsWin7() import logging + logging.basicConfig() root_logger = logging.getLogger("mbedls") root_logger.setLevel(logging.DEBUG) @@ -64,20 +65,27 @@ def test_os_supported(self): def test_empty_reg(self): value_dict = { - (None, 'SYSTEM\\MountedDevices'): [ - ('\\DosDevices\\F:', - u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'.encode('utf-16le')), + (None, "SYSTEM\\MountedDevices"): [ + ( + "\\DosDevices\\F:", + "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}".encode( + "utf-16le" + ), + ) ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\volume\\Enum'): [], - (None, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum'): [] + (None, "SYSTEM\\CurrentControlSet\\Services\\volume\\Enum"): [], + (None, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"): [], } self.setUpRegistry(value_dict, {}) candidates = self.lstool.find_candidates() - self.assertEqual(_winreg.OpenKey.mock_calls, [ - call(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\MountedDevices'), - call(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum'), - call(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum') - ]) + self.assertEqual( + _winreg.OpenKey.mock_calls, + [ + call(_winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\MountedDevices"), + call(_winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum"), + call(_winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"), + ], + ) 
self.assertEqual(candidates, []) def assertNoRegMut(self): @@ -93,84 +101,125 @@ def assertNoRegMut(self): def setUpRegistry(self, value_dict, key_dict): all_keys = set(value_dict.keys()) | set(key_dict.keys()) + def open_key_effect(key, subkey): - if ((key, subkey) in all_keys or key in all_keys): + if (key, subkey) in all_keys or key in all_keys: return key, subkey else: raise OSError((key, subkey)) + _winreg.OpenKey.side_effect = open_key_effect + def enum_value(key, index): try: a, b = value_dict[key][index] return a, b, None except KeyError: raise OSError + _winreg.EnumValue.side_effect = enum_value + def enum_key(key, index): try: return key_dict[key][index] except KeyError: raise OSError + _winreg.EnumKey.side_effect = enum_key + def query_value(key, subkey): try: return value_dict[(key, subkey)] except KeyError: raise OSError + _winreg.QueryValueEx.side_effect = query_value + def query_info_key(key): - return (len(key_dict.get(key, [])), - len(value_dict.get(key, []))) + return (len(key_dict.get(key, [])), len(value_dict.get(key, []))) + _winreg.QueryInfoKey.side_effect = query_info_key def test_one_composite_dev(self): value_dict = { - (None, 'SYSTEM\\MountedDevices'): [ - ('\\DosDevices\\C:', u'NOT A VALID MBED DRIVE'.encode('utf-16le')), - ('\\DosDevices\\F:', - u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'.encode('utf-16le')) + (None, "SYSTEM\\MountedDevices"): [ + ("\\DosDevices\\C:", "NOT A VALID MBED DRIVE".encode("utf-16le")), + ( + "\\DosDevices\\F:", + "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}".encode( + "utf-16le" + ), + ), ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum'): [ - ('0', 'USBSTOR\\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0') + (None, 
"SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum"): [ + ( + "0", + "USBSTOR\\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0", + ) ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum'): [ - ('0', 'USB\\VID_0D28&PID_0204&MI_00\\8&26b12a60&0&0000') + (None, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"): [ + ("0", "USB\\VID_0D28&PID_0204&MI_00\\8&26b12a60&0&0000") ], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): [], - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'), - '0240000032044e4500257009997b00386781000097969900'), - 'ParentIdPrefix'): ('8&26b12a60&0', None), - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'), - '0240000032044e4500257009997b00386781000097969900'), - 'CompatibleIDs'): ([u'USB\\DevClass_00&SubClass_00&Prot_00', u'USB\\DevClass_00&SubClass_00', u'USB\\DevClass_00', u'USB\\COMPOSITE'], 7), - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00'), '8&26b12a60&0&0000'), 'CompatibleIDs'): ([u'USB\\Class_08&SubClass_06&Prot_50', u'USB\\Class_08&SubClass_06', u'USB\\Class_08'], 7), - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'), - '8&26b12a60&0&0001'), - 'CompatibleIDs'): ([u'USB\\CLASS_02&SUBCLASS_02&PROT_01', u'USB\\CLASS_02&SUBCLASS_02', u'USB\\CLASS_02'], 7), - ((((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'), - '8&26b12a60&0&0001'), - 'Device Parameters'), - 'PortName'): ('COM7', None) + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [], + ( + ( + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"), + "0240000032044e4500257009997b00386781000097969900", + ), + "ParentIdPrefix", + ): ("8&26b12a60&0", None), + ( + ( + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"), + "0240000032044e4500257009997b00386781000097969900", + ), + "CompatibleIDs", + ): ( + [ + "USB\\DevClass_00&SubClass_00&Prot_00", + 
"USB\\DevClass_00&SubClass_00", + "USB\\DevClass_00", + "USB\\COMPOSITE", + ], + 7, + ), + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00"), "8&26b12a60&0&0000"), + "CompatibleIDs", + ): (["USB\\Class_08&SubClass_06&Prot_50", "USB\\Class_08&SubClass_06", "USB\\Class_08"], 7), + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"), "8&26b12a60&0&0001"), + "CompatibleIDs", + ): (["USB\\CLASS_02&SUBCLASS_02&PROT_01", "USB\\CLASS_02&SUBCLASS_02", "USB\\CLASS_02"], 7), + ( + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"), "8&26b12a60&0&0001"), + "Device Parameters", + ), + "PortName", + ): ("COM7", None), } key_dict = { - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): - ['0240000032044e4500257009997b00386781000097969900'], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00'): [], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'): [], - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'), - '8&26b12a60&0&0001'), - 'Device Parameters'): [] + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [ + "0240000032044e4500257009997b00386781000097969900" + ], + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00"): [], + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"): [], + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"), "8&26b12a60&0&0001"), + "Device Parameters", + ): [], } self.setUpRegistry(value_dict, key_dict) - with patch('mbed_lstools.windows.MbedLsToolsWin7._run_cli_process') as _cliproc: + with patch("mbed_lstools.windows.MbedLsToolsWin7._run_cli_process") as _cliproc: _cliproc.return_value = ("", "", 0) expected_info = { - 'mount_point': 'F:', - 'serial_port': 'COM7', - 'target_id_usb_id': u'0240000032044e4500257009997b00386781000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' + "mount_point": "F:", + 
"serial_port": "COM7", + "target_id_usb_id": "0240000032044e4500257009997b00386781000097969900", + "vendor_id": "0d28", + "product_id": "0204", } devices = self.lstool.find_candidates() @@ -179,36 +228,49 @@ def test_one_composite_dev(self): def test_one_non_composite_dev(self): value_dict = { - (None, 'SYSTEM\\MountedDevices'): [ - ('\\DosDevices\\C:', u'NOT A VALID MBED DRIVE'.encode('utf-16le')), - ('\\DosDevices\\F:', - u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0000000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'.encode('utf-16le')) + (None, "SYSTEM\\MountedDevices"): [ + ("\\DosDevices\\C:", "NOT A VALID MBED DRIVE".encode("utf-16le")), + ( + "\\DosDevices\\F:", + "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0000000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}".encode( + "utf-16le" + ), + ), ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum'): [ - ('0', 'USBSTOR\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\0000000032044e4500257009997b00386781000097969900&0') + (None, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum"): [ + ("0", "USBSTOR\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\0000000032044e4500257009997b00386781000097969900&0") ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum'): [ - ('0', 'USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900') + (None, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"): [ + ("0", "USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900") ], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): [], - ((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900'), - 'CompatibleIDs'): ([u'USB\\Class_08&SubClass_06&Prot_50', u'USB\\Class_08&SubClass_06', u'USB\\Class_08'], 7) + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [], + ( + ( + None, + 
"SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900", + ), + "CompatibleIDs", + ): (["USB\\Class_08&SubClass_06&Prot_50", "USB\\Class_08&SubClass_06", "USB\\Class_08"], 7), } key_dict = { - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): - ['0000000032044e4500257009997b00386781000097969900'], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900'): [] + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [ + "0000000032044e4500257009997b00386781000097969900" + ], + ( + None, + "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900", + ): [], } self.setUpRegistry(value_dict, key_dict) - with patch('mbed_lstools.windows.MbedLsToolsWin7._run_cli_process') as _cliproc: + with patch("mbed_lstools.windows.MbedLsToolsWin7._run_cli_process") as _cliproc: _cliproc.return_value = ("", "", 0) expected_info = { - 'mount_point': 'F:', - 'serial_port': None, - 'target_id_usb_id': u'0000000032044e4500257009997b00386781000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' + "mount_point": "F:", + "serial_port": None, + "target_id_usb_id": "0000000032044e4500257009997b00386781000097969900", + "vendor_id": "0d28", + "product_id": "0204", } devices = self.lstool.find_candidates() @@ -216,7 +278,7 @@ def test_one_non_composite_dev(self): self.assertNoRegMut() def test_mount_point_ready(self): - with patch('mbed_lstools.windows.MbedLsToolsWin7._run_cli_process') as _cliproc: + with patch("mbed_lstools.windows.MbedLsToolsWin7._run_cli_process") as _cliproc: _cliproc.return_value = ("dummy", "", 0) self.assertTrue(self.lstool.mount_point_ready("dummy")) @@ -226,5 +288,5 @@ def test_mount_point_ready(self): self.assertFalse(self.lstool.mount_point_ready("dummy")) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git 
a/tools/python_tests/mbed_lstools/platform_database.py b/tools/python_tests/mbed_lstools/platform_database.py index 775a48bfc82..2851c8835d9 100644 --- a/tools/python_tests/mbed_lstools/platform_database.py +++ b/tools/python_tests/mbed_lstools/platform_database.py @@ -24,22 +24,21 @@ from mock import patch, MagicMock, DEFAULT from io import StringIO -from mbed_lstools.platform_database import PlatformDatabase, DEFAULT_PLATFORM_DB,\ - LOCAL_PLATFORM_DATABASE +from mbed_lstools.platform_database import PlatformDatabase, DEFAULT_PLATFORM_DB, LOCAL_PLATFORM_DATABASE try: unicode except NameError: unicode = str + class EmptyPlatformDatabaseTests(unittest.TestCase): - """ Basic test cases with an empty database - """ + """Basic test cases with an empty database""" def setUp(self): - self.base_db_path = os.path.join(tempfile.mkdtemp(), 'base') - self.base_db = open(self.base_db_path, 'w+b') - self.base_db.write(b'{}') + self.base_db_path = os.path.join(tempfile.mkdtemp(), "base") + self.base_db = open(self.base_db_path, "w+b") + self.base_db.write(b"{}") self.base_db.seek(0) self.pdb = PlatformDatabase([self.base_db_path]) @@ -60,23 +59,22 @@ def test_broken_database_bad_json(self): """Verify that the platform database still works without a working backing file """ - self.base_db.write(b'{}') + self.base_db.write(b"{}") self.base_db.seek(0) self.pdb = PlatformDatabase([self.base_db_path]) self.pdb.add("1234", "MYTARGET") self.assertEqual(self.pdb.get("1234"), "MYTARGET") def test_broken_database(self): - """Verify that the platform database correctly reset's its database - """ - with patch("mbed_os_tools.detect.platform_database.open") as _open,\ - patch("mbed_os_tools.detect.platform_database._older_than_me") as _older: + """Verify that the platform database correctly reset's its database""" + with patch("mbed_os_tools.detect.platform_database.open") as _open, patch( + "mbed_os_tools.detect.platform_database._older_than_me" + ) as _older: _older.return_value = False 
stringio = MagicMock() _open.side_effect = (IOError("Bogus"), stringio) self.pdb = PlatformDatabase([LOCAL_PLATFORM_DATABASE]) - stringio.__enter__.return_value.write.assert_called_with( - unicode(json.dumps(DEFAULT_PLATFORM_DB))) + stringio.__enter__.return_value.write.assert_called_with(unicode(json.dumps(DEFAULT_PLATFORM_DB))) self.pdb.add("1234", "MYTARGET") self.assertEqual(self.pdb.get("1234"), "MYTARGET") @@ -91,83 +89,74 @@ def test_extra_broken_database(self): self.assertEqual(self.pdb.get("1234"), "MYTARGET") def test_old_database(self): - """Verify that the platform database correctly updates's its database - """ - with patch("mbed_os_tools.detect.platform_database.open") as _open,\ - patch("mbed_os_tools.detect.platform_database.getmtime") as _getmtime: + """Verify that the platform database correctly updates's its database""" + with patch("mbed_os_tools.detect.platform_database.open") as _open, patch( + "mbed_os_tools.detect.platform_database.getmtime" + ) as _getmtime: file_mock = MagicMock() - file_mock.read.return_value = '' + file_mock.read.return_value = "" _open.return_value.__enter__.return_value = file_mock _getmtime.side_effect = (0, 1000000) self.pdb = PlatformDatabase([LOCAL_PLATFORM_DATABASE]) - file_mock.write.assert_called_with( - unicode(json.dumps(DEFAULT_PLATFORM_DB))) + file_mock.write.assert_called_with(unicode(json.dumps(DEFAULT_PLATFORM_DB))) def test_bogus_database(self): - """Basic empty database test - """ + """Basic empty database test""" self.assertEqual(list(self.pdb.items()), []) self.assertEqual(list(self.pdb.all_ids()), []) - self.assertEqual(self.pdb.get('Also_Junk', None), None) + self.assertEqual(self.pdb.get("Also_Junk", None), None) def test_add(self): - """Test that what was added can later be queried - """ - self.assertEqual(self.pdb.get('4753', None), None) - self.pdb.add('4753', 'Test_Platform', permanent=False) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform') + """Test that what was added can later 
be queried""" + self.assertEqual(self.pdb.get("4753", None), None) + self.pdb.add("4753", "Test_Platform", permanent=False) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform") def test_remove(self): - """Test that once something is removed it no longer shows up when queried - """ - self.assertEqual(self.pdb.get('4753', None), None) - self.pdb.add('4753', 'Test_Platform', permanent=False) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform') - self.assertEqual(self.pdb.remove('4753', permanent=False), 'Test_Platform') - self.assertEqual(self.pdb.get('4753', None), None) + """Test that once something is removed it no longer shows up when queried""" + self.assertEqual(self.pdb.get("4753", None), None) + self.pdb.add("4753", "Test_Platform", permanent=False) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform") + self.assertEqual(self.pdb.remove("4753", permanent=False), "Test_Platform") + self.assertEqual(self.pdb.get("4753", None), None) def test_bogus_add(self): - """Test that add requires properly formatted platform ids - """ - self.assertEqual(self.pdb.get('NOTVALID', None), None) + """Test that add requires properly formatted platform ids""" + self.assertEqual(self.pdb.get("NOTVALID", None), None) with self.assertRaises(ValueError): - self.pdb.add('NOTVALID', 'Test_Platform', permanent=False) + self.pdb.add("NOTVALID", "Test_Platform", permanent=False) def test_bogus_remove(self): - """Test that removing a not present platform does nothing - """ - self.assertEqual(self.pdb.get('NOTVALID', None), None) - self.assertEqual(self.pdb.remove('NOTVALID', permanent=False), None) + """Test that removing a not present platform does nothing""" + self.assertEqual(self.pdb.get("NOTVALID", None), None) + self.assertEqual(self.pdb.remove("NOTVALID", permanent=False), None) def test_simplify_verbose_data(self): """Test that fetching a verbose entry without verbose data correctly returns just the 'platform_name' """ - platform_data = { - 
'platform_name': 'VALID', - 'other_data': 'data' - } - self.pdb.add('1337', platform_data, permanent=False) - self.assertEqual(self.pdb.get('1337', verbose_data=True), platform_data) - self.assertEqual(self.pdb.get('1337'), platform_data['platform_name']) + platform_data = {"platform_name": "VALID", "other_data": "data"} + self.pdb.add("1337", platform_data, permanent=False) + self.assertEqual(self.pdb.get("1337", verbose_data=True), platform_data) + self.assertEqual(self.pdb.get("1337"), platform_data["platform_name"]) + class OverriddenPlatformDatabaseTests(unittest.TestCase): - """ Test that for one database overriding another - """ + """Test that for one database overriding another""" def setUp(self): self.temp_dir = tempfile.mkdtemp() - self.base_db_path = os.path.join(self.temp_dir, 'base') - self.base_db = open(self.base_db_path, 'w+b') - self.base_db.write(json.dumps(dict([('0123', 'Base_Platform')])). - encode('utf-8')) + self.base_db_path = os.path.join(self.temp_dir, "base") + self.base_db = open(self.base_db_path, "w+b") + self.base_db.write(json.dumps(dict([("0123", "Base_Platform")])).encode("utf-8")) self.base_db.seek(0) - self.overriding_db_path = os.path.join(self.temp_dir, 'overriding') - self.overriding_db = open(self.overriding_db_path, 'w+b') - self.overriding_db.write(b'{}') + self.overriding_db_path = os.path.join(self.temp_dir, "overriding") + self.overriding_db = open(self.overriding_db_path, "w+b") + self.overriding_db.write(b"{}") self.overriding_db.seek(0) - self.pdb = PlatformDatabase([self.overriding_db_path, self.base_db_path], - primary_database=self.overriding_db_path) + self.pdb = PlatformDatabase( + [self.overriding_db_path, self.base_db_path], primary_database=self.overriding_db_path + ) self.base_db.seek(0) self.overriding_db.seek(0) @@ -176,31 +165,25 @@ def tearDown(self): self.overriding_db.close() def assertBaseUnchanged(self): - """Assert that the base database has not changed - """ + """Assert that the base database has 
not changed""" self.base_db.seek(0) - self.assertEqual(self.base_db.read(), - json.dumps(dict([('0123', 'Base_Platform')])) - .encode('utf-8')) + self.assertEqual(self.base_db.read(), json.dumps(dict([("0123", "Base_Platform")])).encode("utf-8")) def assertOverrideUnchanged(self): - """Assert that the override database has not changed - """ + """Assert that the override database has not changed""" self.overriding_db.seek(0) - self.assertEqual(self.overriding_db.read(), b'{}') + self.assertEqual(self.overriding_db.read(), b"{}") def test_basline(self): - """Sanity check that the base database does what we expect - """ - self.assertEqual(list(self.pdb.items()), [('0123', 'Base_Platform')]) - self.assertEqual(list(self.pdb.all_ids()), ['0123']) + """Sanity check that the base database does what we expect""" + self.assertEqual(list(self.pdb.items()), [("0123", "Base_Platform")]) + self.assertEqual(list(self.pdb.all_ids()), ["0123"]) def test_add_non_override(self): - """Check that adding keys goes to the Override database - """ - self.pdb.add('1234', 'Another_Platform') - self.assertEqual(list(self.pdb.items()), [('1234', 'Another_Platform'), ('0123', 'Base_Platform')]) - self.assertEqual(set(self.pdb.all_ids()), set(['0123', '1234'])) + """Check that adding keys goes to the Override database""" + self.pdb.add("1234", "Another_Platform") + self.assertEqual(list(self.pdb.items()), [("1234", "Another_Platform"), ("0123", "Base_Platform")]) + self.assertEqual(set(self.pdb.all_ids()), set(["0123", "1234"])) self.assertBaseUnchanged() def test_load_override(self): @@ -208,14 +191,14 @@ def test_load_override(self): you can no longer query for the base database definition and that the override database was not written to disk """ - self.overriding_db.write(json.dumps(dict([('0123', 'Overriding_Platform')])). 
- encode('utf-8')) + self.overriding_db.write(json.dumps(dict([("0123", "Overriding_Platform")])).encode("utf-8")) self.overriding_db.seek(0) - self.pdb = PlatformDatabase([self.overriding_db_path, self.base_db_path], - primary_database=self.overriding_db_path) - self.assertIn(('0123', 'Overriding_Platform'), list(self.pdb.items())) - self.assertEqual(set(self.pdb.all_ids()), set(['0123'])) - self.assertEqual(self.pdb.get('0123'), 'Overriding_Platform') + self.pdb = PlatformDatabase( + [self.overriding_db_path, self.base_db_path], primary_database=self.overriding_db_path + ) + self.assertIn(("0123", "Overriding_Platform"), list(self.pdb.items())) + self.assertEqual(set(self.pdb.all_ids()), set(["0123"])) + self.assertEqual(self.pdb.get("0123"), "Overriding_Platform") self.assertBaseUnchanged() def test_add_override_permanent(self): @@ -223,14 +206,15 @@ def test_add_override_permanent(self): you can no longer query for the base database definition and that the override database was written to disk """ - self.pdb.add('0123', 'Overriding_Platform', permanent=True) - self.assertIn(('0123', 'Overriding_Platform'), list(self.pdb.items())) - self.assertEqual(set(self.pdb.all_ids()), set(['0123'])) - self.assertEqual(self.pdb.get('0123'), 'Overriding_Platform') + self.pdb.add("0123", "Overriding_Platform", permanent=True) + self.assertIn(("0123", "Overriding_Platform"), list(self.pdb.items())) + self.assertEqual(set(self.pdb.all_ids()), set(["0123"])) + self.assertEqual(self.pdb.get("0123"), "Overriding_Platform") self.overriding_db.seek(0) - self.assertEqual(self.overriding_db.read(), - json.dumps(dict([('daplink', dict([('0123', 'Overriding_Platform')]))])) - .encode('utf-8')) + self.assertEqual( + self.overriding_db.read(), + json.dumps(dict([("daplink", dict([("0123", "Overriding_Platform")]))])).encode("utf-8"), + ) self.assertBaseUnchanged() def test_remove_override(self): @@ -238,12 +222,12 @@ def test_remove_override(self): the original base database definition 
and that that the override database was not written to disk """ - self.pdb.add('0123', 'Overriding_Platform') - self.assertIn(('0123', 'Overriding_Platform'), list(self.pdb.items())) - self.assertEqual(set(self.pdb.all_ids()), set(['0123'])) - self.assertEqual(self.pdb.get('0123'), 'Overriding_Platform') - self.assertEqual(self.pdb.remove('0123'), 'Overriding_Platform') - self.assertEqual(self.pdb.get('0123'), 'Base_Platform') + self.pdb.add("0123", "Overriding_Platform") + self.assertIn(("0123", "Overriding_Platform"), list(self.pdb.items())) + self.assertEqual(set(self.pdb.all_ids()), set(["0123"])) + self.assertEqual(self.pdb.get("0123"), "Overriding_Platform") + self.assertEqual(self.pdb.remove("0123"), "Overriding_Platform") + self.assertEqual(self.pdb.get("0123"), "Base_Platform") self.assertOverrideUnchanged() self.assertBaseUnchanged() @@ -252,8 +236,8 @@ def test_remove_from_base(self): the original base database definition and that that the base database was not written to disk """ - self.assertEqual(self.pdb.remove('0123'), 'Base_Platform') - self.assertEqual(self.pdb.get('0123'), None) + self.assertEqual(self.pdb.remove("0123"), "Base_Platform") + self.assertEqual(self.pdb.get("0123"), None) self.assertOverrideUnchanged() self.assertBaseUnchanged() @@ -262,20 +246,20 @@ def test_remove_from_base_permanent(self): the original base database definition and that that the base database was not modified on disk """ - self.assertEqual(self.pdb.remove('0123', permanent=True), 'Base_Platform') - self.assertEqual(self.pdb.get('0123'), None) + self.assertEqual(self.pdb.remove("0123", permanent=True), "Base_Platform") + self.assertEqual(self.pdb.get("0123"), None) self.assertBaseUnchanged() -class InternalLockingChecks(unittest.TestCase): +class InternalLockingChecks(unittest.TestCase): def setUp(self): - self.mocked_lock = patch('mbed_os_tools.detect.platform_database.InterProcessLock', spec=True).start() + self.mocked_lock = 
patch("mbed_os_tools.detect.platform_database.InterProcessLock", spec=True).start() self.acquire = self.mocked_lock.return_value.acquire self.release = self.mocked_lock.return_value.release - self.base_db_path = os.path.join(tempfile.mkdtemp(), 'base') - self.base_db = open(self.base_db_path, 'w+b') - self.base_db.write(b'{}') + self.base_db_path = os.path.join(tempfile.mkdtemp(), "base") + self.base_db = open(self.base_db_path, "w+b") + self.base_db.write(b"{}") self.base_db.seek(0) self.pdb = PlatformDatabase([self.base_db_path]) self.addCleanup(patch.stopall) @@ -284,33 +268,29 @@ def tearDown(self): self.base_db.close() def test_no_update(self): - """Test that no locks are used when no modifications are specified - """ - self.pdb.add('7155', 'Junk') + """Test that no locks are used when no modifications are specified""" + self.pdb.add("7155", "Junk") self.acquire.assert_not_called() self.release.assert_not_called() def test_update(self): - """Test that locks are used when modifications are specified - """ - self.pdb.add('7155', 'Junk', permanent=True) - assert self.acquire.called, 'Lock acquire should have been called' + """Test that locks are used when modifications are specified""" + self.pdb.add("7155", "Junk", permanent=True) + assert self.acquire.called, "Lock acquire should have been called" assert self.release.called def test_update_fail_acquire(self): - """Test that the backing file is not updated when lock acquisition fails - """ + """Test that the backing file is not updated when lock acquisition fails""" self.acquire.return_value = False - self.pdb.add('7155', 'Junk', permanent=True) - assert self.acquire.called, 'Lock acquire should have been called' + self.pdb.add("7155", "Junk", permanent=True) + assert self.acquire.called, "Lock acquire should have been called" self.base_db.seek(0) - self.assertEqual(self.base_db.read(), b'{}') + self.assertEqual(self.base_db.read(), b"{}") def test_update_ambiguous(self): - """Test that the backing file is not 
updated when lock acquisition fails - """ + """Test that the backing file is not updated when the primary database is ambiguous""" self.pdb._prim_db = None - self.pdb.add('7155', 'Junk', permanent=True) + self.pdb.add("7155", "Junk", permanent=True) self.acquire.assert_not_called() self.release.assert_not_called() - self.assertEqual(self.base_db.read(), b'{}') + self.assertEqual(self.base_db.read(), b"{}") diff --git a/tools/python_tests/mbed_lstools/platform_detection.py b/tools/python_tests/mbed_lstools/platform_detection.py index 1039965fa67..26a95b882a7 100644 --- a/tools/python_tests/mbed_lstools/platform_detection.py +++ b/tools/python_tests/mbed_lstools/platform_detection.py @@ -22,13 +22,16 @@ from mbed_lstools.lstools_base import MbedLsToolsBase -TEST_DATA_PATH = 'test_data' +TEST_DATA_PATH = "test_data" + class DummyLsTools(MbedLsToolsBase): return_value = [] + def find_candidates(self): return self.return_value + try: basestring except NameError: @@ -41,12 +44,11 @@ def get_case_insensitive_path(path, file_name): if entry.lower() == file_name.lower(): return os.path.join(path, entry) - raise Exception('No matching file for %s found in $s' % (file_name, path)) + raise Exception("No matching file for %s found in %s" % (file_name, path)) class PlatformDetectionTestCase(unittest.TestCase): - """ Basic test cases checking trivial asserts - """ + """Basic test cases checking trivial asserts""" def setUp(self): self.base = DummyLsTools() @@ -56,13 +58,16 @@ def tearDown(self): def run_test(self, test_data_case, candidate_data, expected_data): # Add necessary candidate data - candidate_data['mount_point'] = 'dummy_mount_point' + candidate_data["mount_point"] = "dummy_mount_point" # Find the test data in the test_data folder test_script_path = os.path.dirname(os.path.abspath(__file__)) test_data_path = os.path.join(test_script_path, TEST_DATA_PATH) test_data_cases = os.listdir(test_data_path) - self.assertTrue(test_data_case in test_data_cases, 'Expected %s to be present in 
%s folder' % (test_data_case, test_data_path)) + self.assertTrue( + test_data_case in test_data_cases, + "Expected %s to be present in %s folder" % (test_data_case, test_data_path), + ) test_data_case_path = os.path.join(test_data_path, test_data_case) # NOTE a limitation of this mocked test is that it only allows mocking of one directory level. @@ -70,15 +75,17 @@ def run_test(self, test_data_case, candidate_data, expected_data): # If this changes in the future, this mocking framework can be extended to support this. test_data_case_file_names = os.listdir(test_data_case_path) - mocked_open_file_paths = [os.path.join(candidate_data['mount_point'], file_name ) for file_name in test_data_case_file_names] + mocked_open_file_paths = [ + os.path.join(candidate_data["mount_point"], file_name) for file_name in test_data_case_file_names + ] # Setup all the mocks self.base.return_value = [candidate_data] - def do_open(path, mode='r'): + def do_open(path, mode="r"): file_name = os.path.basename(path) try: - with open(get_case_insensitive_path(test_data_case_path, file_name), 'r') as test_data_file: + with open(get_case_insensitive_path(test_data_case_path, file_name), "r") as test_data_file: test_data_file_data = test_data_file.read() except OSError: raise OSError("(mocked open) No such file or directory: '%s'" % (path)) @@ -87,9 +94,9 @@ def do_open(path, mode='r'): file_object.__iter__.return_value = test_data_file_data.splitlines(True) return file_object - with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.open', do_open) as _,\ - patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + with patch("mbed_lstools.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.open", do_open + ) as _, patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: _mpr.return_value = True _listdir.return_value = test_data_case_file_names results = 
self.base.list_mbeds(read_details_txt=True) @@ -106,108 +113,116 @@ def do_open(path, mode='r'): if actual_value != expected_data[key]: differing_map[key] = (actual_value, expected_data[key]) - if differing_map: - differing_string = '' + differing_string = "" for differing_key in sorted(list(differing_map.keys())): actual, expected = differing_map[differing_key] differing_string += ' "%s": "%s" (expected "%s")\n' % (differing_key, actual, expected) - assert_string = 'Expected data mismatch:\n\n{\n%s}' % (differing_string) + assert_string = "Expected data mismatch:\n\n{\n%s}" % (differing_string) self.assertTrue(False, assert_string) - - def test_efm32pg_stk3401_jlink(self): - self.run_test('efm32pg_stk3401_jlink', { - 'target_id_usb_id': u'000440074453', - 'vendor_id': '1366', - 'product_id': '1015' - }, { - 'platform_name': 'EFM32PG_STK3401', - 'device_type': 'jlink', - 'target_id': '2035022D000122D5D475113A', - 'target_id_usb_id': '000440074453', - 'target_id_mbed_htm': '2035022D000122D5D475113A' - }) + self.run_test( + "efm32pg_stk3401_jlink", + {"target_id_usb_id": "000440074453", "vendor_id": "1366", "product_id": "1015"}, + { + "platform_name": "EFM32PG_STK3401", + "device_type": "jlink", + "target_id": "2035022D000122D5D475113A", + "target_id_usb_id": "000440074453", + "target_id_mbed_htm": "2035022D000122D5D475113A", + }, + ) def test_lpc1768(self): - self.run_test('lpc1768', { - 'target_id_usb_id': u'101000000000000000000002F7F20DF3', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 'platform_name': 'LPC1768', - 'device_type': 'daplink', - 'target_id': '101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000', - 'target_id_usb_id': '101000000000000000000002F7F20DF3', - 'target_id_mbed_htm': '101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000' - }) + self.run_test( + "lpc1768", + {"target_id_usb_id": "101000000000000000000002F7F20DF3", "vendor_id": "0d28", "product_id": "0204"}, + { + "platform_name": "LPC1768", + "device_type": 
"daplink", + "target_id": "101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000", + "target_id_usb_id": "101000000000000000000002F7F20DF3", + "target_id_mbed_htm": "101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000", + }, + ) def test_nucleo_f411re_stlink(self): - self.run_test('nucleo_f411re_stlink', { - 'target_id_usb_id': u'0671FF554856805087112815', - 'vendor_id': '0483', - 'product_id': '374b' - }, { - 'platform_name': 'NUCLEO_F411RE', - 'device_type': 'stlink', - 'target_id': '07400221076061193824F764', - 'target_id_usb_id': '0671FF554856805087112815', - 'target_id_mbed_htm': '07400221076061193824F764' - }) + self.run_test( + "nucleo_f411re_stlink", + {"target_id_usb_id": "0671FF554856805087112815", "vendor_id": "0483", "product_id": "374b"}, + { + "platform_name": "NUCLEO_F411RE", + "device_type": "stlink", + "target_id": "07400221076061193824F764", + "target_id_usb_id": "0671FF554856805087112815", + "target_id_mbed_htm": "07400221076061193824F764", + }, + ) def test_nrf51_microbit(self): - self.run_test('nrf51_microbit', { - 'target_id_usb_id': u'9900007031324e45000f9019000000340000000097969901', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 'platform_name': 'NRF51_MICROBIT', - 'device_type': 'daplink', - 'target_id': '9900007031324e45000f9019000000340000000097969901', - 'target_id_usb_id': '9900007031324e45000f9019000000340000000097969901' - }) + self.run_test( + "nrf51_microbit", + { + "target_id_usb_id": "9900007031324e45000f9019000000340000000097969901", + "vendor_id": "0d28", + "product_id": "0204", + }, + { + "platform_name": "NRF51_MICROBIT", + "device_type": "daplink", + "target_id": "9900007031324e45000f9019000000340000000097969901", + "target_id_usb_id": "9900007031324e45000f9019000000340000000097969901", + }, + ) def test_k64f_daplink(self): - self.run_test('k64f_daplink', { - 'target_id_usb_id': u'0240000032044e45000a700a997b00356781000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 
'platform_name': 'K64F', - 'device_type': 'daplink', - 'target_id': '0240000032044e45000a700a997b00356781000097969900', - 'target_id_usb_id': '0240000032044e45000a700a997b00356781000097969900', - 'target_id_mbed_htm': '0240000032044e45000a700a997b00356781000097969900' - }) + self.run_test( + "k64f_daplink", + { + "target_id_usb_id": "0240000032044e45000a700a997b00356781000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + { + "platform_name": "K64F", + "device_type": "daplink", + "target_id": "0240000032044e45000a700a997b00356781000097969900", + "target_id_usb_id": "0240000032044e45000a700a997b00356781000097969900", + "target_id_mbed_htm": "0240000032044e45000a700a997b00356781000097969900", + }, + ) def test_nrf52_dk_daplink(self): - self.run_test('nrf52_dk_daplink', { - 'target_id_usb_id': u'110100004420312043574641323032203233303397969903', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 'platform_name': 'NRF52_DK', - 'device_type': 'daplink', - 'target_id': '110100004420312043574641323032203233303397969903', - 'target_id_usb_id': '110100004420312043574641323032203233303397969903', - 'target_id_mbed_htm': '110100004420312043574641323032203233303397969903' - }) + self.run_test( + "nrf52_dk_daplink", + { + "target_id_usb_id": "110100004420312043574641323032203233303397969903", + "vendor_id": "0d28", + "product_id": "0204", + }, + { + "platform_name": "NRF52_DK", + "device_type": "daplink", + "target_id": "110100004420312043574641323032203233303397969903", + "target_id_usb_id": "110100004420312043574641323032203233303397969903", + "target_id_mbed_htm": "110100004420312043574641323032203233303397969903", + }, + ) def test_nrf52_dk_jlink(self): - self.run_test('nrf52_dk_jlink', { - 'target_id_usb_id': u'000682546728', - 'vendor_id': '1366', - 'product_id': '1015' - }, { - 'platform_name': 'NRF52_DK', - 'device_type': 'jlink', - 'target_id': '000682546728', - 'target_id_usb_id': '000682546728' - }) - - - -if __name__ == '__main__': + 
self.run_test( + "nrf52_dk_jlink", + {"target_id_usb_id": "000682546728", "vendor_id": "1366", "product_id": "1015"}, + { + "platform_name": "NRF52_DK", + "device_type": "jlink", + "target_id": "000682546728", + "target_id_usb_id": "000682546728", + }, + ) + + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/detect/details_txt.py b/tools/python_tests/mbed_os_tools/detect/details_txt.py index 092eba54365..6f230eaacaa 100644 --- a/tools/python_tests/mbed_os_tools/detect/details_txt.py +++ b/tools/python_tests/mbed_os_tools/detect/details_txt.py @@ -21,10 +21,8 @@ from mbed_os_tools.detect.main import create - class ParseMbedHTMTestCase(unittest.TestCase): - """ Unit tests checking HTML parsing code for 'mbed.htm' files - """ + """Unit tests checking HTML parsing code for 'mbed.htm' files""" details_txt_0226 = """Version: 0226 Build: Aug 24 2015 17:06:30 @@ -59,13 +57,13 @@ def test_simplified_daplink_txt_content(self): # Check parsing content result = self.mbeds._parse_details(lines) self.assertEqual(4, len(result)) - self.assertIn('Version', result) - self.assertIn('Build', result) - self.assertIn('Git Commit SHA', result) - self.assertIn('Git Local mods', result) + self.assertIn("Version", result) + self.assertIn("Build", result) + self.assertIn("Git Commit SHA", result) + self.assertIn("Git Local mods", result) # Check for daplink_version - self.assertEqual(result['Version'], "0226") + self.assertEqual(result["Version"], "0226") def test_extended_daplink_txt_content(self): # Fetch lines from DETAILS.TXT @@ -74,30 +72,30 @@ def test_extended_daplink_txt_content(self): # Check parsing content result = self.mbeds._parse_details(lines) - self.assertEqual(11, len(result)) # 12th would be comment - self.assertIn('Unique ID', result) - self.assertIn('HIF ID', result) - self.assertIn('Auto Reset', result) - self.assertIn('Automation allowed', result) - self.assertIn('Daplink Mode', result) - self.assertIn('Interface Version', 
result) - self.assertIn('Git SHA', result) - self.assertIn('Local Mods', result) - self.assertIn('USB Interfaces', result) - self.assertIn('Interface CRC', result) + self.assertEqual(11, len(result)) # 12th would be comment + self.assertIn("Unique ID", result) + self.assertIn("HIF ID", result) + self.assertIn("Auto Reset", result) + self.assertIn("Automation allowed", result) + self.assertIn("Daplink Mode", result) + self.assertIn("Interface Version", result) + self.assertIn("Git SHA", result) + self.assertIn("Local Mods", result) + self.assertIn("USB Interfaces", result) + self.assertIn("Interface CRC", result) # Check if we parsed comment line: # "# DAPLink Firmware - see https://mbed.com/daplink" for key in result: # Check if we parsed comment - self.assertFalse(key.startswith('#')) + self.assertFalse(key.startswith("#")) # Check if we parsed - self.assertFalse('https://mbed.com/daplink' in result[key]) + self.assertFalse("https://mbed.com/daplink" in result[key]) # Check for daplink_version # DAPlink <240 compatibility - self.assertEqual(result['Interface Version'], "0240") - self.assertEqual(result['Version'], "0240") + self.assertEqual(result["Interface Version"], "0240") + self.assertEqual(result["Version"], "0240") def test_(self): pass @@ -105,5 +103,6 @@ def test_(self): def test_(self): pass -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/detect/detect_os.py b/tools/python_tests/mbed_os_tools/detect/detect_os.py index 2317f3bc08d..f664d0f3094 100644 --- a/tools/python_tests/mbed_os_tools/detect/detect_os.py +++ b/tools/python_tests/mbed_os_tools/detect/detect_os.py @@ -26,8 +26,7 @@ class DetectOSTestCase(unittest.TestCase): - """ Test cases for host OS related functionality. Helpful during porting - """ + """Test cases for host OS related functionality. 
Helpful during porting""" def setUp(self): pass @@ -45,18 +44,14 @@ def test_porting_create(self): self.assertNotEqual(None, create()) def test_supported_os_name(self): - os_names = ['Windows7', 'Ubuntu', 'LinuxGeneric', 'Darwin'] + os_names = ["Windows7", "Ubuntu", "LinuxGeneric", "Darwin"] self.assertIn(mbed_os_support(), os_names) def test_detect_os_support_ext(self): - os_info = (os.name, - platform.system(), - platform.release(), - platform.version(), - sys.platform) + os_info = (os.name, platform.system(), platform.release(), platform.version(), sys.platform) self.assertEqual(os_info, mbed_lstools_os_info()) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/detect/mbed_htm.py b/tools/python_tests/mbed_os_tools/detect/mbed_htm.py index 4cdd2015be8..e3e7d3aa5df 100644 --- a/tools/python_tests/mbed_os_tools/detect/mbed_htm.py +++ b/tools/python_tests/mbed_os_tools/detect/mbed_htm.py @@ -21,14 +21,13 @@ from mbed_os_tools.detect.main import create - - class ParseMbedHTMTestCase(unittest.TestCase): - """ Unit tests checking HTML parsing code for 'mbed.htm' files - """ + """Unit tests checking HTML parsing code for 'mbed.htm' files""" # DAPlink <0240 - test_mbed_htm_k64f_url_str = '' + test_mbed_htm_k64f_url_str = ( + '' + ) test_mbed_htm_lpc1768_url_str = '' test_mbed_htm_nrf51_url_str = '' @@ -43,46 +42,51 @@ def tearDown(self): def test_mbed_htm_k64f_url(self): target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_k64f_url_str) - self.assertEqual('02400203D94B0E7724B7F3CF', target_id) + self.assertEqual("02400203D94B0E7724B7F3CF", target_id) def test_mbed_htm_lpc1768_url(self): target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_lpc1768_url_str) - self.assertEqual('101000000000000000000002F7F1869557200730298d254d3ff3509e3fe4722d', target_id) + self.assertEqual("101000000000000000000002F7F1869557200730298d254d3ff3509e3fe4722d", target_id) def test_daplink_240_mbed_html(self): 
target_id = self.mbeds._target_id_from_htm(self.test_daplink_240_mbed_html_str) - self.assertEqual('0240000029164e45002f0012706e0006f301000097969900', target_id) + self.assertEqual("0240000029164e45002f0012706e0006f301000097969900", target_id) def test_mbed_htm_nrf51_url(self): target_id = self.mbeds._target_id_from_htm(self.test_mbed_htm_nrf51_url_str) - self.assertEqual('1100021952333120353935373130313232323032AFD5DFD8', target_id) + self.assertEqual("1100021952333120353935373130313232323032AFD5DFD8", target_id) def get_mbed_htm_comment_section_ver_build(self): # Incorrect data - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNone(ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build( + "" + ) self.assertIsNone(ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNone(ver_bld) # Correct data - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNotNone(ver_bld) - self.assertEqual(('0200', 'Mar 26 2014 13:22:20'), ver_bld) + self.assertEqual(("0200", "Mar 26 2014 13:22:20"), ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build( + "" + ) self.assertIsNotNone(ver_bld) - self.assertEqual(('0219', 'Feb 2 2016 15:20:54'), ver_bld) + self.assertEqual(("0219", "Feb 2 2016 15:20:54"), ver_bld) - ver_bld = self.mbeds._mbed_htm_comment_section_ver_build('') + ver_bld = self.mbeds._mbed_htm_comment_section_ver_build("") self.assertIsNotNone(ver_bld) - self.assertEqual(('0.14.3', '471'), ver_bld) + self.assertEqual(("0.14.3", "471"), ver_bld) def test_(self): pass -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git 
a/tools/python_tests/mbed_os_tools/detect/mbedls_toolsbase.py b/tools/python_tests/mbed_os_tools/detect/mbedls_toolsbase.py index 6dea2e7da25..be29be4bc2e 100644 --- a/tools/python_tests/mbed_os_tools/detect/mbedls_toolsbase.py +++ b/tools/python_tests/mbed_os_tools/detect/mbedls_toolsbase.py @@ -25,20 +25,23 @@ from mbed_os_tools.detect.lstools_base import MbedLsToolsBase, FSInteraction + class DummyLsTools(MbedLsToolsBase): return_value = [] + def find_candidates(self): return self.return_value + try: basestring except NameError: # Python 3 basestring = str + class BasicTestCase(unittest.TestCase): - """ Basic test cases checking trivial asserts - """ + """Basic test cases checking trivial asserts""" def setUp(self): self.base = DummyLsTools(force_mock=True) @@ -47,80 +50,88 @@ def tearDown(self): pass def test_list_mbeds_valid_platform(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}, - {'mount_point': None, - 'target_id_usb_id': '00000000000', - 'serial_port': 'not_valid'}] - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + }, + {"mount_point": None, "target_id_usb_id": "00000000000", "serial_port": "not_valid"}, + ] + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: 
_mpr.return_value = True - _read_htm.return_value = (u'0241BEEFDEAD', {}) - _get.return_value = { - 'platform_name': 'foo_target' - } - _listdir.return_value = ['mbed.htm'] + _read_htm.return_value = ("0241BEEFDEAD", {}) + _get.return_value = {"platform_name": "foo_target"} + _listdir.return_value = ["mbed.htm"] to_check = self.base.list_mbeds() - _read_htm.assert_called_once_with('dummy_mount_point') - _get.assert_any_call('0241', device_type='daplink', verbose_data=True) + _read_htm.assert_called_once_with("dummy_mount_point") + _get.assert_any_call("0241", device_type="daplink", verbose_data=True) self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['target_id'], "0241BEEFDEAD") - self.assertEqual(to_check[0]['platform_name'], 'foo_target') + self.assertEqual(to_check[0]["target_id"], "0241BEEFDEAD") + self.assertEqual(to_check[0]["platform_name"], "foo_target") def test_list_mbeds_invalid_tid(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}, - {'mount_point': 'dummy_mount_point', - 'target_id_usb_id': "", - 'serial_port': 'not_valid'}] - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + }, + {"mount_point": "dummy_mount_point", "target_id_usb_id": "", "serial_port": "not_valid"}, + ] + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as 
_get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True - _read_htm.side_effect = [(u'0241BEEFDEAD', {}), (None, {})] - _get.return_value = { - 'platform_name': 'foo_target' - } - _listdir.return_value = ['mbed.htm'] + _read_htm.side_effect = [("0241BEEFDEAD", {}), (None, {})] + _get.return_value = {"platform_name": "foo_target"} + _listdir.return_value = ["mbed.htm"] to_check = self.base.list_mbeds() - _get.assert_any_call('0241', device_type='daplink', verbose_data=True) + _get.assert_any_call("0241", device_type="daplink", verbose_data=True) self.assertEqual(len(to_check), 2) - self.assertEqual(to_check[0]['target_id'], "0241BEEFDEAD") - self.assertEqual(to_check[0]['platform_name'], 'foo_target') - self.assertEqual(to_check[1]['target_id'], "") - self.assertEqual(to_check[1]['platform_name'], None) + self.assertEqual(to_check[0]["target_id"], "0241BEEFDEAD") + self.assertEqual(to_check[0]["platform_name"], "foo_target") + self.assertEqual(to_check[1]["target_id"], "") + self.assertEqual(to_check[1]["platform_name"], None) def test_list_mbeds_invalid_platform(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'not_in_target_db', - 'serial_port': "dummy_serial_port"}] + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "not_in_target_db", + "serial_port": "dummy_serial_port", + } + ] for qos in [FSInteraction.BeforeFilter, FSInteraction.AfterFilter]: - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + 
"mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True - _read_htm.return_value = (u'not_in_target_db', {}) + _read_htm.return_value = ("not_in_target_db", {}) _get.return_value = None - _listdir.return_value = ['MBED.HTM'] + _listdir.return_value = ["MBED.HTM"] to_check = self.base.list_mbeds() - _read_htm.assert_called_once_with('dummy_mount_point') - _get.assert_any_call('not_', device_type='daplink', verbose_data=True) + _read_htm.assert_called_once_with("dummy_mount_point") + _get.assert_any_call("not_", device_type="daplink", verbose_data=True) self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['target_id'], "not_in_target_db") - self.assertEqual(to_check[0]['platform_name'], None) + self.assertEqual(to_check[0]["target_id"], "not_in_target_db") + self.assertEqual(to_check[0]["platform_name"], None) def test_list_mbeds_unmount_mid_read(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + self.base.return_value = [ + {"mount_point": "dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": "dummy_serial_port"} + ] + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True _listdir.side_effect = OSError to_check = self.base.list_mbeds() @@ -128,16 +139,20 @@ def test_list_mbeds_unmount_mid_read(self): def test_list_mbeds_read_mbed_htm_failure(self): def _test(mock): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': 
u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir,\ - patch('mbed_os_tools.detect.lstools_base.open', mock, create=True): + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + } + ] + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir, patch("mbed_os_tools.detect.lstools_base.open", mock, create=True): _mpr.return_value = True - _listdir.return_value = ['MBED.HTM', 'DETAILS.TXT'] + _listdir.return_value = ["MBED.HTM", "DETAILS.TXT"] to_check = self.base.list_mbeds() - mock.assert_called_once_with(os.path.join('dummy_mount_point', 'mbed.htm'), 'r') + mock.assert_called_once_with(os.path.join("dummy_mount_point", "mbed.htm"), "r") self.assertEqual(len(to_check), 0) m = mock_open() @@ -149,11 +164,11 @@ def _test(mock): _test(m) def test_list_mbeds_read_no_mbed_htm(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] + self.base.return_value = [ + {"mount_point": "dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": "dummy_serial_port"} + ] - details_txt_contents = '''\ + details_txt_contents = """\ # DAPLink Firmware - see https://mbed.com/daplink Unique ID: 0240000032044e4500257009997b00386781000097969900 HIC ID: 97969900 @@ -169,39 +184,45 @@ def test_list_mbeds_read_no_mbed_htm(self): Bootloader CRC: 0xb92403e6 Interface CRC: 0x434eddd1 Remount count: 0 -''' +""" + def _handle_open(*args, **kwargs): - if args[0].lower() == os.path.join('dummy_mount_point', 'mbed.htm'): + if args[0].lower() == os.path.join("dummy_mount_point", "mbed.htm"): raise OSError("(mocked open) No such file or 
directory: 'mbed.htm'") else: return DEFAULT m = mock_open(read_data=details_txt_contents) - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir,\ - patch('mbed_os_tools.detect.lstools_base.open', m, create=True) as mocked_open: + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir, patch("mbed_os_tools.detect.lstools_base.open", m, create=True) as mocked_open: mocked_open.side_effect = _handle_open _mpr.return_value = True - _listdir.return_value = ['PRODINFO.HTM', 'DETAILS.TXT'] + _listdir.return_value = ["PRODINFO.HTM", "DETAILS.TXT"] to_check = self.base.list_mbeds() self.assertEqual(len(to_check), 1) - m.assert_called_once_with(os.path.join('dummy_mount_point', 'DETAILS.TXT'), 'r') - self.assertEqual(to_check[0]['target_id'], '0240000032044e4500257009997b00386781000097969900') + m.assert_called_once_with(os.path.join("dummy_mount_point", "DETAILS.TXT"), "r") + self.assertEqual(to_check[0]["target_id"], "0240000032044e4500257009997b00386781000097969900") def test_list_mbeds_read_details_txt_failure(self): def _test(mock): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._update_device_from_htm") as _htm,\ - patch('mbed_os_tools.detect.lstools_base.open', mock, create=True): + self.base.return_value = [ + { + "mount_point": "dummy_mount_point", + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + } + ] + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + 
"mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase._update_device_from_htm" + ) as _htm, patch("mbed_os_tools.detect.lstools_base.open", mock, create=True): _mpr.return_value = True _htm.side_effect = None - _listdir.return_value = ['MBED.HTM', 'DETAILS.TXT'] + _listdir.return_value = ["MBED.HTM", "DETAILS.TXT"] to_check = self.base.list_mbeds(read_details_txt=True) - mock.assert_called_once_with(os.path.join('dummy_mount_point', 'DETAILS.TXT'), 'r') + mock.assert_called_once_with(os.path.join("dummy_mount_point", "DETAILS.TXT"), "r") self.assertEqual(len(to_check), 0) m = mock_open() @@ -214,146 +235,148 @@ def _test(mock): def test_list_mbeds_unmount_mid_read_list_unmounted(self): self.base.list_unmounted = True - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port"}] - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + self.base.return_value = [ + {"mount_point": "dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": "dummy_serial_port"} + ] + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True _listdir.side_effect = OSError to_check = self.base.list_mbeds() self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['mount_point'], None) - self.assertEqual(to_check[0]['device_type'], 'unknown') - self.assertEqual(to_check[0]['platform_name'], 'K64F') + self.assertEqual(to_check[0]["mount_point"], None) + self.assertEqual(to_check[0]["device_type"], "unknown") + self.assertEqual(to_check[0]["platform_name"], "K64F") def test_mock_manufacture_ids_default_multiple(self): # oper='+' - for mid, platform_name in [('0341', 'TEST_PLATFORM_NAME_1'), - 
('0342', 'TEST_PLATFORM_NAME_2'), - ('0343', 'TEST_PLATFORM_NAME_3')]: + for mid, platform_name in [ + ("0341", "TEST_PLATFORM_NAME_1"), + ("0342", "TEST_PLATFORM_NAME_2"), + ("0343", "TEST_PLATFORM_NAME_3"), + ]: self.base.mock_manufacture_id(mid, platform_name) self.assertEqual(platform_name, self.base.plat_db.get(mid)) def test_mock_manufacture_ids_minus(self): # oper='+' - for mid, platform_name in [('0341', 'TEST_PLATFORM_NAME_1'), - ('0342', 'TEST_PLATFORM_NAME_2'), - ('0343', 'TEST_PLATFORM_NAME_3')]: + for mid, platform_name in [ + ("0341", "TEST_PLATFORM_NAME_1"), + ("0342", "TEST_PLATFORM_NAME_2"), + ("0343", "TEST_PLATFORM_NAME_3"), + ]: self.base.mock_manufacture_id(mid, platform_name) self.assertEqual(platform_name, self.base.plat_db.get(mid)) # oper='-' - mock_ids = self.base.mock_manufacture_id('0342', '', oper='-') - self.assertEqual('TEST_PLATFORM_NAME_1', self.base.plat_db.get("0341")) + mock_ids = self.base.mock_manufacture_id("0342", "", oper="-") + self.assertEqual("TEST_PLATFORM_NAME_1", self.base.plat_db.get("0341")) self.assertEqual(None, self.base.plat_db.get("0342")) - self.assertEqual('TEST_PLATFORM_NAME_3', self.base.plat_db.get("0343")) + self.assertEqual("TEST_PLATFORM_NAME_3", self.base.plat_db.get("0343")) def test_mock_manufacture_ids_star(self): # oper='+' - for mid, platform_name in [('0341', 'TEST_PLATFORM_NAME_1'), - ('0342', 'TEST_PLATFORM_NAME_2'), - ('0343', 'TEST_PLATFORM_NAME_3')]: + for mid, platform_name in [ + ("0341", "TEST_PLATFORM_NAME_1"), + ("0342", "TEST_PLATFORM_NAME_2"), + ("0343", "TEST_PLATFORM_NAME_3"), + ]: self.base.mock_manufacture_id(mid, platform_name) self.assertEqual(platform_name, self.base.plat_db.get(mid)) # oper='-' - self.base.mock_manufacture_id('*', '', oper='-') + self.base.mock_manufacture_id("*", "", oper="-") self.assertEqual(None, self.base.plat_db.get("0341")) self.assertEqual(None, self.base.plat_db.get("0342")) self.assertEqual(None, self.base.plat_db.get("0343")) - def 
test_update_device_from_fs_mid_unmount(self): - dummy_mount = 'dummy_mount' - device = { - 'mount_point': dummy_mount - } + dummy_mount = "dummy_mount" + device = {"mount_point": dummy_mount} - with patch('os.listdir') as _listdir: + with patch("os.listdir") as _listdir: _listdir.side_effect = OSError self.base._update_device_from_fs(device, False) - self.assertEqual(device['mount_point'], None) + self.assertEqual(device["mount_point"], None) def test_detect_device_test(self): - device_type = self.base._detect_device_type({ - 'vendor_id': '0483' - }) - self.assertEqual(device_type, 'stlink') + device_type = self.base._detect_device_type({"vendor_id": "0483"}) + self.assertEqual(device_type, "stlink") - device_type = self.base._detect_device_type({ - 'vendor_id': '0d28' - }) - self.assertEqual(device_type, 'daplink') + device_type = self.base._detect_device_type({"vendor_id": "0d28"}) + self.assertEqual(device_type, "daplink") - device_type = self.base._detect_device_type({ - 'vendor_id': '1366' - }) - self.assertEqual(device_type, 'jlink') + device_type = self.base._detect_device_type({"vendor_id": "1366"}) + self.assertEqual(device_type, "jlink") def test_device_type_unmounted(self): self.base.list_unmounted = True - self.base.return_value = [{'mount_point': None, - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': "dummy_serial_port", - 'vendor_id': '0d28', - 'product_id': '0204'}] - with patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: - _get.return_value = { - 'platform_name': 'foo_target' + self.base.return_value = [ + { + "mount_point": None, + "target_id_usb_id": "0240DEADBEEF", + "serial_port": "dummy_serial_port", + "vendor_id": "0d28", + "product_id": "0204", } + ] + with patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: + _get.return_value = {"platform_name": 
"foo_target"} to_check = self.base.list_mbeds() - #_get.assert_any_call('0240', device_type='daplink', verbose_data=True) + # _get.assert_any_call('0240', device_type='daplink', verbose_data=True) self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['target_id'], "0240DEADBEEF") - self.assertEqual(to_check[0]['platform_name'], 'foo_target') - self.assertEqual(to_check[0]['device_type'], 'daplink') + self.assertEqual(to_check[0]["target_id"], "0240DEADBEEF") + self.assertEqual(to_check[0]["platform_name"], "foo_target") + self.assertEqual(to_check[0]["device_type"], "daplink") def test_update_device_details_jlink(self): - jlink_html_contents = ('' - 'NXP Product Page') + jlink_html_contents = ( + '' + "NXP Product Page" + ) _open = mock_open(read_data=jlink_html_contents) - dummy_mount_point = 'dummy' - base_device = { - 'mount_point': dummy_mount_point - } + dummy_mount_point = "dummy" + base_device = {"mount_point": dummy_mount_point} - with patch('mbed_os_tools.detect.lstools_base.open', _open, create=True): + with patch("mbed_os_tools.detect.lstools_base.open", _open, create=True): device = deepcopy(base_device) - device['directory_entries'] = ['Board.html', 'User Guide.html'] + device["directory_entries"] = ["Board.html", "User Guide.html"] self.base._update_device_details_jlink(device, False) - self.assertEqual(device['url'], 'http://www.nxp.com/FRDM-KL27Z') - self.assertEqual(device['platform_name'], 'KL27Z') - _open.assert_called_once_with(os.path.join(dummy_mount_point, 'Board.html'), 'r') + self.assertEqual(device["url"], "http://www.nxp.com/FRDM-KL27Z") + self.assertEqual(device["platform_name"], "KL27Z") + _open.assert_called_once_with(os.path.join(dummy_mount_point, "Board.html"), "r") _open.reset_mock() device = deepcopy(base_device) - device['directory_entries'] = ['User Guide.html'] + device["directory_entries"] = ["User Guide.html"] self.base._update_device_details_jlink(device, False) - self.assertEqual(device['url'], 
'http://www.nxp.com/FRDM-KL27Z') - self.assertEqual(device['platform_name'], 'KL27Z') - _open.assert_called_once_with(os.path.join(dummy_mount_point, 'User Guide.html'), 'r') + self.assertEqual(device["url"], "http://www.nxp.com/FRDM-KL27Z") + self.assertEqual(device["platform_name"], "KL27Z") + _open.assert_called_once_with(os.path.join(dummy_mount_point, "User Guide.html"), "r") _open.reset_mock() device = deepcopy(base_device) - device['directory_entries'] = ['unhelpful_file.html'] + device["directory_entries"] = ["unhelpful_file.html"] self.base._update_device_details_jlink(device, False) _open.assert_not_called() def test_fs_never(self): device = { - 'target_id_usb_id': '024075309420ABCE', - 'mount_point': 'invalid_mount_point', - 'serial_port': 'invalid_serial_port' + "target_id_usb_id": "024075309420ABCE", + "mount_point": "invalid_mount_point", + "serial_port": "invalid_serial_port", } self.base.return_value = [device] - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._update_device_from_fs") as _up_fs,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as mount_point_ready: + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._update_device_from_fs") as _up_fs, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as mount_point_ready: mount_point_ready.return_value = True filter = None @@ -361,22 +384,22 @@ def test_fs_never(self): ret_with_details = self.base.list_mbeds(FSInteraction.Never, filter, read_details_txt=True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], ret[0]['target_id_usb_id']) - self.assertEqual(ret[0]['platform_name'], "K64F") + self.assertEqual(ret[0]["target_id"], ret[0]["target_id_usb_id"]) + self.assertEqual(ret[0]["platform_name"], "K64F") self.assertEqual(ret[0], ret_with_details[0]) _up_fs.assert_not_called() - filter_in = lambda m: m['platform_name'] == 'K64F' + filter_in = lambda m: 
m["platform_name"] == "K64F" ret = self.base.list_mbeds(FSInteraction.Never, filter_in, read_details_txt=False) ret_with_details = self.base.list_mbeds(FSInteraction.Never, filter_in, read_details_txt=True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], ret[0]['target_id_usb_id']) - self.assertEqual(ret[0]['platform_name'], "K64F") + self.assertEqual(ret[0]["target_id"], ret[0]["target_id_usb_id"]) + self.assertEqual(ret[0]["platform_name"], "K64F") self.assertEqual(ret[0], ret_with_details[0]) _up_fs.assert_not_called() - filter_out = lambda m: m['platform_name'] != 'K64F' + filter_out = lambda m: m["platform_name"] != "K64F" ret = self.base.list_mbeds(FSInteraction.Never, filter_out, read_details_txt=False) ret_with_details = self.base.list_mbeds(FSInteraction.Never, filter_out, read_details_txt=True) _up_fs.assert_not_called() @@ -386,20 +409,19 @@ def test_fs_never(self): def test_fs_after(self): device = { - 'target_id_usb_id': '024075309420ABCE', - 'mount_point': 'invalid_mount_point', - 'serial_port': 'invalid_serial_port' + "target_id_usb_id": "024075309420ABCE", + "mount_point": "invalid_mount_point", + "serial_port": "invalid_serial_port", } - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._details_txt") as _up_details,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as mount_point_ready,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase._details_txt" + ) as _up_details, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as mount_point_ready, patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: new_device_id = "00017531642046" 
_read_htm.return_value = (new_device_id, {}) - _listdir.return_value = ['mbed.htm', 'details.txt'] - _up_details.return_value = { - 'automation_allowed': '0' - } + _listdir.return_value = ["mbed.htm", "details.txt"] + _up_details.return_value = {"automation_allowed": "0"} mount_point_ready.return_value = True filter = None @@ -411,46 +433,42 @@ def test_fs_after(self): self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], new_device_id) - self.assertEqual(ret_with_details[0]['daplink_automation_allowed'], '0') + self.assertEqual(ret[0]["target_id"], new_device_id) + self.assertEqual(ret_with_details[0]["daplink_automation_allowed"], "0") # Below is the recommended replacement for assertDictContainsSubset(). # See: https://stackoverflow.com/a/59777678/7083698 self.assertEqual(ret_with_details[0], {**ret_with_details[0], **ret[0]}) - - _read_htm.assert_called_with(device['mount_point']) - _up_details.assert_called_with(device['mount_point']) + + _read_htm.assert_called_with(device["mount_point"]) + _up_details.assert_called_with(device["mount_point"]) _read_htm.reset_mock() _up_details.reset_mock() - filter_in = lambda m: m['target_id'] == device['target_id_usb_id'] - filter_details = lambda m: m.get('daplink_automation_allowed', None) == '0' + filter_in = lambda m: m["target_id"] == device["target_id_usb_id"] + filter_details = lambda m: m.get("daplink_automation_allowed", None) == "0" self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_in, False, False) + ret = self.base.list_mbeds(FSInteraction.AfterFilter, filter_in, False, False) self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_details, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.AfterFilter, filter_details, False, True) self.assertIsNotNone(ret[0]) self.assertEqual(ret_with_details, []) - 
self.assertEqual(ret[0]['target_id'], new_device_id) - _read_htm.assert_called_with(device['mount_point']) + self.assertEqual(ret[0]["target_id"], new_device_id) + _read_htm.assert_called_with(device["mount_point"]) _up_details.assert_not_called() _read_htm.reset_mock() _up_details.reset_mock() - filter_out = lambda m: m['target_id'] == new_device_id + filter_out = lambda m: m["target_id"] == new_device_id self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_out, False, False) + ret = self.base.list_mbeds(FSInteraction.AfterFilter, filter_out, False, False) self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.AfterFilter, filter_out, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.AfterFilter, filter_out, False, True) self.assertEqual(ret, []) self.assertEqual(ret_with_details, []) @@ -460,97 +478,87 @@ def test_fs_after(self): def test_get_supported_platforms(self): supported_platforms = self.base.get_supported_platforms() self.assertTrue(isinstance(supported_platforms, dict)) - self.assertEqual(supported_platforms['0240'], 'K64F') + self.assertEqual(supported_platforms["0240"], "K64F") def test_fs_before(self): device = { - 'target_id_usb_id': '024075309420ABCE', - 'mount_point': 'invalid_mount_point', - 'serial_port': 'invalid_serial_port' + "target_id_usb_id": "024075309420ABCE", + "mount_point": "invalid_mount_point", + "serial_port": "invalid_serial_port", } - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._details_txt") as _up_details,\ - patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as mount_point_ready,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: - new_device_id = u'00017575430420' + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, 
patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase._details_txt" + ) as _up_details, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as mount_point_ready, patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: + new_device_id = "00017575430420" _read_htm.return_value = (new_device_id, {}) - _listdir.return_value = ['mbed.htm', 'details.txt'] - _up_details.return_value = { - 'automation_allowed': '0' - } + _listdir.return_value = ["mbed.htm", "details.txt"] + _up_details.return_value = {"automation_allowed": "0"} mount_point_ready.return_value = True filter = None self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter, False, False) + ret = self.base.list_mbeds(FSInteraction.BeforeFilter, filter, False, False) _up_details.assert_not_called() self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.BeforeFilter, filter, False, True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], new_device_id) - self.assertEqual(ret_with_details[0]['daplink_automation_allowed'], '0') + self.assertEqual(ret[0]["target_id"], new_device_id) + self.assertEqual(ret_with_details[0]["daplink_automation_allowed"], "0") self.assertEqual(ret_with_details[0], {**ret_with_details[0], **ret[0]}) - _read_htm.assert_called_with(device['mount_point']) - _up_details.assert_called_with(device['mount_point']) + _read_htm.assert_called_with(device["mount_point"]) + _up_details.assert_called_with(device["mount_point"]) _read_htm.reset_mock() _up_details.reset_mock() - filter_in = lambda m: m['target_id'] == '00017575430420' - filter_in_details = lambda m: m['daplink_automation_allowed'] == '0' + filter_in = lambda m: m["target_id"] == "00017575430420" + filter_in_details = lambda m: 
m["daplink_automation_allowed"] == "0" self.base.return_value = [deepcopy(device)] - ret = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_in, False, False) + ret = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_in, False, False) _up_details.assert_not_called() self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_in_details, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_in_details, False, True) self.assertIsNotNone(ret[0]) self.assertIsNotNone(ret_with_details[0]) - self.assertEqual(ret[0]['target_id'], new_device_id) - self.assertEqual(ret_with_details[0]['daplink_automation_allowed'], '0') + self.assertEqual(ret[0]["target_id"], new_device_id) + self.assertEqual(ret_with_details[0]["daplink_automation_allowed"], "0") self.assertEqual(ret_with_details[0], {**ret_with_details[0], **ret[0]}) - _read_htm.assert_called_with(device['mount_point']) - _up_details.assert_called_with(device['mount_point']) + _read_htm.assert_called_with(device["mount_point"]) + _up_details.assert_called_with(device["mount_point"]) _read_htm.reset_mock() _up_details.reset_mock() - filter_out = lambda m: m['target_id'] == '024075309420ABCE' - filter_out_details = lambda m: m['daplink_automation_allowed'] == '1' - ret = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_out, False, False) + filter_out = lambda m: m["target_id"] == "024075309420ABCE" + filter_out_details = lambda m: m["daplink_automation_allowed"] == "1" + ret = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_out, False, False) _up_details.assert_not_called() self.base.return_value = [deepcopy(device)] - ret_with_details = self.base.list_mbeds( - FSInteraction.BeforeFilter, filter_out_details, False, True) + ret_with_details = self.base.list_mbeds(FSInteraction.BeforeFilter, filter_out_details, False, True) self.assertEqual(ret, []) self.assertEqual(ret_with_details, []) - 
_read_htm.assert_called_with(device['mount_point']) + _read_htm.assert_called_with(device["mount_point"]) + class RetargetTestCase(unittest.TestCase): - """ Test cases that makes use of retargetting - """ + """Test cases that makes use of retargetting""" def setUp(self): - retarget_data = { - '0240DEADBEEF': { - 'serial_port' : 'valid' - } - } + retarget_data = {"0240DEADBEEF": {"serial_port": "valid"}} _open = mock_open(read_data=json.dumps(retarget_data)) - with patch('mbed_os_tools.detect.lstools_base.isfile') as _isfile,\ - patch('mbed_os_tools.detect.lstools_base.open', _open, create=True): + with patch("mbed_os_tools.detect.lstools_base.isfile") as _isfile, patch( + "mbed_os_tools.detect.lstools_base.open", _open, create=True + ): self.base = DummyLsTools() _open.assert_called() @@ -558,22 +566,22 @@ def tearDown(self): pass def test_list_mbeds_valid_platform(self): - self.base.return_value = [{'mount_point': 'dummy_mount_point', - 'target_id_usb_id': u'0240DEADBEEF', - 'serial_port': None}] - with patch('mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids') as _read_htm,\ - patch('mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready') as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.PlatformDatabase.get') as _get,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + self.base.return_value = [ + {"mount_point": "dummy_mount_point", "target_id_usb_id": "0240DEADBEEF", "serial_port": None} + ] + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase._read_htm_ids") as _read_htm, patch( + "mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready" + ) as _mpr, patch("mbed_os_tools.detect.lstools_base.PlatformDatabase.get") as _get, patch( + "mbed_os_tools.detect.lstools_base.listdir" + ) as _listdir: _mpr.return_value = True - _read_htm.return_value = (u'0240DEADBEEF', {}) - _get.return_value = { - 'platform_name': 'foo_target' - } - _listdir.return_value = ['mbed.htm'] + _read_htm.return_value = 
("0240DEADBEEF", {}) + _get.return_value = {"platform_name": "foo_target"} + _listdir.return_value = ["mbed.htm"] to_check = self.base.list_mbeds() self.assertEqual(len(to_check), 1) - self.assertEqual(to_check[0]['serial_port'], 'valid') + self.assertEqual(to_check[0]["serial_port"], "valid") + -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/detect/os_darwin.py b/tools/python_tests/mbed_os_tools/detect/os_darwin.py index c952dfdf2a2..e4abf99b549 100644 --- a/tools/python_tests/mbed_os_tools/detect/os_darwin.py +++ b/tools/python_tests/mbed_os_tools/detect/os_darwin.py @@ -21,9 +21,9 @@ from mbed_os_tools.detect.darwin import MbedLsToolsDarwin + class DarwinTestCase(unittest.TestCase): - """Tests for the Darwin port - """ + """Tests for the Darwin port""" def setUp(self): with patch("platform.mac_ver") as _pv: @@ -35,135 +35,187 @@ def tearDown(self): def test_a_k64f(self): disks = { - 'AllDisks': ['disk0', 'disk0s1', 'disk0s2', 'disk0s3', 'disk1', 'disk2'], - 'AllDisksAndPartitions': [{ 'Content': 'GUID_partition_scheme', - 'DeviceIdentifier': 'disk0', - 'Partitions': [ - { 'Content': 'EFI', - 'DeviceIdentifier': 'disk0s1', - 'DiskUUID': 'nope', - 'Size': 209715200, - 'VolumeName': 'EFI', - 'VolumeUUID': 'nu-uh'}, - { 'Content': 'Apple_CoreStorage', - 'DeviceIdentifier': 'disk0s2', - 'DiskUUID': 'nodda', - 'Size': 250006216704}, - { 'Content': 'Apple_Boot', - 'DeviceIdentifier': 'disk0s3', - 'DiskUUID': 'no soup for you!', - 'Size': 650002432, - 'VolumeName': 'Recovery HD', - 'VolumeUUID': 'Id rather not'}], - 'Size': 251000193024}, - { 'Content': 'Apple_HFS', - 'DeviceIdentifier': 'disk1', - 'MountPoint': '/', - 'Size': 249653772288, - 'VolumeName': 'Mac HD'}, - { 'Content': '', - 'DeviceIdentifier': 'disk2', - 'MountPoint': '/Volumes/DAPLINK', - 'Size': 67174400, - 'VolumeName': 'DAPLINK'}], - 'VolumesFromDisks': ['Mac HD', 'DAPLINK'], - 'WholeDisks': ['disk0', 'disk1', 'disk2'] - } - usb_tree 
= [{ - 'IORegistryEntryName': 'DAPLink CMSIS-DAP', - 'USB Serial Number': '0240000034544e45003a00048e3800525a91000097969900', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'AppleUSBHostLegacyClient'}, - {'IORegistryEntryName': 'AppleUSBHostCompositeDevice'}, - {'IORegistryEntryName': 'USB_MSC', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOUSBMassStorageInterfaceNub', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOUSBMassStorageDriverNub', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOUSBMassStorageDriver', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOSCSILogicalUnitNub', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOSCSIPeripheralDeviceType00', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOBlockStorageServices', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOBlockStorageDriver', - 'IORegistryEntryChildren': [ - {'BSD Name': 'disk2', - 'IORegistryEntryName': 'MBED VFS Media', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOMediaBSDClient'}], - }], - }], - }], - }], - }], - }], - }], - }], - }, - {'IORegistryEntryName': 'CMSIS-DAP', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOUSBHostHIDDevice', - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOHIDInterface'}, - {'IORegistryEntryName': 'IOHIDLibUserClient'}, - {'IORegistryEntryName': 'IOHIDLibUserClient'}], - }], - }, - {'IORegistryEntryName': 'mbed Serial Port', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - { 'IORegistryEntryName': 'AppleUSBACMControl'}], + "AllDisks": ["disk0", "disk0s1", "disk0s2", "disk0s3", "disk1", "disk2"], + "AllDisksAndPartitions": [ + { + "Content": "GUID_partition_scheme", + "DeviceIdentifier": "disk0", + "Partitions": [ + 
{ + "Content": "EFI", + "DeviceIdentifier": "disk0s1", + "DiskUUID": "nope", + "Size": 209715200, + "VolumeName": "EFI", + "VolumeUUID": "nu-uh", + }, + { + "Content": "Apple_CoreStorage", + "DeviceIdentifier": "disk0s2", + "DiskUUID": "nodda", + "Size": 250006216704, + }, + { + "Content": "Apple_Boot", + "DeviceIdentifier": "disk0s3", + "DiskUUID": "no soup for you!", + "Size": 650002432, + "VolumeName": "Recovery HD", + "VolumeUUID": "Id rather not", + }, + ], + "Size": 251000193024, + }, + { + "Content": "Apple_HFS", + "DeviceIdentifier": "disk1", + "MountPoint": "/", + "Size": 249653772288, + "VolumeName": "Mac HD", + }, + { + "Content": "", + "DeviceIdentifier": "disk2", + "MountPoint": "/Volumes/DAPLINK", + "Size": 67174400, + "VolumeName": "DAPLINK", }, - {'IORegistryEntryName': 'mbed Serial Port', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'AppleUSBACMData', - 'idProduct': 516, - 'idVendor': 3368, - 'IORegistryEntryChildren': [ - {'IORegistryEntryName': 'IOModemSerialStreamSync', - 'IORegistryEntryChildren': [ - {'IODialinDevice': '/dev/tty.usbmodem1422', - 'IORegistryEntryName': 'IOSerialBSDClient'}], - }], - }], - }], + ], + "VolumesFromDisks": ["Mac HD", "DAPLINK"], + "WholeDisks": ["disk0", "disk1", "disk2"], + } + usb_tree = [ + { + "IORegistryEntryName": "DAPLink CMSIS-DAP", + "USB Serial Number": "0240000034544e45003a00048e3800525a91000097969900", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + {"IORegistryEntryName": "AppleUSBHostLegacyClient"}, + {"IORegistryEntryName": "AppleUSBHostCompositeDevice"}, + { + "IORegistryEntryName": "USB_MSC", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOUSBMassStorageInterfaceNub", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOUSBMassStorageDriverNub", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { 
+ "IORegistryEntryName": "IOUSBMassStorageDriver", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOSCSILogicalUnitNub", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOSCSIPeripheralDeviceType00", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOBlockStorageServices", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOBlockStorageDriver", + "IORegistryEntryChildren": [ + { + "BSD Name": "disk2", + "IORegistryEntryName": "MBED VFS Media", + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOMediaBSDClient" + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + } + ], + }, + { + "IORegistryEntryName": "CMSIS-DAP", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOUSBHostHIDDevice", + "IORegistryEntryChildren": [ + {"IORegistryEntryName": "IOHIDInterface"}, + {"IORegistryEntryName": "IOHIDLibUserClient"}, + {"IORegistryEntryName": "IOHIDLibUserClient"}, + ], + } + ], + }, + { + "IORegistryEntryName": "mbed Serial Port", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [{"IORegistryEntryName": "AppleUSBACMControl"}], + }, + { + "IORegistryEntryName": "mbed Serial Port", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "AppleUSBACMData", + "idProduct": 516, + "idVendor": 3368, + "IORegistryEntryChildren": [ + { + "IORegistryEntryName": "IOModemSerialStreamSync", + "IORegistryEntryChildren": [ + { + "IODialinDevice": "/dev/tty.usbmodem1422", + "IORegistryEntryName": "IOSerialBSDClient", + } + ], + } + ], + } + ], + }, + ], } ] with patch("subprocess.Popen") as _popen: + def do_popen(command, *args, **kwargs): to_ret = MagicMock() to_ret.wait.return_value = 0 to_ret.stdout = BytesIO() - plistlib.dump( - {'diskutil': disks, - 'ioreg': usb_tree}[command[0]], - to_ret.stdout) + plistlib.dump({"diskutil": disks, "ioreg": usb_tree}[command[0]], to_ret.stdout) 
to_ret.stdout.seek(0) to_ret.communicate.return_value = (to_ret.stdout.getvalue(), "") return to_ret + _popen.side_effect = do_popen candidates = self.darwin.find_candidates() - self.assertIn({'mount_point': '/Volumes/DAPLINK', - 'serial_port': '/dev/tty.usbmodem1422', - 'target_id_usb_id': '0240000034544e45003a00048e3800525a91000097969900', - 'vendor_id': '0d28', - 'product_id': '0204'}, - candidates) + self.assertIn( + { + "mount_point": "/Volumes/DAPLINK", + "serial_port": "/dev/tty.usbmodem1422", + "target_id_usb_id": "0240000034544e45003a00048e3800525a91000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + candidates, + ) diff --git a/tools/python_tests/mbed_os_tools/detect/os_linux_generic.py b/tools/python_tests/mbed_os_tools/detect/os_linux_generic.py index ef1cad548b1..870e3da4a2d 100644 --- a/tools/python_tests/mbed_os_tools/detect/os_linux_generic.py +++ b/tools/python_tests/mbed_os_tools/detect/os_linux_generic.py @@ -21,8 +21,7 @@ class LinuxPortTestCase(unittest.TestCase): - ''' Basic test cases checking trivial asserts - ''' + """Basic test cases checking trivial asserts""" def setUp(self): self.linux_generic = MbedLsToolsLinuxGeneric() @@ -31,513 +30,486 @@ def tearDown(self): pass vfat_devices = [ - b'/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/usb3 
type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] def test_get_mount_point_basic(self): - with patch('mbed_os_tools.detect.linux.MbedLsToolsLinuxGeneric._run_cli_process') as _cliproc: - _cliproc.return_value = (b'\n'.join(self.vfat_devices), None, 0) + with patch("mbed_os_tools.detect.linux.MbedLsToolsLinuxGeneric._run_cli_process") as _cliproc: + _cliproc.return_value = (b"\n".join(self.vfat_devices), None, 0) mount_dict = dict(self.linux_generic._fat_mounts()) - _cliproc.assert_called_once_with('mount') - self.assertEqual('/media/usb0', mount_dict['/dev/sdb']) - self.assertEqual('/media/usb2', mount_dict['/dev/sdd']) - self.assertEqual('/media/usb3', mount_dict['/dev/sde']) - self.assertEqual('/media/usb1', mount_dict['/dev/sdc']) - + _cliproc.assert_called_once_with("mount") + self.assertEqual("/media/usb0", mount_dict["/dev/sdb"]) + self.assertEqual("/media/usb2", mount_dict["/dev/sdd"]) + self.assertEqual("/media/usb3", mount_dict["/dev/sde"]) + self.assertEqual("/media/usb1", mount_dict["/dev/sdc"]) vfat_devices_ext = [ - b'/dev/sdb on /media/MBED_xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/MBED___x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/MBED-xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/MBED_x-x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - - b'/dev/sda on /mnt/NUCLEO type vfat (rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)', - b'/dev/sdf on /mnt/NUCLEO_ type vfat 
(rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)', - b'/dev/sdg on /mnt/DAPLINK type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)', - b'/dev/sdh on /mnt/DAPLINK_ type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)', - b'/dev/sdi on /mnt/DAPLINK__ type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)', + b"/dev/sdb on /media/MBED_xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/MBED___x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/MBED-xxx type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/MBED_x-x type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sda on /mnt/NUCLEO type vfat (rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)", + b"/dev/sdf on /mnt/NUCLEO_ type vfat (rw,relatime,uid=999,fmask=0133,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,utf8,flush,errors=remount-ro,uhelper=ldm)", + b"/dev/sdg on /mnt/DAPLINK type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)", + b"/dev/sdh on /mnt/DAPLINK_ type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)", + b"/dev/sdi on /mnt/DAPLINK__ type vfat (rw,relatime,sync,uid=999,fmask=0022,dmask=0022,codepage=437,iocharset=ascii,shortname=mixed,errors=remount-ro,uhelper=ldm)", ] def 
test_get_mount_point_ext(self): - with patch('mbed_os_tools.detect.linux.MbedLsToolsLinuxGeneric._run_cli_process') as _cliproc: - _cliproc.return_value = (b'\n'.join(self.vfat_devices_ext), None, 0) + with patch("mbed_os_tools.detect.linux.MbedLsToolsLinuxGeneric._run_cli_process") as _cliproc: + _cliproc.return_value = (b"\n".join(self.vfat_devices_ext), None, 0) mount_dict = dict(self.linux_generic._fat_mounts()) - _cliproc.assert_called_once_with('mount') - self.assertEqual('/media/MBED_xxx', mount_dict['/dev/sdb']) - self.assertEqual('/media/MBED___x', mount_dict['/dev/sdd']) - self.assertEqual('/media/MBED-xxx', mount_dict['/dev/sde']) - self.assertEqual('/media/MBED_x-x', mount_dict['/dev/sdc']) - - self.assertEqual('/mnt/NUCLEO', mount_dict['/dev/sda']) - self.assertEqual('/mnt/NUCLEO_', mount_dict['/dev/sdf']) - self.assertEqual('/mnt/DAPLINK', mount_dict['/dev/sdg']) - self.assertEqual('/mnt/DAPLINK_', mount_dict['/dev/sdh']) - self.assertEqual('/mnt/DAPLINK__', mount_dict['/dev/sdi']) + _cliproc.assert_called_once_with("mount") + self.assertEqual("/media/MBED_xxx", mount_dict["/dev/sdb"]) + self.assertEqual("/media/MBED___x", mount_dict["/dev/sdd"]) + self.assertEqual("/media/MBED-xxx", mount_dict["/dev/sde"]) + self.assertEqual("/media/MBED_x-x", mount_dict["/dev/sdc"]) + + self.assertEqual("/mnt/NUCLEO", mount_dict["/dev/sda"]) + self.assertEqual("/mnt/NUCLEO_", mount_dict["/dev/sdf"]) + self.assertEqual("/mnt/DAPLINK", mount_dict["/dev/sdg"]) + self.assertEqual("/mnt/DAPLINK_", mount_dict["/dev/sdh"]) + self.assertEqual("/mnt/DAPLINK__", mount_dict["/dev/sdi"]) def find_candidates_with_patch(self, mount_list, link_dict, listdir_dict, open_dict): - if not getattr(sys.modules['os'], 'readlink', None): - sys.modules['os'].readlink = None + if not getattr(sys.modules["os"], "readlink", None): + sys.modules["os"].readlink = None - def do_open(path, mode='r'): - path = path.replace('\\', '/') + def do_open(path, mode="r"): + path = path.replace("\\", "/") 
file_object = mock_open(read_data=open_dict[path]).return_value file_object.__iter__.return_value = open_dict[path].splitlines(True) return file_object - with patch('mbed_os_tools.detect.linux.MbedLsToolsLinuxGeneric._run_cli_process') as _cliproc,\ - patch('os.readlink') as _readlink,\ - patch('os.listdir') as _listdir,\ - patch('os.path.abspath') as _abspath,\ - patch('mbed_os_tools.detect.linux.open', do_open) as _,\ - patch('os.path.isdir') as _isdir: + with patch("mbed_os_tools.detect.linux.MbedLsToolsLinuxGeneric._run_cli_process") as _cliproc, patch( + "os.readlink" + ) as _readlink, patch("os.listdir") as _listdir, patch("os.path.abspath") as _abspath, patch( + "mbed_os_tools.detect.linux.open", do_open + ) as _, patch("os.path.isdir") as _isdir: _isdir.return_value = True - _cliproc.return_value = (b'\n'.join(mount_list), None, 0) + _cliproc.return_value = (b"\n".join(mount_list), None, 0) + def do_readlink(link): # Fix for testing on Windows - link = link.replace('\\', '/') + link = link.replace("\\", "/") return link_dict[link] + _readlink.side_effect = do_readlink + def do_listdir(dir): # Fix for testing on Windows - dir = dir.replace('\\', '/') + dir = dir.replace("\\", "/") return listdir_dict[dir] + _listdir.side_effect = do_listdir + def do_abspath(dir): - _, path = os.path.splitdrive( - os.path.normpath(os.path.join(os.getcwd(), dir))) - path = path.replace('\\', '/') + _, path = os.path.splitdrive(os.path.normpath(os.path.join(os.getcwd(), dir))) + path = path.replace("\\", "/") return path + _abspath.side_effect = do_abspath ret_val = self.linux_generic.find_candidates() - _cliproc.assert_called_once_with('mount') + _cliproc.assert_called_once_with("mount") return ret_val - listdir_dict_rpi = { - '/dev/disk/by-id': [ - 'usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0', - 'usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0', - 'usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0', - 
'usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0', - 'usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0', - 'usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0', - 'usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0' - ], - '/dev/serial/by-id': [ - 'usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01', - 'usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01', - 'usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01', - 'usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01', - 'usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01' + "/dev/disk/by-id": [ + "usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0", + "usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0", + "usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0", + "usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0", + "usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0", + "usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0", + "usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0", ], - '/sys/class/block': [ - 'sdb', - 'sdc', - 'sdd', - 'sde', - 'sdf', - 'sdg', - 'sdh', + "/dev/serial/by-id": [ + "usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01", + "usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01", + "usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01", + "usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01", + "usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3': [ 
- 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8': [ - 'idVendor', - 'idProduct' - ] + "/sys/class/block": ["sdb", "sdc", "sdd", "sde", "sdf", "sdg", "sdh"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8": ["idVendor", "idProduct"], } open_dict_rpi = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor': '0d28\n', - 
'/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-7/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-8/idProduct": "0204\n", } link_dict_rpi = { - '/dev/disk/by-id/usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0': 
'../../sdb', - '/dev/disk/by-id/usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0': '../../sdc', - '/dev/disk/by-id/usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0': '../../sdd', - '/dev/disk/by-id/usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0': '../../sde', - '/dev/disk/by-id/usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0': '../../sdf', - '/dev/disk/by-id/usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0': '../../sdg', - '/dev/disk/by-id/usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0': '../../sdh', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01': '../../ttyACM0', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01': '../../ttyACM1', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01': '../../ttyACM3', - '/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01': '../../ttyACM2', - '/dev/serial/by-id/usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01': '../../ttyACM4', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/1-1.2.6:1.0/host8568/target8568:0:0/8568:0:0:0/block/sdb', - '/sys/class/block/sdc': '../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc', - '/sys/class/block/sdd': '../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd', - '/sys/class/block/sde': '../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde', - '/sys/class/block/sdf': '../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf', - '/sys/class/block/sdg': '../../devices/pci0000:00/0000:00:06.0/usb1/1-7/1-7:1.0/host8/target8:0:0/8:0:0:0/block/sdg', - '/sys/class/block/sdh': 
'../../devices/pci0000:00/0000:00:06.0/usb1/1-8/1-7:1.0/host9/target9:0:0/9:0:0:0/block/sdh' + "/dev/disk/by-id/usb-MBED_VFS_0240000028634e4500135006691700105f21000097969900-0:0": "../../sdb", + "/dev/disk/by-id/usb-MBED_VFS_0240000028884e450018700f6bf000338021000097969900-0:0": "../../sdc", + "/dev/disk/by-id/usb-MBED_VFS_0240000028884e45001f700f6bf000118021000097969900-0:0": "../../sdd", + "/dev/disk/by-id/usb-MBED_VFS_0240000028884e450036700f6bf000118021000097969900-0:0": "../../sde", + "/dev/disk/by-id/usb-MBED_VFS_0240000029164e45001b0012706e000df301000097969900-0:0": "../../sdf", + "/dev/disk/by-id/usb-MBED_VFS_0240000029164e45002f0012706e0006f301000097969900-0:0": "../../sdg", + "/dev/disk/by-id/usb-MBED_VFS_9900000031864e45000a100e0000003c0000000097969901-0:0": "../../sdh", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028634e4500135006691700105f21000097969900-if01": "../../ttyACM0", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450018700f6bf000338021000097969900-if01": "../../ttyACM1", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000028884e450036700f6bf000118021000097969900-if01": "../../ttyACM3", + "/dev/serial/by-id/usb-ARM_DAPLink_CMSIS-DAP_0240000029164e45001b0012706e000df301000097969900-if01": "../../ttyACM2", + "/dev/serial/by-id/usb-ARM_BBC_micro:bit_CMSIS-DAP_9900000031864e45000a100e0000003c0000000097969901-if01": "../../ttyACM4", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-1/1-1.2/1-1.2.6/1-1.2.6:1.0/host8568/target8568:0:0/8568:0:0:0/block/sdb", + "/sys/class/block/sdc": "../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc", + "/sys/class/block/sdd": "../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd", + "/sys/class/block/sde": "../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde", + "/sys/class/block/sdf": 
"../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf", + "/sys/class/block/sdg": "../../devices/pci0000:00/0000:00:06.0/usb1/1-7/1-7:1.0/host8/target8:0:0/8:0:0:0/block/sdg", + "/sys/class/block/sdh": "../../devices/pci0000:00/0000:00:06.0/usb1/1-8/1-7:1.0/host9/target9:0:0/9:0:0:0/block/sdh", } mount_list_rpi = [ - b'/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdg on /media/usb5 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdh on /media/usb6 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdg on /media/usb5 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdh on /media/usb6 type vfat 
(rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] + def test_get_detected_rpi(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_rpi, self.link_dict_rpi, self.listdir_dict_rpi, self.open_dict_rpi) - - self.assertIn({ - 'mount_point': '/media/usb0', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': '0240000028634e4500135006691700105f21000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb1', - 'serial_port': '/dev/ttyACM1', - 'target_id_usb_id': '0240000028884e450018700f6bf000338021000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb4', - 'serial_port': '/dev/ttyACM2', - 'target_id_usb_id': '0240000029164e45001b0012706e000df301000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb3', - 'serial_port': '/dev/ttyACM3', - 'target_id_usb_id': '0240000028884e450036700f6bf000118021000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - self.assertIn({ - 'mount_point': '/media/usb6', - 'serial_port': '/dev/ttyACM4', - 'target_id_usb_id': '9900000031864e45000a100e0000003c0000000097969901', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - + self.mount_list_rpi, self.link_dict_rpi, self.listdir_dict_rpi, self.open_dict_rpi + ) + + self.assertIn( + { + "mount_point": "/media/usb0", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "0240000028634e4500135006691700105f21000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb1", + "serial_port": "/dev/ttyACM1", + "target_id_usb_id": "0240000028884e450018700f6bf000338021000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb4", + "serial_port": "/dev/ttyACM2", + "target_id_usb_id": 
"0240000029164e45001b0012706e000df301000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb3", + "serial_port": "/dev/ttyACM3", + "target_id_usb_id": "0240000028884e450036700f6bf000118021000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb6", + "serial_port": "/dev/ttyACM4", + "target_id_usb_id": "9900000031864e45000a100e0000003c0000000097969901", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) listdir_dict_1 = { - '/dev/disk/by-id': [ - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5', - '/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C', - '/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5', - ], - '/dev/serial/by-id': [ - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01', + "/dev/disk/by-id": [ + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5", + "/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C", + "/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77", + 
"/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5", ], - '/sys/class/block': [ - 'sdb', - 'sdc' + "/dev/serial/by-id": [ + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3': [ - 'idVendor', - 'idProduct' - ] + "/sys/class/block": ["sdb", "sdc"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3": ["idVendor", "idProduct"], } link_dict_1 = { - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM': '../../sda', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1': '../../sda1', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2': '../../sda2', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5': '../../sda5', - '/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C': '../../sr0', - '/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0': '../../sdc', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0': '../../sdb', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77': '../../sda', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1': '../../sda1', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2': '../../sda2', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5': '../../sda5', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01': '../../ttyACM1', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01': '../../ttyACM0', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb', - '/sys/class/block/sdc': 
'../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc' + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM": "../../sda", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1": "../../sda1", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2": "../../sda2", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5": "../../sda5", + "/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C": "../../sr0", + "/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0": "../../sdc", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0": "../../sdb", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77": "../../sda", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1": "../../sda1", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2": "../../sda2", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5": "../../sda5", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01": "../../ttyACM1", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01": "../../ttyACM0", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb", + "/sys/class/block/sdc": "../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc", } open_dict_1 = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor": "0d28\n", + 
"/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct": "0204\n", } mount_list_1 = [ - b'/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] + def test_get_detected_1_k64f(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_1, self.link_dict_1, self.listdir_dict_1, self.open_dict_1) - self.assertIn({ - 'mount_point': '/media/usb0', - 'serial_port': '/dev/ttyACM1', - 'target_id_usb_id': '0240020152986E5EAF6693E6', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb1', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': 'A000000001', - 'vendor_id': '0d28', - 'product_id': '0204' - }, mbed_det) - + self.mount_list_1, self.link_dict_1, self.listdir_dict_1, self.open_dict_1 + ) + self.assertIn( + { + "mount_point": "/media/usb0", + "serial_port": "/dev/ttyACM1", + "target_id_usb_id": "0240020152986E5EAF6693E6", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb1", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "A000000001", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) listdir_dict_2 = { - '/dev/disk/by-id': [ - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM', - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1', - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2', - 'ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5', - 'ata-TSSTcorpDVD-ROM_TS-H352C', - 'usb-MBED_FDi_sk_A000000001-0:0', - 'usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0', - 
'usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0', - 'usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0', - 'usb-MBED_microcontroller_0672FF485649785087171742-0:0', - 'wwn-0x5000cca30ccffb77', - 'wwn-0x5000cca30ccffb77-part1', - 'wwn-0x5000cca30ccffb77-part2', - 'wwn-0x5000cca30ccffb77-part5' - ], - '/dev/serial/by-id': [ - 'usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01', - 'usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01', - 'usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01', - 'usb-MBED_MBED_CMSIS-DAP_A000000001-if01', - 'usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02' - ], - '/sys/class/block': [ - 'sdb', - 'sdc', - 'sdd', - 'sde', - 'sdf' + "/dev/disk/by-id": [ + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM", + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1", + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2", + "ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5", + "ata-TSSTcorpDVD-ROM_TS-H352C", + "usb-MBED_FDi_sk_A000000001-0:0", + "usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0", + "usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0", + "usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0", + "usb-MBED_microcontroller_0672FF485649785087171742-0:0", + "wwn-0x5000cca30ccffb77", + "wwn-0x5000cca30ccffb77-part1", + "wwn-0x5000cca30ccffb77-part2", + "wwn-0x5000cca30ccffb77-part5", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2': [ - 'idVendor', - 'idProduct' + "/dev/serial/by-id": [ + "usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01", + "usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01", + "usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01", + "usb-MBED_MBED_CMSIS-DAP_A000000001-if01", + "usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02", ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4': [ - 'idVendor', - 
'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5': [ - 'idVendor', - 'idProduct' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6': [ - 'idVendor', - 'idProduct' - ] + "/sys/class/block": ["sdb", "sdc", "sdd", "sde", "sdf"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5": ["idVendor", "idProduct"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6": ["idVendor", "idProduct"], } open_dict_2 = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct': '0204\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idVendor": "0d28\n", + 
"/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-3/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-4/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-5/idProduct": "0204\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-6/idProduct": "0204\n", } link_dict_2 = { - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM': '../../sda', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1': '../../sda1', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2': '../../sda2', - '/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5': '../../sda5', - '/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C': '../../sr0', - '/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0': '../../sdc', - '/dev/disk/by-id/usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0': '../../sde', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0': '../../sdb', - '/dev/disk/by-id/usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0': '../../sdf', - '/dev/disk/by-id/usb-MBED_microcontroller_0672FF485649785087171742-0:0': '../../sdd', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77': '../../sda', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1': '../../sda1', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2': '../../sda2', - '/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5': '../../sda5', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01': '../../ttyACM3', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01': '../../ttyACM1', - '/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01': '../../ttyACM4', - 
'/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01': '../../ttyACM0', - '/dev/serial/by-id/usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02': '../../ttyACM2', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb', - '/sys/class/block/sdc': '../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc', - '/sys/class/block/sdd': '../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd', - '/sys/class/block/sde': '../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde', - '/sys/class/block/sdf': '../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf' + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM": "../../sda", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part1": "../../sda1", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part2": "../../sda2", + "/dev/disk/by-id/ata-HDS728080PLA380_40Y9028LEN_PFDB32S7S44XLM-part5": "../../sda5", + "/dev/disk/by-id/ata-TSSTcorpDVD-ROM_TS-H352C": "../../sr0", + "/dev/disk/by-id/usb-MBED_FDi_sk_A000000001-0:0": "../../sdc", + "/dev/disk/by-id/usb-MBED_microcontroller_02400201489A1E6CB564E3D4-0:0": "../../sde", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152986E5EAF6693E6-0:0": "../../sdb", + "/dev/disk/by-id/usb-MBED_microcontroller_0240020152A06E54AF5E93EC-0:0": "../../sdf", + "/dev/disk/by-id/usb-MBED_microcontroller_0672FF485649785087171742-0:0": "../../sdd", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77": "../../sda", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part1": "../../sda1", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part2": "../../sda2", + "/dev/disk/by-id/wwn-0x5000cca30ccffb77-part5": "../../sda5", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_02400201489A1E6CB564E3D4-if01": "../../ttyACM3", + 
"/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152986E5EAF6693E6-if01": "../../ttyACM1", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_0240020152A06E54AF5E93EC-if01": "../../ttyACM4", + "/dev/serial/by-id/usb-MBED_MBED_CMSIS-DAP_A000000001-if01": "../../ttyACM0", + "/dev/serial/by-id/usb-STMicroelectronics_STM32_STLink_0672FF485649785087171742-if02": "../../ttyACM2", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb", + "/sys/class/block/sdc": "../../devices/pci0000:00/0000:00:06.0/usb1/1-3/1-3:1.0/host4/target4:0:0/4:0:0:0/block/sdc", + "/sys/class/block/sdd": "../../devices/pci0000:00/0000:00:06.0/usb1/1-4/1-4:1.0/host5/target5:0:0/5:0:0:0/block/sdd", + "/sys/class/block/sde": "../../devices/pci0000:00/0000:00:06.0/usb1/1-5/1-5:1.0/host6/target6:0:0/6:0:0:0/block/sde", + "/sys/class/block/sdf": "../../devices/pci0000:00/0000:00:06.0/usb1/1-6/1-6:1.0/host7/target7:0:0/7:0:0:0/block/sdf", } mount_list_2 = [ - b'/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sde on /media/usb3 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)', - b'/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)' + b"/dev/sdb on /media/usb0 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdc on /media/usb1 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdd on /media/usb2 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sde on /media/usb3 type vfat 
(rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", + b"/dev/sdf on /media/usb4 type vfat (rw,noexec,nodev,sync,noatime,nodiratime,gid=1000,uid=1000,dmask=000,fmask=000)", ] + def test_get_detected_2_k64f(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_2, self.link_dict_2, self.listdir_dict_2, self.open_dict_2) - - self.assertIn({ - 'mount_point': '/media/usb1', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': 'A000000001', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - self.assertIn({ - 'mount_point': '/media/usb2', - 'serial_port': '/dev/ttyACM2', - 'target_id_usb_id': '0672FF485649785087171742', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb4', - 'serial_port': '/dev/ttyACM4', - 'target_id_usb_id': '0240020152A06E54AF5E93EC', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb3', - 'serial_port': '/dev/ttyACM3', - 'target_id_usb_id': '02400201489A1E6CB564E3D4', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - self.assertIn({ - 'mount_point': '/media/usb0', - 'serial_port': '/dev/ttyACM1', - 'target_id_usb_id': '0240020152986E5EAF6693E6', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - + self.mount_list_2, self.link_dict_2, self.listdir_dict_2, self.open_dict_2 + ) + + self.assertIn( + { + "mount_point": "/media/usb1", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "A000000001", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + self.assertIn( + { + "mount_point": "/media/usb2", + "serial_port": "/dev/ttyACM2", + "target_id_usb_id": "0672FF485649785087171742", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb4", + "serial_port": "/dev/ttyACM4", + "target_id_usb_id": "0240020152A06E54AF5E93EC", + "vendor_id": "0d28", + "product_id": 
"0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb3", + "serial_port": "/dev/ttyACM3", + "target_id_usb_id": "02400201489A1E6CB564E3D4", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + self.assertIn( + { + "mount_point": "/media/usb0", + "serial_port": "/dev/ttyACM1", + "target_id_usb_id": "0240020152986E5EAF6693E6", + "vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) listdir_dict_4 = { - '/dev/disk/by-id': [ - 'ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001', - 'ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001', - 'usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0' - ], - '/dev/serial/by-id': [ - 'pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01' - ], - '/sys/class/block': [ - 'sdb' - ], - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2': [ - 'idVendor', - 'idProduct' + "/dev/disk/by-id": [ + "ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001", + "ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001", + "usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0", ], + "/dev/serial/by-id": ["pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01"], + "/sys/class/block": ["sdb"], + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2": ["idVendor", "idProduct"], } open_dict_4 = { - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor': '0d28\n', - '/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct': '0204\n' + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idVendor": "0d28\n", + "/sys/class/block/../../devices/pci0000:00/0000:00:06.0/usb1/1-2/idProduct": "0204\n", } link_dict_4 = { - '/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001': '../../sr0', - '/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001': '../../sr1', - 
'/dev/disk/by-id/usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0': '../../sdb', - '/dev/serial/by-id/pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01': '../../ttyACM0', - '/sys/class/block/sdb': '../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb' + "/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_00000000000000000001": "../../sr0", + "/dev/disk/by-id/ata-VMware_Virtual_SATA_CDRW_Drive_01000000000000000001": "../../sr1", + "/dev/disk/by-id/usb-MBED_VFS_0240000033514e45001f500585d40014e981000097969900-0:0": "../../sdb", + "/dev/serial/by-id/pci-ARM_DAPLink_CMSIS-DAP_0240000033514e45001f500585d40014e981000097969900-if01": "../../ttyACM0", + "/sys/class/block/sdb": "../../devices/pci0000:00/0000:00:06.0/usb1/1-2/1-2:1.0/host3/target3:0:0/3:0:0:0/block/sdb", } mount_list_4 = [ - b'/dev/sdb on /media/przemek/DAPLINK type vfat (rw,nosuid,nodev,relatime,uid=1000,gid=1000,fmask=0022,dmask=0022,codepage=437,iocharset=iso8859-1,shortname=mixed,showexec,utf8,flush,errors=remount-ro,uhelper=udisks2)' + b"/dev/sdb on /media/przemek/DAPLINK type vfat (rw,nosuid,nodev,relatime,uid=1000,gid=1000,fmask=0022,dmask=0022,codepage=437,iocharset=iso8859-1,shortname=mixed,showexec,utf8,flush,errors=remount-ro,uhelper=udisks2)" ] + def test_get_detected_3_k64f(self): mbed_det = self.find_candidates_with_patch( - self.mount_list_4, self.link_dict_4, self.listdir_dict_4, self.open_dict_4) - - self.assertIn({ - 'mount_point': '/media/przemek/DAPLINK', - 'serial_port': '/dev/ttyACM0', - 'target_id_usb_id': '0240000033514e45001f500585d40014e981000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, - mbed_det) - - -if __name__ == '__main__': + self.mount_list_4, self.link_dict_4, self.listdir_dict_4, self.open_dict_4 + ) + + self.assertIn( + { + "mount_point": "/media/przemek/DAPLINK", + "serial_port": "/dev/ttyACM0", + "target_id_usb_id": "0240000033514e45001f500585d40014e981000097969900", + 
"vendor_id": "0d28", + "product_id": "0204", + }, + mbed_det, + ) + + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/detect/os_win7.py b/tools/python_tests/mbed_os_tools/detect/os_win7.py index 48de3d34c0d..00721372106 100644 --- a/tools/python_tests/mbed_os_tools/detect/os_win7.py +++ b/tools/python_tests/mbed_os_tools/detect/os_win7.py @@ -21,21 +21,29 @@ # Mock the winreg and _winreg module for non-windows python _winreg = MagicMock() -sys.modules['_winreg'] = _winreg -sys.modules['winreg'] = _winreg +sys.modules["_winreg"] = _winreg +sys.modules["winreg"] = _winreg + +from mbed_os_tools.detect.windows import ( + MbedLsToolsWin7, + CompatibleIDsNotFoundException, + _get_cached_mounted_points, + _is_mbed_volume, + _get_values_with_numeric_keys, + _get_disks, + _get_usb_storage_devices, + _determine_valid_non_composite_devices, + _determine_subdevice_capability, +) -from mbed_os_tools.detect.windows import (MbedLsToolsWin7, CompatibleIDsNotFoundException, - _get_cached_mounted_points, _is_mbed_volume, _get_values_with_numeric_keys, - _get_disks, _get_usb_storage_devices, _determine_valid_non_composite_devices, - _determine_subdevice_capability) class Win7TestCase(unittest.TestCase): - """ Basic test cases checking trivial asserts - """ + """Basic test cases checking trivial asserts""" def setUp(self): self.lstool = MbedLsToolsWin7() import logging + logging.basicConfig() root_logger = logging.getLogger("mbedls") root_logger.setLevel(logging.DEBUG) @@ -66,20 +74,27 @@ def test_os_supported(self): def test_empty_reg(self): value_dict = { - (None, 'SYSTEM\\MountedDevices'): [ - ('\\DosDevices\\F:', - u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'.encode('utf-16le')), + (None, "SYSTEM\\MountedDevices"): [ + ( + "\\DosDevices\\F:", + 
"_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}".encode( + "utf-16le" + ), + ) ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\volume\\Enum'): [], - (None, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum'): [] + (None, "SYSTEM\\CurrentControlSet\\Services\\volume\\Enum"): [], + (None, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"): [], } self.setUpRegistry(value_dict, {}) candidates = self.lstool.find_candidates() - self.assertEqual(_winreg.OpenKey.mock_calls, [ - call(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\MountedDevices'), - call(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum'), - call(_winreg.HKEY_LOCAL_MACHINE, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum') - ]) + self.assertEqual( + _winreg.OpenKey.mock_calls, + [ + call(_winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\MountedDevices"), + call(_winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum"), + call(_winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"), + ], + ) self.assertEqual(candidates, []) def assertNoRegMut(self): @@ -95,45 +110,49 @@ def assertNoRegMut(self): def setUpRegistry(self, value_dict, key_dict): all_keys = set(value_dict.keys()) | set(key_dict.keys()) + def open_key_effect(key, subkey): - if ((key, subkey) in all_keys or key in all_keys): + if (key, subkey) in all_keys or key in all_keys: return key, subkey else: raise OSError((key, subkey)) + _winreg.OpenKey.side_effect = open_key_effect + def enum_value(key, index): try: a, b = value_dict[key][index] return a, b, None except KeyError: raise OSError + _winreg.EnumValue.side_effect = enum_value + def enum_key(key, index): try: return key_dict[key][index] except KeyError: raise OSError + _winreg.EnumKey.side_effect = enum_key + def query_value(key, subkey): try: return value_dict[(key, subkey)] except KeyError: raise OSError + 
_winreg.QueryValueEx.side_effect = query_value + def query_info_key(key): - return (len(key_dict.get(key, [])), - len(value_dict.get(key, []))) + return (len(key_dict.get(key, [])), len(value_dict.get(key, []))) + _winreg.QueryInfoKey.side_effect = query_info_key def test_get_values_with_numeric_keys(self): - dummy_key = 'dummy_key' - with patch('mbed_os_tools.detect.windows._iter_vals') as _iter_vals: - _iter_vals.return_value = [ - ('0', True), - ('1', True), - ('Count', False), - ('NextInstance', False), - ] + dummy_key = "dummy_key" + with patch("mbed_os_tools.detect.windows._iter_vals") as _iter_vals: + _iter_vals.return_value = [("0", True), ("1", True), ("Count", False), ("NextInstance", False)] values = _get_values_with_numeric_keys(dummy_key) _iter_vals.assert_called_once_with(dummy_key) @@ -146,64 +165,68 @@ def test_get_values_with_numeric_keys(self): self.assertEqual(_get_values_with_numeric_keys(dummy_key), []) def test_is_mbed_volume(self): - self.assertTrue(_is_mbed_volume(u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}')) - self.assertTrue(_is_mbed_volume(u'_??_USBSTOR#Disk&Ven_mbed&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}')) - self.assertFalse(_is_mbed_volume(u'_??_USBSTOR#Disk&Ven_Invalid&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}')) - self.assertFalse(_is_mbed_volume(u'_??_USBSTOR#Disk&Ven_invalid&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}')) + self.assertTrue( + _is_mbed_volume( + "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}" + ) + ) + self.assertTrue( + _is_mbed_volume( + 
"_??_USBSTOR#Disk&Ven_mbed&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}" + ) + ) + self.assertFalse( + _is_mbed_volume( + "_??_USBSTOR#Disk&Ven_Invalid&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}" + ) + ) + self.assertFalse( + _is_mbed_volume( + "_??_USBSTOR#Disk&Ven_invalid&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}" + ) + ) def test_get_cached_mount_points(self): - dummy_val = 'dummy_val' - volume_string_1 = u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}' - volume_string_2 = u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#1234000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}' - with patch('mbed_os_tools.detect.windows._iter_vals') as _iter_vals, \ - patch('mbed_os_tools.detect.windows._is_mbed_volume') as _is_mbed_volume: + dummy_val = "dummy_val" + volume_string_1 = "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0240000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}" + volume_string_2 = "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#1234000032044e4500367009997b00086781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}" + with patch("mbed_os_tools.detect.windows._iter_vals") as _iter_vals, patch( + "mbed_os_tools.detect.windows._is_mbed_volume" + ) as _is_mbed_volume: _winreg.OpenKey.return_value = dummy_val _iter_vals.return_value = [ - ('dummy_device', 'this is not a valid volume string'), - ('\\DosDevices\\D:', - volume_string_1.encode('utf-16le')), - ('\\DosDevices\\invalid_drive', - volume_string_2.encode('utf-16le')) + ("dummy_device", "this is not a valid volume string"), + ("\\DosDevices\\D:", volume_string_1.encode("utf-16le")), + ("\\DosDevices\\invalid_drive", volume_string_2.encode("utf-16le")), ] 
_is_mbed_volume.return_value = True result = _get_cached_mounted_points() - _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, - 'SYSTEM\\MountedDevices') + _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\MountedDevices") _iter_vals.assert_called_once_with(dummy_val) - _is_mbed_volume.assert_has_calls([ - call(volume_string_1), - call(volume_string_2) - ]) - self.assertEqual(result, [ - { - 'mount_point': 'D:', - 'volume_string': volume_string_1 - } - ]) + _is_mbed_volume.assert_has_calls([call(volume_string_1), call(volume_string_2)]) + self.assertEqual(result, [{"mount_point": "D:", "volume_string": volume_string_1}]) _winreg.OpenKey.reset_mock() _winreg.OpenKey.side_effect = OSError self.assertEqual(_get_cached_mounted_points(), []) - def test_get_disks(self): - dummy_key = 'dummy_key' - volume_strings = [ - 'dummy_volume_1', - 'dummy_volume_2', - ] - with patch('mbed_os_tools.detect.windows._get_values_with_numeric_keys') as _num_keys, \ - patch('mbed_os_tools.detect.windows._is_mbed_volume') as _is_mbed_volume: + dummy_key = "dummy_key" + volume_strings = ["dummy_volume_1", "dummy_volume_2"] + with patch("mbed_os_tools.detect.windows._get_values_with_numeric_keys") as _num_keys, patch( + "mbed_os_tools.detect.windows._is_mbed_volume" + ) as _is_mbed_volume: _winreg.OpenKey.return_value = dummy_key _num_keys.return_value = volume_strings _is_mbed_volume.return_value = True result = _get_disks() - _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, - 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum') + _winreg.OpenKey.assert_called_once_with( + _winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum" + ) _num_keys.assert_called_once_with(dummy_key) self.assertEqual(result, volume_strings) @@ -213,27 +236,27 @@ def test_get_disks(self): result = _get_disks() - _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, - 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum') + 
_winreg.OpenKey.assert_called_once_with( + _winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum" + ) _num_keys.assert_not_called() self.assertEqual(result, []) def test_get_usb_storage_devices(self): - dummy_key = 'dummy_key' - volume_strings = [ - 'dummy_usb_storage_1', - 'dummy_usb_storage_2', - ] - with patch('mbed_os_tools.detect.windows._get_values_with_numeric_keys') as _num_keys, \ - patch('mbed_os_tools.detect.windows._is_mbed_volume') as _is_mbed_volume: + dummy_key = "dummy_key" + volume_strings = ["dummy_usb_storage_1", "dummy_usb_storage_2"] + with patch("mbed_os_tools.detect.windows._get_values_with_numeric_keys") as _num_keys, patch( + "mbed_os_tools.detect.windows._is_mbed_volume" + ) as _is_mbed_volume: _winreg.OpenKey.return_value = dummy_key _num_keys.return_value = volume_strings _is_mbed_volume.return_value = True result = _get_usb_storage_devices() - _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, - 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum') + _winreg.OpenKey.assert_called_once_with( + _winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum" + ) _num_keys.assert_called_once_with(dummy_key) self.assertEqual(result, volume_strings) @@ -243,85 +266,67 @@ def test_get_usb_storage_devices(self): result = _get_usb_storage_devices() - _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, - 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum') + _winreg.OpenKey.assert_called_once_with( + _winreg.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum" + ) _num_keys.assert_not_called() self.assertEqual(result, []) def test_determine_valid_non_composite_devices(self): - dummy_full_path = 'dummy_full_path' - dummy_target_id = 'dummy_target_id' - dummy_mount_point = 'dummy_mount_point' - devices = [ - { - 'full_path': dummy_full_path, - 'entry_key_string': dummy_target_id - } - ] - target_id_usb_id_mount_point_map = { - dummy_target_id: 
dummy_mount_point - } - dummy_key = 'dummy_key' + dummy_full_path = "dummy_full_path" + dummy_target_id = "dummy_target_id" + dummy_mount_point = "dummy_mount_point" + devices = [{"full_path": dummy_full_path, "entry_key_string": dummy_target_id}] + target_id_usb_id_mount_point_map = {dummy_target_id: dummy_mount_point} + dummy_key = "dummy_key" _winreg.OpenKey.return_value = dummy_key - with patch('mbed_os_tools.detect.windows._determine_subdevice_capability') as _capability: - _capability.return_value = 'msd' + with patch("mbed_os_tools.detect.windows._determine_subdevice_capability") as _capability: + _capability.return_value = "msd" result = _determine_valid_non_composite_devices(devices, target_id_usb_id_mount_point_map) - device_key_string = 'SYSTEM\\CurrentControlSet\\Enum\\' + dummy_full_path - _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, - device_key_string) + device_key_string = "SYSTEM\\CurrentControlSet\\Enum\\" + dummy_full_path + _winreg.OpenKey.assert_called_once_with(_winreg.HKEY_LOCAL_MACHINE, device_key_string) _capability.assert_called_once_with(dummy_key) - self.assertEqual(result, { - dummy_target_id: { - 'target_id_usb_id': dummy_target_id, - 'mount_point': dummy_mount_point - } - }) + self.assertEqual( + result, {dummy_target_id: {"target_id_usb_id": dummy_target_id, "mount_point": dummy_mount_point}} + ) def test_determine_subdevice_capability(self): - dummy_key = 'dummy_key' + dummy_key = "dummy_key" _winreg.QueryValueEx.return_value = ( [ - u'USB\\DevClass_00&SubClass_00&Prot_00', - u'USB\\DevClass_00&SubClass_00', - u'USB\\DevClass_00', - u'USB\\COMPOSITE' + "USB\\DevClass_00&SubClass_00&Prot_00", + "USB\\DevClass_00&SubClass_00", + "USB\\DevClass_00", + "USB\\COMPOSITE", ], - 7 + 7, ) capability = _determine_subdevice_capability(dummy_key) - _winreg.QueryValueEx.assert_called_once_with(dummy_key, 'CompatibleIDs') - self.assertEqual(capability, 'composite') + _winreg.QueryValueEx.assert_called_once_with(dummy_key, 
"CompatibleIDs") + self.assertEqual(capability, "composite") _winreg.QueryValueEx.reset_mock() _winreg.QueryValueEx.return_value = ( - [ - u'USB\\Class_08&SubClass_06&Prot_50', - u'USB\\Class_08&SubClass_06', - u'USB\\Class_08' - ], - 7 + ["USB\\Class_08&SubClass_06&Prot_50", "USB\\Class_08&SubClass_06", "USB\\Class_08"], + 7, ) capability = _determine_subdevice_capability(dummy_key) - _winreg.QueryValueEx.assert_called_once_with(dummy_key, 'CompatibleIDs') - self.assertEqual(capability, 'msd') + _winreg.QueryValueEx.assert_called_once_with(dummy_key, "CompatibleIDs") + self.assertEqual(capability, "msd") _winreg.QueryValueEx.reset_mock() _winreg.QueryValueEx.return_value = ( - [ - u'USB\\Class_02&SubClass_02&Prot_01', - u'USB\\Class_02&SubClass_02', - u'USB\\Class_02' - ], - 7 + ["USB\\Class_02&SubClass_02&Prot_01", "USB\\Class_02&SubClass_02", "USB\\Class_02"], + 7, ) capability = _determine_subdevice_capability(dummy_key) - _winreg.QueryValueEx.assert_called_once_with(dummy_key, 'CompatibleIDs') - self.assertEqual(capability, 'serial') + _winreg.QueryValueEx.assert_called_once_with(dummy_key, "CompatibleIDs") + self.assertEqual(capability, "serial") _winreg.QueryValueEx.reset_mock() _winreg.QueryValueEx.side_effect = OSError @@ -331,57 +336,88 @@ def test_determine_subdevice_capability(self): except CompatibleIDsNotFoundException as e: exception = True self.assertTrue(exception) - _winreg.QueryValueEx.assert_called_once_with(dummy_key, 'CompatibleIDs') - + _winreg.QueryValueEx.assert_called_once_with(dummy_key, "CompatibleIDs") def test_one_composite_dev(self): value_dict = { - (None, 'SYSTEM\\MountedDevices'): [ - ('\\DosDevices\\C:', u'NOT A VALID MBED DRIVE'.encode('utf-16le')), - ('\\DosDevices\\F:', - u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'.encode('utf-16le')) + (None, "SYSTEM\\MountedDevices"): [ + ("\\DosDevices\\C:", "NOT A VALID MBED 
DRIVE".encode("utf-16le")), + ( + "\\DosDevices\\F:", + "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}".encode( + "utf-16le" + ), + ), ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum'): [ - ('0', 'USBSTOR\\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0') + (None, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum"): [ + ( + "0", + "USBSTOR\\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\9&215b8c47&0&0240000032044e4500257009997b00386781000097969900&0", + ) ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum'): [ - ('0', 'USB\\VID_0D28&PID_0204&MI_00\\8&26b12a60&0&0000') + (None, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"): [ + ("0", "USB\\VID_0D28&PID_0204&MI_00\\8&26b12a60&0&0000") ], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): [], - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'), - '0240000032044e4500257009997b00386781000097969900'), - 'ParentIdPrefix'): ('8&26b12a60&0', None), - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'), - '0240000032044e4500257009997b00386781000097969900'), - 'CompatibleIDs'): ([u'USB\\DevClass_00&SubClass_00&Prot_00', u'USB\\DevClass_00&SubClass_00', u'USB\\DevClass_00', u'USB\\COMPOSITE'], 7), - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00'), '8&26b12a60&0&0000'), 'CompatibleIDs'): ([u'USB\\Class_08&SubClass_06&Prot_50', u'USB\\Class_08&SubClass_06', u'USB\\Class_08'], 7), - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'), - '8&26b12a60&0&0001'), - 'CompatibleIDs'): ([u'USB\\CLASS_02&SUBCLASS_02&PROT_01', u'USB\\CLASS_02&SUBCLASS_02', u'USB\\CLASS_02'], 7), - ((((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'), - '8&26b12a60&0&0001'), - 'Device Parameters'), - 'PortName'): ('COM7', None) + (None, 
"SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [], + ( + ( + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"), + "0240000032044e4500257009997b00386781000097969900", + ), + "ParentIdPrefix", + ): ("8&26b12a60&0", None), + ( + ( + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"), + "0240000032044e4500257009997b00386781000097969900", + ), + "CompatibleIDs", + ): ( + [ + "USB\\DevClass_00&SubClass_00&Prot_00", + "USB\\DevClass_00&SubClass_00", + "USB\\DevClass_00", + "USB\\COMPOSITE", + ], + 7, + ), + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00"), "8&26b12a60&0&0000"), + "CompatibleIDs", + ): (["USB\\Class_08&SubClass_06&Prot_50", "USB\\Class_08&SubClass_06", "USB\\Class_08"], 7), + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"), "8&26b12a60&0&0001"), + "CompatibleIDs", + ): (["USB\\CLASS_02&SUBCLASS_02&PROT_01", "USB\\CLASS_02&SUBCLASS_02", "USB\\CLASS_02"], 7), + ( + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"), "8&26b12a60&0&0001"), + "Device Parameters", + ), + "PortName", + ): ("COM7", None), } key_dict = { - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): - ['0240000032044e4500257009997b00386781000097969900'], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00'): [], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'): [], - (((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01'), - '8&26b12a60&0&0001'), - 'Device Parameters'): [] + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [ + "0240000032044e4500257009997b00386781000097969900" + ], + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_00"): [], + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"): [], + ( + ((None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204&MI_01"), "8&26b12a60&0&0001"), + "Device Parameters", + ): [], } 
self.setUpRegistry(value_dict, key_dict) - with patch('mbed_os_tools.detect.windows.MbedLsToolsWin7._run_cli_process') as _cliproc: + with patch("mbed_os_tools.detect.windows.MbedLsToolsWin7._run_cli_process") as _cliproc: _cliproc.return_value = ("", "", 0) expected_info = { - 'mount_point': 'F:', - 'serial_port': 'COM7', - 'target_id_usb_id': u'0240000032044e4500257009997b00386781000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' + "mount_point": "F:", + "serial_port": "COM7", + "target_id_usb_id": "0240000032044e4500257009997b00386781000097969900", + "vendor_id": "0d28", + "product_id": "0204", } devices = self.lstool.find_candidates() @@ -390,36 +426,49 @@ def test_one_composite_dev(self): def test_one_non_composite_dev(self): value_dict = { - (None, 'SYSTEM\\MountedDevices'): [ - ('\\DosDevices\\C:', u'NOT A VALID MBED DRIVE'.encode('utf-16le')), - ('\\DosDevices\\F:', - u'_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0000000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'.encode('utf-16le')) + (None, "SYSTEM\\MountedDevices"): [ + ("\\DosDevices\\C:", "NOT A VALID MBED DRIVE".encode("utf-16le")), + ( + "\\DosDevices\\F:", + "_??_USBSTOR#Disk&Ven_MBED&Prod_VFS&Rev_0.1#0000000032044e4500257009997b00386781000097969900&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}".encode( + "utf-16le" + ), + ), ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum'): [ - ('0', 'USBSTOR\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\0000000032044e4500257009997b00386781000097969900&0') + (None, "SYSTEM\\CurrentControlSet\\Services\\Disk\\Enum"): [ + ("0", "USBSTOR\Disk&Ven_MBED&Prod_VFS&Rev_0.1\\0000000032044e4500257009997b00386781000097969900&0") ], - (None, 'SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum'): [ - ('0', 'USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900') + (None, "SYSTEM\\CurrentControlSet\\Services\\USBSTOR\\Enum"): [ + ("0", "USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900") ], - 
(None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): [], - ((None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900'), - 'CompatibleIDs'): ([u'USB\\Class_08&SubClass_06&Prot_50', u'USB\\Class_08&SubClass_06', u'USB\\Class_08'], 7) + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [], + ( + ( + None, + "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900", + ), + "CompatibleIDs", + ): (["USB\\Class_08&SubClass_06&Prot_50", "USB\\Class_08&SubClass_06", "USB\\Class_08"], 7), } key_dict = { - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204'): - ['0000000032044e4500257009997b00386781000097969900'], - (None, 'SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900'): [] + (None, "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204"): [ + "0000000032044e4500257009997b00386781000097969900" + ], + ( + None, + "SYSTEM\\CurrentControlSet\\Enum\\USB\\VID_0D28&PID_0204\\0000000032044e4500257009997b00386781000097969900", + ): [], } self.setUpRegistry(value_dict, key_dict) - with patch('mbed_os_tools.detect.windows.MbedLsToolsWin7._run_cli_process') as _cliproc: + with patch("mbed_os_tools.detect.windows.MbedLsToolsWin7._run_cli_process") as _cliproc: _cliproc.return_value = ("", "", 0) expected_info = { - 'mount_point': 'F:', - 'serial_port': None, - 'target_id_usb_id': u'0000000032044e4500257009997b00386781000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' + "mount_point": "F:", + "serial_port": None, + "target_id_usb_id": "0000000032044e4500257009997b00386781000097969900", + "vendor_id": "0d28", + "product_id": "0204", } devices = self.lstool.find_candidates() @@ -427,7 +476,7 @@ def test_one_non_composite_dev(self): self.assertNoRegMut() def test_mount_point_ready(self): - with patch('mbed_os_tools.detect.windows.MbedLsToolsWin7._run_cli_process') as 
_cliproc: + with patch("mbed_os_tools.detect.windows.MbedLsToolsWin7._run_cli_process") as _cliproc: _cliproc.return_value = ("dummy", "", 0) self.assertTrue(self.lstool.mount_point_ready("dummy")) @@ -437,5 +486,5 @@ def test_mount_point_ready(self): self.assertFalse(self.lstool.mount_point_ready("dummy")) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/detect/platform_database.py b/tools/python_tests/mbed_os_tools/detect/platform_database.py index 4921dc01ee6..265f489da7d 100644 --- a/tools/python_tests/mbed_os_tools/detect/platform_database.py +++ b/tools/python_tests/mbed_os_tools/detect/platform_database.py @@ -23,23 +23,22 @@ from mock import patch, MagicMock, DEFAULT from io import StringIO -from mbed_os_tools.detect.platform_database import PlatformDatabase, DEFAULT_PLATFORM_DB,\ - LOCAL_PLATFORM_DATABASE +from mbed_os_tools.detect.platform_database import PlatformDatabase, DEFAULT_PLATFORM_DB, LOCAL_PLATFORM_DATABASE try: unicode except NameError: unicode = str + class EmptyPlatformDatabaseTests(unittest.TestCase): - """ Basic test cases with an empty database - """ + """Basic test cases with an empty database""" def setUp(self): self.tempd_dir = tempfile.mkdtemp() - self.base_db_path = os.path.join(self.tempd_dir, 'base') - self.base_db = open(self.base_db_path, 'w+b') - self.base_db.write(b'{}') + self.base_db_path = os.path.join(self.tempd_dir, "base") + self.base_db = open(self.base_db_path, "w+b") + self.base_db.write(b"{}") self.base_db.seek(0) self.pdb = PlatformDatabase([self.base_db_path]) @@ -61,23 +60,22 @@ def test_broken_database_bad_json(self): """Verify that the platform database still works without a working backing file """ - self.base_db.write(b'{}') + self.base_db.write(b"{}") self.base_db.seek(0) self.pdb = PlatformDatabase([self.base_db_path]) self.pdb.add("1234", "MYTARGET") self.assertEqual(self.pdb.get("1234"), "MYTARGET") def test_broken_database(self): - 
"""Verify that the platform database correctly reset's its database - """ - with patch("mbed_os_tools.detect.platform_database.open") as _open,\ - patch("mbed_os_tools.detect.platform_database._older_than_me") as _older: + """Verify that the platform database correctly reset's its database""" + with patch("mbed_os_tools.detect.platform_database.open") as _open, patch( + "mbed_os_tools.detect.platform_database._older_than_me" + ) as _older: _older.return_value = False stringio = MagicMock() _open.side_effect = (IOError("Bogus"), stringio) self.pdb = PlatformDatabase([LOCAL_PLATFORM_DATABASE]) - stringio.__enter__.return_value.write.assert_called_with( - unicode(json.dumps(DEFAULT_PLATFORM_DB))) + stringio.__enter__.return_value.write.assert_called_with(unicode(json.dumps(DEFAULT_PLATFORM_DB))) self.pdb.add("1234", "MYTARGET") self.assertEqual(self.pdb.get("1234"), "MYTARGET") @@ -92,136 +90,125 @@ def test_extra_broken_database(self): self.assertEqual(self.pdb.get("1234"), "MYTARGET") def test_old_database(self): - """Verify that the platform database correctly updates's its database - """ - with patch("mbed_os_tools.detect.platform_database.open") as _open,\ - patch("mbed_os_tools.detect.platform_database.getmtime") as _getmtime: + """Verify that the platform database correctly updates's its database""" + with patch("mbed_os_tools.detect.platform_database.open") as _open, patch( + "mbed_os_tools.detect.platform_database.getmtime" + ) as _getmtime: stringio = MagicMock() _open.return_value = stringio _getmtime.side_effect = (0, 1000000) self.pdb = PlatformDatabase([LOCAL_PLATFORM_DATABASE]) - stringio.__enter__.return_value.write.assert_called_with( - unicode(json.dumps(DEFAULT_PLATFORM_DB))) + stringio.__enter__.return_value.write.assert_called_with(unicode(json.dumps(DEFAULT_PLATFORM_DB))) def test_bogus_database(self): - """Basic empty database test - """ + """Basic empty database test""" self.assertEqual(list(self.pdb.items()), []) 
self.assertEqual(list(self.pdb.all_ids()), []) - self.assertEqual(self.pdb.get('Also_Junk', None), None) + self.assertEqual(self.pdb.get("Also_Junk", None), None) def test_add(self): - """Test that what was added can later be queried - """ - self.assertEqual(self.pdb.get('4753', None), None) - self.pdb.add('4753', 'Test_Platform', permanent=False) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform') + """Test that what was added can later be queried""" + self.assertEqual(self.pdb.get("4753", None), None) + self.pdb.add("4753", "Test_Platform", permanent=False) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform") def test_remove(self): - """Test that once something is removed it no longer shows up when queried - """ - self.assertEqual(self.pdb.get('4753', None), None) - self.pdb.add('4753', 'Test_Platform', permanent=False) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform') - self.assertEqual(self.pdb.remove('4753', permanent=False), 'Test_Platform') - self.assertEqual(self.pdb.get('4753', None), None) + """Test that once something is removed it no longer shows up when queried""" + self.assertEqual(self.pdb.get("4753", None), None) + self.pdb.add("4753", "Test_Platform", permanent=False) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform") + self.assertEqual(self.pdb.remove("4753", permanent=False), "Test_Platform") + self.assertEqual(self.pdb.get("4753", None), None) def test_remove_all(self): - """Test that multiple entries can be removed at once - """ - self.assertEqual(self.pdb.get('4753', None), None) - self.assertEqual(self.pdb.get('4754', None), None) - self.pdb.add('4753', 'Test_Platform1', permanent=False) - self.pdb.add('4754', 'Test_Platform2', permanent=False) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform1') - self.assertEqual(self.pdb.get('4754', None), 'Test_Platform2') - self.pdb.remove('*', permanent=False) - self.assertEqual(self.pdb.get('4753', None), None) - 
self.assertEqual(self.pdb.get('4754', None), None) + """Test that multiple entries can be removed at once""" + self.assertEqual(self.pdb.get("4753", None), None) + self.assertEqual(self.pdb.get("4754", None), None) + self.pdb.add("4753", "Test_Platform1", permanent=False) + self.pdb.add("4754", "Test_Platform2", permanent=False) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform1") + self.assertEqual(self.pdb.get("4754", None), "Test_Platform2") + self.pdb.remove("*", permanent=False) + self.assertEqual(self.pdb.get("4753", None), None) + self.assertEqual(self.pdb.get("4754", None), None) def test_remove_permanent(self): """Test that once something is removed permanently it no longer shows up when queried """ - self.assertEqual(self.pdb.get('4753', None), None) - self.pdb.add('4753', 'Test_Platform', permanent=True) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform') + self.assertEqual(self.pdb.get("4753", None), None) + self.pdb.add("4753", "Test_Platform", permanent=True) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform") # Recreate platform database to simulate rerunning mbedls self.pdb = PlatformDatabase([self.base_db_path]) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform') - self.assertEqual(self.pdb.remove('4753', permanent=True), 'Test_Platform') - self.assertEqual(self.pdb.get('4753', None), None) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform") + self.assertEqual(self.pdb.remove("4753", permanent=True), "Test_Platform") + self.assertEqual(self.pdb.get("4753", None), None) # Recreate platform database to simulate rerunning mbedls self.pdb = PlatformDatabase([self.base_db_path]) - self.assertEqual(self.pdb.get('4753', None), None) + self.assertEqual(self.pdb.get("4753", None), None) def test_remove_all_permanent(self): - """Test that multiple entries can be removed permanently at once - """ - self.assertEqual(self.pdb.get('4753', None), None) - self.assertEqual(self.pdb.get('4754', None), None) 
- self.pdb.add('4753', 'Test_Platform1', permanent=True) - self.pdb.add('4754', 'Test_Platform2', permanent=True) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform1') - self.assertEqual(self.pdb.get('4754', None), 'Test_Platform2') + """Test that multiple entries can be removed permanently at once""" + self.assertEqual(self.pdb.get("4753", None), None) + self.assertEqual(self.pdb.get("4754", None), None) + self.pdb.add("4753", "Test_Platform1", permanent=True) + self.pdb.add("4754", "Test_Platform2", permanent=True) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform1") + self.assertEqual(self.pdb.get("4754", None), "Test_Platform2") # Recreate platform database to simulate rerunning mbedls self.pdb = PlatformDatabase([self.base_db_path]) - self.assertEqual(self.pdb.get('4753', None), 'Test_Platform1') - self.assertEqual(self.pdb.get('4754', None), 'Test_Platform2') - self.pdb.remove('*', permanent=True) - self.assertEqual(self.pdb.get('4753', None), None) - self.assertEqual(self.pdb.get('4754', None), None) + self.assertEqual(self.pdb.get("4753", None), "Test_Platform1") + self.assertEqual(self.pdb.get("4754", None), "Test_Platform2") + self.pdb.remove("*", permanent=True) + self.assertEqual(self.pdb.get("4753", None), None) + self.assertEqual(self.pdb.get("4754", None), None) # Recreate platform database to simulate rerunning mbedls self.pdb = PlatformDatabase([self.base_db_path]) - self.assertEqual(self.pdb.get('4753', None), None) - self.assertEqual(self.pdb.get('4754', None), None) + self.assertEqual(self.pdb.get("4753", None), None) + self.assertEqual(self.pdb.get("4754", None), None) def test_bogus_add(self): - """Test that add requires properly formatted platform ids - """ - self.assertEqual(self.pdb.get('NOTVALID', None), None) + """Test that add requires properly formatted platform ids""" + self.assertEqual(self.pdb.get("NOTVALID", None), None) with self.assertRaises(ValueError): - self.pdb.add('NOTVALID', 'Test_Platform', 
permanent=False) + self.pdb.add("NOTVALID", "Test_Platform", permanent=False) def test_bogus_remove(self): - """Test that removing a not present platform does nothing - """ - self.assertEqual(self.pdb.get('NOTVALID', None), None) - self.assertEqual(self.pdb.remove('NOTVALID', permanent=False), None) + """Test that removing a not present platform does nothing""" + self.assertEqual(self.pdb.get("NOTVALID", None), None) + self.assertEqual(self.pdb.remove("NOTVALID", permanent=False), None) def test_simplify_verbose_data(self): """Test that fetching a verbose entry without verbose data correctly returns just the 'platform_name' """ - platform_data = { - 'platform_name': 'VALID', - 'other_data': 'data' - } - self.pdb.add('1337', platform_data, permanent=False) - self.assertEqual(self.pdb.get('1337', verbose_data=True), platform_data) - self.assertEqual(self.pdb.get('1337'), platform_data['platform_name']) + platform_data = {"platform_name": "VALID", "other_data": "data"} + self.pdb.add("1337", platform_data, permanent=False) + self.assertEqual(self.pdb.get("1337", verbose_data=True), platform_data) + self.assertEqual(self.pdb.get("1337"), platform_data["platform_name"]) + class OverriddenPlatformDatabaseTests(unittest.TestCase): - """ Test that for one database overriding another - """ + """Test that for one database overriding another""" def setUp(self): self.temp_dir = tempfile.mkdtemp() - self.base_db_path = os.path.join(self.temp_dir, 'base') - self.base_db = open(self.base_db_path, 'w+b') - self.base_db.write(json.dumps(dict([('0123', 'Base_Platform')])). 
- encode('utf-8')) + self.base_db_path = os.path.join(self.temp_dir, "base") + self.base_db = open(self.base_db_path, "w+b") + self.base_db.write(json.dumps(dict([("0123", "Base_Platform")])).encode("utf-8")) self.base_db.seek(0) - self.overriding_db_path = os.path.join(self.temp_dir, 'overriding') - self.overriding_db = open(self.overriding_db_path, 'w+b') - self.overriding_db.write(b'{}') + self.overriding_db_path = os.path.join(self.temp_dir, "overriding") + self.overriding_db = open(self.overriding_db_path, "w+b") + self.overriding_db.write(b"{}") self.overriding_db.seek(0) - self.pdb = PlatformDatabase([self.overriding_db_path, self.base_db_path], - primary_database=self.overriding_db_path) + self.pdb = PlatformDatabase( + [self.overriding_db_path, self.base_db_path], primary_database=self.overriding_db_path + ) self.base_db.seek(0) self.overriding_db.seek(0) @@ -230,31 +217,25 @@ def tearDown(self): self.overriding_db.close() def assertBaseUnchanged(self): - """Assert that the base database has not changed - """ + """Assert that the base database has not changed""" self.base_db.seek(0) - self.assertEqual(self.base_db.read(), - json.dumps(dict([('0123', 'Base_Platform')])) - .encode('utf-8')) + self.assertEqual(self.base_db.read(), json.dumps(dict([("0123", "Base_Platform")])).encode("utf-8")) def assertOverrideUnchanged(self): - """Assert that the override database has not changed - """ + """Assert that the override database has not changed""" self.overriding_db.seek(0) - self.assertEqual(self.overriding_db.read(), b'{}') + self.assertEqual(self.overriding_db.read(), b"{}") def test_basline(self): - """Sanity check that the base database does what we expect - """ - self.assertEqual(list(self.pdb.items()), [('0123', 'Base_Platform')]) - self.assertEqual(list(self.pdb.all_ids()), ['0123']) + """Sanity check that the base database does what we expect""" + self.assertEqual(list(self.pdb.items()), [("0123", "Base_Platform")]) + 
self.assertEqual(list(self.pdb.all_ids()), ["0123"]) def test_add_non_override(self): - """Check that adding keys goes to the Override database - """ - self.pdb.add('1234', 'Another_Platform') - self.assertEqual(list(self.pdb.items()), [('1234', 'Another_Platform'), ('0123', 'Base_Platform')]) - self.assertEqual(set(self.pdb.all_ids()), set(['0123', '1234'])) + """Check that adding keys goes to the Override database""" + self.pdb.add("1234", "Another_Platform") + self.assertEqual(list(self.pdb.items()), [("1234", "Another_Platform"), ("0123", "Base_Platform")]) + self.assertEqual(set(self.pdb.all_ids()), set(["0123", "1234"])) self.assertBaseUnchanged() def test_load_override(self): @@ -262,14 +243,14 @@ def test_load_override(self): you can no longer query for the base database definition and that the override database was not written to disk """ - self.overriding_db.write(json.dumps(dict([('0123', 'Overriding_Platform')])). - encode('utf-8')) + self.overriding_db.write(json.dumps(dict([("0123", "Overriding_Platform")])).encode("utf-8")) self.overriding_db.seek(0) - self.pdb = PlatformDatabase([self.overriding_db_path, self.base_db_path], - primary_database=self.overriding_db_path) - self.assertIn(('0123', 'Overriding_Platform'), list(self.pdb.items())) - self.assertEqual(set(self.pdb.all_ids()), set(['0123'])) - self.assertEqual(self.pdb.get('0123'), 'Overriding_Platform') + self.pdb = PlatformDatabase( + [self.overriding_db_path, self.base_db_path], primary_database=self.overriding_db_path + ) + self.assertIn(("0123", "Overriding_Platform"), list(self.pdb.items())) + self.assertEqual(set(self.pdb.all_ids()), set(["0123"])) + self.assertEqual(self.pdb.get("0123"), "Overriding_Platform") self.assertBaseUnchanged() def test_add_override_permanent(self): @@ -277,14 +258,15 @@ def test_add_override_permanent(self): you can no longer query for the base database definition and that the override database was written to disk """ - self.pdb.add('0123', 
'Overriding_Platform', permanent=True) - self.assertIn(('0123', 'Overriding_Platform'), list(self.pdb.items())) - self.assertEqual(set(self.pdb.all_ids()), set(['0123'])) - self.assertEqual(self.pdb.get('0123'), 'Overriding_Platform') + self.pdb.add("0123", "Overriding_Platform", permanent=True) + self.assertIn(("0123", "Overriding_Platform"), list(self.pdb.items())) + self.assertEqual(set(self.pdb.all_ids()), set(["0123"])) + self.assertEqual(self.pdb.get("0123"), "Overriding_Platform") self.overriding_db.seek(0) - self.assertEqual(self.overriding_db.read(), - json.dumps(dict([('daplink', dict([('0123', 'Overriding_Platform')]))])) - .encode('utf-8')) + self.assertEqual( + self.overriding_db.read(), + json.dumps(dict([("daplink", dict([("0123", "Overriding_Platform")]))])).encode("utf-8"), + ) self.assertBaseUnchanged() def test_remove_override(self): @@ -292,12 +274,12 @@ def test_remove_override(self): the original base database definition and that that the override database was not written to disk """ - self.pdb.add('0123', 'Overriding_Platform') - self.assertIn(('0123', 'Overriding_Platform'), list(self.pdb.items())) - self.assertEqual(set(self.pdb.all_ids()), set(['0123'])) - self.assertEqual(self.pdb.get('0123'), 'Overriding_Platform') - self.assertEqual(self.pdb.remove('0123'), 'Overriding_Platform') - self.assertEqual(self.pdb.get('0123'), 'Base_Platform') + self.pdb.add("0123", "Overriding_Platform") + self.assertIn(("0123", "Overriding_Platform"), list(self.pdb.items())) + self.assertEqual(set(self.pdb.all_ids()), set(["0123"])) + self.assertEqual(self.pdb.get("0123"), "Overriding_Platform") + self.assertEqual(self.pdb.remove("0123"), "Overriding_Platform") + self.assertEqual(self.pdb.get("0123"), "Base_Platform") self.assertOverrideUnchanged() self.assertBaseUnchanged() @@ -306,8 +288,8 @@ def test_remove_from_base(self): the original base database definition and that that the base database was not written to disk """ - 
self.assertEqual(self.pdb.remove('0123'), 'Base_Platform') - self.assertEqual(self.pdb.get('0123'), None) + self.assertEqual(self.pdb.remove("0123"), "Base_Platform") + self.assertEqual(self.pdb.get("0123"), None) self.assertOverrideUnchanged() self.assertBaseUnchanged() @@ -316,20 +298,20 @@ def test_remove_from_base_permanent(self): the original base database definition and that that the base database was not modified on disk """ - self.assertEqual(self.pdb.remove('0123', permanent=True), 'Base_Platform') - self.assertEqual(self.pdb.get('0123'), None) + self.assertEqual(self.pdb.remove("0123", permanent=True), "Base_Platform") + self.assertEqual(self.pdb.get("0123"), None) self.assertBaseUnchanged() -class InternalLockingChecks(unittest.TestCase): +class InternalLockingChecks(unittest.TestCase): def setUp(self): - self.mocked_lock = patch('mbed_os_tools.detect.platform_database.InterProcessLock', spec=True).start() + self.mocked_lock = patch("mbed_os_tools.detect.platform_database.InterProcessLock", spec=True).start() self.acquire = self.mocked_lock.return_value.acquire self.release = self.mocked_lock.return_value.release - self.base_db_path = os.path.join(tempfile.mkdtemp(), 'base') - self.base_db = open(self.base_db_path, 'w+b') - self.base_db.write(b'{}') + self.base_db_path = os.path.join(tempfile.mkdtemp(), "base") + self.base_db = open(self.base_db_path, "w+b") + self.base_db.write(b"{}") self.base_db.seek(0) self.pdb = PlatformDatabase([self.base_db_path]) self.addCleanup(patch.stopall) @@ -338,33 +320,29 @@ def tearDown(self): self.base_db.close() def test_no_update(self): - """Test that no locks are used when no modifications are specified - """ - self.pdb.add('7155', 'Junk') + """Test that no locks are used when no modifications are specified""" + self.pdb.add("7155", "Junk") self.acquire.assert_not_called() self.release.assert_not_called() def test_update(self): - """Test that locks are used when modifications are specified - """ - self.pdb.add('7155', 
'Junk', permanent=True) - assert self.acquire.called, 'Lock acquire should have been called' + """Test that locks are used when modifications are specified""" + self.pdb.add("7155", "Junk", permanent=True) + assert self.acquire.called, "Lock acquire should have been called" assert self.release.called def test_update_fail_acquire(self): - """Test that the backing file is not updated when lock acquisition fails - """ + """Test that the backing file is not updated when lock acquisition fails""" self.acquire.return_value = False - self.pdb.add('7155', 'Junk', permanent=True) - assert self.acquire.called, 'Lock acquire should have been called' + self.pdb.add("7155", "Junk", permanent=True) + assert self.acquire.called, "Lock acquire should have been called" self.base_db.seek(0) - self.assertEqual(self.base_db.read(), b'{}') + self.assertEqual(self.base_db.read(), b"{}") def test_update_ambiguous(self): - """Test that the backing file is not updated when lock acquisition fails - """ + """Test that the backing file is not updated when lock acquisition fails""" self.pdb._prim_db = None - self.pdb.add('7155', 'Junk', permanent=True) + self.pdb.add("7155", "Junk", permanent=True) self.acquire.assert_not_called() self.release.assert_not_called() - self.assertEqual(self.base_db.read(), b'{}') + self.assertEqual(self.base_db.read(), b"{}") diff --git a/tools/python_tests/mbed_os_tools/detect/platform_detection.py b/tools/python_tests/mbed_os_tools/detect/platform_detection.py index 960553b4cda..e361c259639 100644 --- a/tools/python_tests/mbed_os_tools/detect/platform_detection.py +++ b/tools/python_tests/mbed_os_tools/detect/platform_detection.py @@ -20,13 +20,16 @@ from mbed_os_tools.detect.lstools_base import MbedLsToolsBase -TEST_DATA_PATH = 'test_data' +TEST_DATA_PATH = "test_data" + class DummyLsTools(MbedLsToolsBase): return_value = [] + def find_candidates(self): return self.return_value + try: basestring except NameError: @@ -39,12 +42,11 @@ def 
get_case_insensitive_path(path, file_name): if entry.lower() == file_name.lower(): return os.path.join(path, entry) - raise Exception('No matching file for %s found in $s' % (file_name, path)) + raise Exception("No matching file for %s found in $s" % (file_name, path)) class PlatformDetectionTestCase(unittest.TestCase): - """ Basic test cases checking trivial asserts - """ + """Basic test cases checking trivial asserts""" def setUp(self): self.base = DummyLsTools() @@ -54,13 +56,16 @@ def tearDown(self): def run_test(self, test_data_case, candidate_data, expected_data): # Add necessary candidate data - candidate_data['mount_point'] = 'dummy_mount_point' + candidate_data["mount_point"] = "dummy_mount_point" # Find the test data in the test_data folder test_script_path = os.path.dirname(os.path.abspath(__file__)) test_data_path = os.path.join(test_script_path, TEST_DATA_PATH) test_data_cases = os.listdir(test_data_path) - self.assertTrue(test_data_case in test_data_cases, 'Expected %s to be present in %s folder' % (test_data_case, test_data_path)) + self.assertTrue( + test_data_case in test_data_cases, + "Expected %s to be present in %s folder" % (test_data_case, test_data_path), + ) test_data_case_path = os.path.join(test_data_path, test_data_case) # NOTE a limitation of this mocked test is that it only allows mocking of one directory level. @@ -68,15 +73,17 @@ def run_test(self, test_data_case, candidate_data, expected_data): # If this changes in the future, this mocking framework can be extended to support this. 
test_data_case_file_names = os.listdir(test_data_case_path) - mocked_open_file_paths = [os.path.join(candidate_data['mount_point'], file_name ) for file_name in test_data_case_file_names] + mocked_open_file_paths = [ + os.path.join(candidate_data["mount_point"], file_name) for file_name in test_data_case_file_names + ] # Setup all the mocks self.base.return_value = [candidate_data] - def do_open(path, mode='r'): + def do_open(path, mode="r"): file_name = os.path.basename(path) try: - with open(get_case_insensitive_path(test_data_case_path, file_name), 'r') as test_data_file: + with open(get_case_insensitive_path(test_data_case_path, file_name), "r") as test_data_file: test_data_file_data = test_data_file.read() except OSError: raise OSError("(mocked open) No such file or directory: '%s'" % (path)) @@ -85,9 +92,9 @@ def do_open(path, mode='r'): file_object.__iter__.return_value = test_data_file_data.splitlines(True) return file_object - with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr,\ - patch('mbed_os_tools.detect.lstools_base.open', do_open) as _,\ - patch('mbed_os_tools.detect.lstools_base.listdir') as _listdir: + with patch("mbed_os_tools.detect.lstools_base.MbedLsToolsBase.mount_point_ready") as _mpr, patch( + "mbed_os_tools.detect.lstools_base.open", do_open + ) as _, patch("mbed_os_tools.detect.lstools_base.listdir") as _listdir: _mpr.return_value = True _listdir.return_value = test_data_case_file_names results = self.base.list_mbeds(read_details_txt=True) @@ -104,108 +111,116 @@ def do_open(path, mode='r'): if actual_value != expected_data[key]: differing_map[key] = (actual_value, expected_data[key]) - if differing_map: - differing_string = '' + differing_string = "" for differing_key in sorted(list(differing_map.keys())): actual, expected = differing_map[differing_key] differing_string += ' "%s": "%s" (expected "%s")\n' % (differing_key, actual, expected) - assert_string = 'Expected data mismatch:\n\n{\n%s}' % 
(differing_string) + assert_string = "Expected data mismatch:\n\n{\n%s}" % (differing_string) self.assertTrue(False, assert_string) - - def test_efm32pg_stk3401_jlink(self): - self.run_test('efm32pg_stk3401_jlink', { - 'target_id_usb_id': u'000440074453', - 'vendor_id': '1366', - 'product_id': '1015' - }, { - 'platform_name': 'EFM32PG_STK3401', - 'device_type': 'jlink', - 'target_id': '2035022D000122D5D475113A', - 'target_id_usb_id': '000440074453', - 'target_id_mbed_htm': '2035022D000122D5D475113A' - }) + self.run_test( + "efm32pg_stk3401_jlink", + {"target_id_usb_id": "000440074453", "vendor_id": "1366", "product_id": "1015"}, + { + "platform_name": "EFM32PG_STK3401", + "device_type": "jlink", + "target_id": "2035022D000122D5D475113A", + "target_id_usb_id": "000440074453", + "target_id_mbed_htm": "2035022D000122D5D475113A", + }, + ) def test_lpc1768(self): - self.run_test('lpc1768', { - 'target_id_usb_id': u'101000000000000000000002F7F20DF3', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 'platform_name': 'LPC1768', - 'device_type': 'daplink', - 'target_id': '101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000', - 'target_id_usb_id': '101000000000000000000002F7F20DF3', - 'target_id_mbed_htm': '101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000' - }) + self.run_test( + "lpc1768", + {"target_id_usb_id": "101000000000000000000002F7F20DF3", "vendor_id": "0d28", "product_id": "0204"}, + { + "platform_name": "LPC1768", + "device_type": "daplink", + "target_id": "101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000", + "target_id_usb_id": "101000000000000000000002F7F20DF3", + "target_id_mbed_htm": "101000000000000000000002F7F20DF3d51e6be5ac41795761dc44148e3b7000", + }, + ) def test_nucleo_f411re_stlink(self): - self.run_test('nucleo_f411re_stlink', { - 'target_id_usb_id': u'0671FF554856805087112815', - 'vendor_id': '0483', - 'product_id': '374b' - }, { - 'platform_name': 'NUCLEO_F411RE', - 'device_type': 'stlink', - 
'target_id': '07400221076061193824F764', - 'target_id_usb_id': '0671FF554856805087112815', - 'target_id_mbed_htm': '07400221076061193824F764' - }) + self.run_test( + "nucleo_f411re_stlink", + {"target_id_usb_id": "0671FF554856805087112815", "vendor_id": "0483", "product_id": "374b"}, + { + "platform_name": "NUCLEO_F411RE", + "device_type": "stlink", + "target_id": "07400221076061193824F764", + "target_id_usb_id": "0671FF554856805087112815", + "target_id_mbed_htm": "07400221076061193824F764", + }, + ) def test_nrf51_microbit(self): - self.run_test('nrf51_microbit', { - 'target_id_usb_id': u'9900007031324e45000f9019000000340000000097969901', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 'platform_name': 'NRF51_MICROBIT', - 'device_type': 'daplink', - 'target_id': '9900007031324e45000f9019000000340000000097969901', - 'target_id_usb_id': '9900007031324e45000f9019000000340000000097969901' - }) + self.run_test( + "nrf51_microbit", + { + "target_id_usb_id": "9900007031324e45000f9019000000340000000097969901", + "vendor_id": "0d28", + "product_id": "0204", + }, + { + "platform_name": "NRF51_MICROBIT", + "device_type": "daplink", + "target_id": "9900007031324e45000f9019000000340000000097969901", + "target_id_usb_id": "9900007031324e45000f9019000000340000000097969901", + }, + ) def test_k64f_daplink(self): - self.run_test('k64f_daplink', { - 'target_id_usb_id': u'0240000032044e45000a700a997b00356781000097969900', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 'platform_name': 'K64F', - 'device_type': 'daplink', - 'target_id': '0240000032044e45000a700a997b00356781000097969900', - 'target_id_usb_id': '0240000032044e45000a700a997b00356781000097969900', - 'target_id_mbed_htm': '0240000032044e45000a700a997b00356781000097969900' - }) + self.run_test( + "k64f_daplink", + { + "target_id_usb_id": "0240000032044e45000a700a997b00356781000097969900", + "vendor_id": "0d28", + "product_id": "0204", + }, + { + "platform_name": "K64F", + "device_type": "daplink", + 
"target_id": "0240000032044e45000a700a997b00356781000097969900", + "target_id_usb_id": "0240000032044e45000a700a997b00356781000097969900", + "target_id_mbed_htm": "0240000032044e45000a700a997b00356781000097969900", + }, + ) def test_nrf52_dk_daplink(self): - self.run_test('nrf52_dk_daplink', { - 'target_id_usb_id': u'110100004420312043574641323032203233303397969903', - 'vendor_id': '0d28', - 'product_id': '0204' - }, { - 'platform_name': 'NRF52_DK', - 'device_type': 'daplink', - 'target_id': '110100004420312043574641323032203233303397969903', - 'target_id_usb_id': '110100004420312043574641323032203233303397969903', - 'target_id_mbed_htm': '110100004420312043574641323032203233303397969903' - }) + self.run_test( + "nrf52_dk_daplink", + { + "target_id_usb_id": "110100004420312043574641323032203233303397969903", + "vendor_id": "0d28", + "product_id": "0204", + }, + { + "platform_name": "NRF52_DK", + "device_type": "daplink", + "target_id": "110100004420312043574641323032203233303397969903", + "target_id_usb_id": "110100004420312043574641323032203233303397969903", + "target_id_mbed_htm": "110100004420312043574641323032203233303397969903", + }, + ) def test_nrf52_dk_jlink(self): - self.run_test('nrf52_dk_jlink', { - 'target_id_usb_id': u'000682546728', - 'vendor_id': '1366', - 'product_id': '1015' - }, { - 'platform_name': 'NRF52_DK', - 'device_type': 'jlink', - 'target_id': '000682546728', - 'target_id_usb_id': '000682546728' - }) - - - -if __name__ == '__main__': + self.run_test( + "nrf52_dk_jlink", + {"target_id_usb_id": "000682546728", "vendor_id": "1366", "product_id": "1015"}, + { + "platform_name": "NRF52_DK", + "device_type": "jlink", + "target_id": "000682546728", + "target_id_usb_id": "000682546728", + }, + ) + + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/test/host_test_black_box.py b/tools/python_tests/mbed_os_tools/test/host_test_black_box.py index a4bdf89feab..a1cff4c9927 100644 --- 
a/tools/python_tests/mbed_os_tools/test/host_test_black_box.py +++ b/tools/python_tests/mbed_os_tools/test/host_test_black_box.py @@ -16,7 +16,7 @@ import unittest from copy import copy from mbed_os_tools.test import init_host_test_cli_params -from mbed_os_tools.test.host_tests_runner.host_test_default import DefaultTestSelector +from mbed_os_tools.test.host_tests_runner.host_test_default import DefaultTestSelector from .mocks.environment.linux import MockTestEnvironmentLinux from .mocks.environment.darwin import MockTestEnvironmentDarwin @@ -30,8 +30,8 @@ } mock_image_path = "BUILD/tests/K64F/GCC_ARM/TESTS/network/interface/interface.bin" -class BlackBoxHostTestTestCase(unittest.TestCase): +class BlackBoxHostTestTestCase(unittest.TestCase): def _run_host_test(self, environment): with environment as _env: test_selector = DefaultTestSelector(init_host_test_cli_params()) @@ -41,22 +41,17 @@ def _run_host_test(self, environment): self.assertEqual(result, 0) def test_host_test_linux(self): - self._run_host_test( - MockTestEnvironmentLinux(self, mock_platform_info, mock_image_path) - ) + self._run_host_test(MockTestEnvironmentLinux(self, mock_platform_info, mock_image_path)) def test_host_test_darwin(self): - self._run_host_test( - MockTestEnvironmentDarwin(self, mock_platform_info, mock_image_path) - ) + self._run_host_test(MockTestEnvironmentDarwin(self, mock_platform_info, mock_image_path)) def test_host_test_windows(self): win_mock_platform_info = copy(mock_platform_info) win_mock_platform_info["serial_port"] = "COM5" - self._run_host_test( - MockTestEnvironmentWindows(self, win_mock_platform_info, mock_image_path) - ) + self._run_host_test(MockTestEnvironmentWindows(self, win_mock_platform_info, mock_image_path)) + -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/test/host_test_default.py b/tools/python_tests/mbed_os_tools/test/host_test_default.py index ef9836ec7a3..05d4ba0832a 100644 --- 
a/tools/python_tests/mbed_os_tools/test/host_test_default.py +++ b/tools/python_tests/mbed_os_tools/test/host_test_default.py @@ -18,13 +18,8 @@ class HostTestDefaultTestCase(unittest.TestCase): - def test_os_info(self): - expected = { - "grm_module" : "module_name", - "grm_host" : "10.2.123.43", - "grm_port" : "3334", - } + expected = {"grm_module": "module_name", "grm_host": "10.2.123.43", "grm_port": "3334"} # Case that includes an IP address but no protocol arg = [expected["grm_module"], expected["grm_host"], expected["grm_port"]] @@ -50,5 +45,6 @@ def test_os_info(self): result = DefaultTestSelector._parse_grm(":".join(arg)) self.assertEqual(result, expected) -if __name__ == '__main__': + +if __name__ == "__main__": unittest.main() diff --git a/tools/python_tests/mbed_os_tools/test/mocks/environment/__init__.py b/tools/python_tests/mbed_os_tools/test/mocks/environment/__init__.py index adb2527befa..b2227af70d0 100644 --- a/tools/python_tests/mbed_os_tools/test/mocks/environment/__init__.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/environment/__init__.py @@ -23,8 +23,8 @@ from ..mbed_device import MockMbedDevice from ..process import MockProcess -class MockTestEnvironment(object): +class MockTestEnvironment(object): def __init__(self, test_case, platform_info, image_path): self._test_case = test_case self._tempdir = tempfile.mkdtemp() @@ -34,51 +34,46 @@ def __init__(self, test_case, platform_info, image_path): # Clean and retarget path to tempdir self._image_path = self._clean_path(image_path) - self._platform_info['mount_point'] = self._clean_path( - self._platform_info['mount_point'] - ) + self._platform_info["mount_point"] = self._clean_path(self._platform_info["mount_point"]) # Need to remove the drive letter in this case - self._platform_info['serial_port'] = os.path.splitdrive( - self._clean_path(self._platform_info['serial_port']) - )[1] + self._platform_info["serial_port"] = 
os.path.splitdrive(self._clean_path(self._platform_info["serial_port"]))[1] args = ( - 'mbedhtrun -m {} -p {}:9600 -f ' - '"{}" -e "TESTS/host_tests" -d {} -c default ' - '-t {} -r default ' - '-C 4 --sync 5 -P 60' - ).format( - self._platform_info['platform_name'], - self._platform_info['serial_port'], - self._image_path, - self._platform_info['mount_point'], - self._platform_info['target_id'] - ).split() - self.patch('sys.argv', new=args) + ( + "mbedhtrun -m {} -p {}:9600 -f " + '"{}" -e "TESTS/host_tests" -d {} -c default ' + "-t {} -r default " + "-C 4 --sync 5 -P 60" + ) + .format( + self._platform_info["platform_name"], + self._platform_info["serial_port"], + self._image_path, + self._platform_info["mount_point"], + self._platform_info["target_id"], + ) + .split() + ) + self.patch("sys.argv", new=args) # Mock detect detect_mock = MagicMock() - detect_mock.return_value.list_mbeds.return_value = [ - self._platform_info - ] - self.patch('mbed_os_tools.detect.create', new=detect_mock) + detect_mock.return_value.list_mbeds.return_value = [self._platform_info] + self.patch("mbed_os_tools.detect.create", new=detect_mock) # Mock process calls and move them to threads to preserve mocks self.patch( - 'mbed_os_tools.test.host_tests_runner.host_test_default.Process', - new=MagicMock(side_effect=self._process_side_effect) - ) - self.patch( - 'mbed_os_tools.test.host_tests_plugins.host_test_plugins.call', - new=MagicMock(return_value=0) + "mbed_os_tools.test.host_tests_runner.host_test_default.Process", + new=MagicMock(side_effect=self._process_side_effect), ) + self.patch("mbed_os_tools.test.host_tests_plugins.host_test_plugins.call", new=MagicMock(return_value=0)) mock_serial = MockSerial() mock_device = MockMbedDevice(mock_serial) self.patch( - 'mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_serial.Serial', - new=MagicMock(return_value=mock_serial) + "mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_serial.Serial", + 
new=MagicMock(return_value=mock_serial), ) def _clean_path(self, path): @@ -95,10 +90,10 @@ def patch(self, path, **kwargs): def __enter__(self): os.makedirs(os.path.dirname(self._image_path)) - with open(self._image_path, 'w') as _: + with open(self._image_path, "w") as _: pass - os.makedirs(self._platform_info['mount_point']) + os.makedirs(self._platform_info["mount_point"]) for path, patcher in self._patch_definitions: self.patches[path] = patcher.start() diff --git a/tools/python_tests/mbed_os_tools/test/mocks/environment/darwin.py b/tools/python_tests/mbed_os_tools/test/mocks/environment/darwin.py index e0b0346a7c1..2098f44150a 100644 --- a/tools/python_tests/mbed_os_tools/test/mocks/environment/darwin.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/environment/darwin.py @@ -18,16 +18,12 @@ from .posix import MockTestEnvironmentPosix -class MockTestEnvironmentDarwin(MockTestEnvironmentPosix): +class MockTestEnvironmentDarwin(MockTestEnvironmentPosix): def __init__(self, test_case, platform_info, image_path): super().__init__(test_case, platform_info, image_path) - self.patch( - 'os.uname', - new=MagicMock(return_value=('Darwin',)), - create=True - ) + self.patch("os.uname", new=MagicMock(return_value=("Darwin",)), create=True) def __exit__(self, type, value, traceback): super().__exit__(type, value, traceback) @@ -36,9 +32,7 @@ def __exit__(self, type, value, traceback): return False # Assert for proper image copy - mocked_call = self.patches[ - 'mbed_os_tools.test.host_tests_plugins.host_test_plugins.call' - ] + mocked_call = self.patches["mbed_os_tools.test.host_tests_plugins.host_test_plugins.call"] second_call_args = mocked_call.call_args_list[1][0][0] self._test_case.assertEqual(second_call_args, ["sync"]) diff --git a/tools/python_tests/mbed_os_tools/test/mocks/environment/linux.py b/tools/python_tests/mbed_os_tools/test/mocks/environment/linux.py index 1c91f8bbd07..98fe828e808 100644 --- 
a/tools/python_tests/mbed_os_tools/test/mocks/environment/linux.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/environment/linux.py @@ -19,16 +19,12 @@ from .posix import MockTestEnvironmentPosix -class MockTestEnvironmentLinux(MockTestEnvironmentPosix): +class MockTestEnvironmentLinux(MockTestEnvironmentPosix): def __init__(self, test_case, platform_info, image_path): super().__init__(test_case, platform_info, image_path) - self.patch( - 'os.uname', - new=MagicMock(return_value=('Linux',)), - create=True - ) + self.patch("os.uname", new=MagicMock(return_value=("Linux",)), create=True) def __exit__(self, type, value, traceback): super().__exit__(type, value, traceback) @@ -37,22 +33,14 @@ def __exit__(self, type, value, traceback): return False # Assert for proper image copy - mocked_call = self.patches[ - 'mbed_os_tools.test.host_tests_plugins.host_test_plugins.call' - ] + mocked_call = self.patches["mbed_os_tools.test.host_tests_plugins.host_test_plugins.call"] second_call_args = mocked_call.call_args_list[1][0][0] destination_path = os.path.normpath( - os.path.join( - self._platform_info["mount_point"], - os.path.basename(self._image_path) - ) + os.path.join(self._platform_info["mount_point"], os.path.basename(self._image_path)) ) - self._test_case.assertEqual( - second_call_args, - ["sync", "-f", destination_path] - ) + self._test_case.assertEqual(second_call_args, ["sync", "-f", destination_path]) # Ensure only two subprocesses were started self._test_case.assertEqual(len(mocked_call.call_args_list), 2) diff --git a/tools/python_tests/mbed_os_tools/test/mocks/environment/posix.py b/tools/python_tests/mbed_os_tools/test/mocks/environment/posix.py index 6806b5764c6..8d240c26c86 100644 --- a/tools/python_tests/mbed_os_tools/test/mocks/environment/posix.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/environment/posix.py @@ -18,12 +18,12 @@ from . 
import MockTestEnvironment -class MockTestEnvironmentPosix(MockTestEnvironment): +class MockTestEnvironmentPosix(MockTestEnvironment): def __init__(self, test_case, platform_info, image_path): super().__init__(test_case, platform_info, image_path) - self.patch('os.name', new='posix') + self.patch("os.name", new="posix") def __exit__(self, type, value, traceback): super().__exit__(type, value, traceback) @@ -32,9 +32,7 @@ def __exit__(self, type, value, traceback): return False # Assert for proper image copy - mocked_call = self.patches[ - 'mbed_os_tools.test.host_tests_plugins.host_test_plugins.call' - ] + mocked_call = self.patches["mbed_os_tools.test.host_tests_plugins.host_test_plugins.call"] first_call_args = mocked_call.call_args_list[0][0][0] self._test_case.assertEqual(first_call_args[0], "cp") diff --git a/tools/python_tests/mbed_os_tools/test/mocks/environment/windows.py b/tools/python_tests/mbed_os_tools/test/mocks/environment/windows.py index 590dc4f425f..5d8570dde92 100644 --- a/tools/python_tests/mbed_os_tools/test/mocks/environment/windows.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/environment/windows.py @@ -18,12 +18,12 @@ from . 
import MockTestEnvironment -class MockTestEnvironmentWindows(MockTestEnvironment): +class MockTestEnvironmentWindows(MockTestEnvironment): def __init__(self, test_case, platform_info, image_path): super().__init__(test_case, platform_info, image_path) - self.patch('os.name', new='nt') + self.patch("os.name", new="nt") def __exit__(self, type, value, traceback): super().__exit__(type, value, traceback) @@ -32,9 +32,7 @@ def __exit__(self, type, value, traceback): return False # Assert for proper image copy - mocked_call = self.patches[ - 'mbed_os_tools.test.host_tests_plugins.host_test_plugins.call' - ] + mocked_call = self.patches["mbed_os_tools.test.host_tests_plugins.host_test_plugins.call"] first_call_args = mocked_call.call_args_list[0][0][0] self._test_case.assertEqual(first_call_args[0], "copy") diff --git a/tools/python_tests/mbed_os_tools/test/mocks/mbed_device.py b/tools/python_tests/mbed_os_tools/test/mocks/mbed_device.py index d1dde7119b8..72b76d39669 100644 --- a/tools/python_tests/mbed_os_tools/test/mocks/mbed_device.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/mbed_device.py @@ -15,8 +15,8 @@ import re + class MockMbedDevice(object): - KV_REGEX = re.compile("\{\{([\w\d_-]+);([^\}]+)\}\}") def __init__(self, serial): @@ -46,8 +46,5 @@ def on_write(self, data): def on_sync(self): self._serial.downstream_write( - "{{__timeout;15}}\r\n" - "{{__host_test_name;default_auto}}\r\n" - "{{end;success}}\n" - "{{__exit;0}}\r\n" + "{{__timeout;15}}\r\n{{__host_test_name;default_auto}}\r\n{{end;success}}\n{{__exit;0}}\r\n" ) diff --git a/tools/python_tests/mbed_os_tools/test/mocks/process.py b/tools/python_tests/mbed_os_tools/test/mocks/process.py index a7b7dc937d7..ff79d6b7561 100644 --- a/tools/python_tests/mbed_os_tools/test/mocks/process.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/process.py @@ -17,6 +17,7 @@ from builtins import super from threading import Thread + class MockProcess(Thread): def __init__(self, target=None, args=None): 
super().__init__(target=target, args=args) diff --git a/tools/python_tests/mbed_os_tools/test/mocks/serial.py b/tools/python_tests/mbed_os_tools/test/mocks/serial.py index 777a552b7d2..af397f9116d 100644 --- a/tools/python_tests/mbed_os_tools/test/mocks/serial.py +++ b/tools/python_tests/mbed_os_tools/test/mocks/serial.py @@ -15,6 +15,7 @@ from builtins import super + class MockSerial(object): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -27,7 +28,7 @@ def __init__(self, *args, **kwargs): self._upstream_write_cb = None def read(self, count): - contents = self._rx_buffer[self._rx_counter:count] + contents = self._rx_buffer[self._rx_counter : count] self._rx_counter += len(contents) return contents diff --git a/tools/python_tests/mbed_os_tools/test/test_conn_primitive_serial.py b/tools/python_tests/mbed_os_tools/test/test_conn_primitive_serial.py index 30d57ae2bc9..6c99d8c304a 100644 --- a/tools/python_tests/mbed_os_tools/test/test_conn_primitive_serial.py +++ b/tools/python_tests/mbed_os_tools/test/test_conn_primitive_serial.py @@ -19,6 +19,7 @@ from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_serial import SerialConnectorPrimitive from mbed_os_tools.test.host_tests_conn_proxy.conn_primitive import ConnectorPrimitiveException + @mock.patch("mbed_os_tools.test.host_tests_conn_proxy.conn_primitive_serial.Serial") @mock.patch("mbed_os_tools.test.host_tests_plugins.host_test_plugins.detect") class ConnPrimitiveSerialTestCase(unittest.TestCase): @@ -33,9 +34,7 @@ def test_provided_serial_port_used_with_target_id(self, mock_detect, mock_serial # check_serial_port_ready() function we are testing will sleep waiting # for the serial port to become ready. mock_detect.create().list_mbeds.return_value = [ - {"target_id": target_id, - "serial_port": port, - "platform_name": platform_name}, + {"target_id": target_id, "serial_port": port, "platform_name": platform_name} ] # Set skip_reset to avoid the use of a physical serial port. 
@@ -58,9 +57,7 @@ def test_discovers_serial_port_with_target_id(self, mock_detect, mock_serial): baudrate = "9600" mock_detect.create().list_mbeds.return_value = [ - {"target_id": target_id, - "serial_port": port, - "platform_name": platform_name}, + {"target_id": target_id, "serial_port": port, "platform_name": platform_name} ] # Set skip_reset to avoid the use of a physical serial port. Don't pass @@ -82,5 +79,6 @@ def test_discovers_serial_port_with_target_id(self, mock_detect, mock_serial): mock_detect.create().list_mbeds.assert_called_once() -if __name__ == '__main__': - unittest.main() + +if __name__ == "__main__": + unittest.main() diff --git a/tools/python_tests/mbed_os_tools/test/test_mbed_base.py b/tools/python_tests/mbed_os_tools/test/test_mbed_base.py index 56daa4b26b6..5e59a737298 100644 --- a/tools/python_tests/mbed_os_tools/test/test_mbed_base.py +++ b/tools/python_tests/mbed_os_tools/test/test_mbed_base.py @@ -20,6 +20,7 @@ from mbed_os_tools.test.host_tests_runner.mbed_base import Mbed + class TemporaryDirectory(object): def __init__(self): self.fname = "tempdir" @@ -31,12 +32,11 @@ def __enter__(self): def __exit__(self, *args, **kwargs): shutil.rmtree(self.fname) + @mock.patch("mbed_os_tools.test.host_tests_runner.mbed_base.ht_plugins") @mock.patch("mbed_os_tools.test.host_tests_runner.mbed_base.detect") class TestMbed(unittest.TestCase): - def test_skips_discover_mbed_if_non_mbed_copy_method_used( - self, mock_detect, mock_ht_plugins - ): + def test_skips_discover_mbed_if_non_mbed_copy_method_used(self, mock_detect, mock_ht_plugins): with TemporaryDirectory() as tmpdir: image_path = os.path.join(tmpdir, "test.elf") with open(image_path, "w") as f: @@ -71,9 +71,7 @@ def test_skips_discover_mbed_if_non_mbed_copy_method_used( format=options.format, ) - def test_discovers_mbed_if_mbed_copy_method_used( - self, mock_detect, mock_ht_plugins - ): + def test_discovers_mbed_if_mbed_copy_method_used(self, mock_detect, mock_ht_plugins): with 
TemporaryDirectory() as tmpdir: image_path = os.path.join(tmpdir, "test.elf") with open(image_path, "w") as f: diff --git a/tools/python_tests/mbed_tools/build/__init__.py b/tools/python_tests/mbed_tools/build/__init__.py index 9c276726b5f..1879be349a3 100644 --- a/tools/python_tests/mbed_tools/build/__init__.py +++ b/tools/python_tests/mbed_tools/build/__init__.py @@ -1,4 +1,4 @@ # # Copyright (c) 2024 Jamie Smith. All rights reserved. # SPDX-License-Identifier: Apache-2.0 -# \ No newline at end of file +# diff --git a/tools/python_tests/mbed_tools/build/_internal/__init__.py b/tools/python_tests/mbed_tools/build/_internal/__init__.py index 9c276726b5f..1879be349a3 100644 --- a/tools/python_tests/mbed_tools/build/_internal/__init__.py +++ b/tools/python_tests/mbed_tools/build/_internal/__init__.py @@ -1,4 +1,4 @@ # # Copyright (c) 2024 Jamie Smith. All rights reserved. # SPDX-License-Identifier: Apache-2.0 -# \ No newline at end of file +# diff --git a/tools/python_tests/mbed_tools/build/_internal/test_memory_banks.py b/tools/python_tests/mbed_tools/build/_internal/test_memory_banks.py index 01bc2f352b9..1d33afabbc6 100644 --- a/tools/python_tests/mbed_tools/build/_internal/test_memory_banks.py +++ b/tools/python_tests/mbed_tools/build/_internal/test_memory_banks.py @@ -25,12 +25,12 @@ def test_simple_memory_layout(self): "peripheral": False, "read": True, "secure": False, - "write": True + "write": True, }, "default": True, "size": 0x100000, "start": 0x08000000, - "startup": False + "startup": False, }, "IROM1": { "access": { @@ -40,13 +40,13 @@ def test_simple_memory_layout(self): "peripheral": False, "read": True, "secure": False, - "write": False + "write": False, }, "default": True, "size": 0x200000, "start": 0x10000000, - "startup": True - } + "startup": True, + }, } } @@ -54,29 +54,26 @@ def test_simple_memory_layout(self): assert config["memory_bank_macros"] == { # New style definitions (ROM) - 'MBED_ROM_BANK_IROM1_START=0x10000000', - 
'MBED_ROM_BANK_IROM1_SIZE=0x200000', - 'MBED_CONFIGURED_ROM_BANK_IROM1_START=0x10000000', - 'MBED_CONFIGURED_ROM_BANK_IROM1_SIZE=0x200000', - + "MBED_ROM_BANK_IROM1_START=0x10000000", + "MBED_ROM_BANK_IROM1_SIZE=0x200000", + "MBED_CONFIGURED_ROM_BANK_IROM1_START=0x10000000", + "MBED_CONFIGURED_ROM_BANK_IROM1_SIZE=0x200000", # Old style definitions (ROM) - 'MBED_ROM_START=0x10000000', - 'MBED_ROM_SIZE=0x200000', - 'MBED_CONFIGURED_ROM_START=0x10000000', - 'MBED_CONFIGURED_ROM_SIZE=0x200000', - + "MBED_ROM_START=0x10000000", + "MBED_ROM_SIZE=0x200000", + "MBED_CONFIGURED_ROM_START=0x10000000", + "MBED_CONFIGURED_ROM_SIZE=0x200000", # New style definitions (RAM) - 'MBED_RAM_BANK_IRAM1_START=0x8000000', - 'MBED_RAM_BANK_IRAM1_SIZE=0x100000', - 'MBED_CONFIGURED_RAM_BANK_IRAM1_START=0x8000000', - 'MBED_CONFIGURED_RAM_BANK_IRAM1_SIZE=0x100000', - + "MBED_RAM_BANK_IRAM1_START=0x8000000", + "MBED_RAM_BANK_IRAM1_SIZE=0x100000", + "MBED_CONFIGURED_RAM_BANK_IRAM1_START=0x8000000", + "MBED_CONFIGURED_RAM_BANK_IRAM1_SIZE=0x100000", # Old style definitions (RAM) - 'MBED_RAM_START=0x8000000', - 'MBED_RAM_SIZE=0x100000', - 'MBED_CONFIGURED_RAM_START=0x8000000', - 'MBED_CONFIGURED_RAM_SIZE=0x100000', - } + "MBED_RAM_START=0x8000000", + "MBED_RAM_SIZE=0x100000", + "MBED_CONFIGURED_RAM_START=0x8000000", + "MBED_CONFIGURED_RAM_SIZE=0x100000", + } def test_memory_configuration(self): """ @@ -96,12 +93,12 @@ def test_memory_configuration(self): "peripheral": False, "read": True, "secure": False, - "write": True + "write": True, }, "default": True, "size": 0x100000, "start": 0x08000000, - "startup": False + "startup": False, }, "IROM1": { "access": { @@ -111,54 +108,51 @@ def test_memory_configuration(self): "peripheral": False, "read": True, "secure": False, - "write": False + "write": False, }, "default": True, "size": 0x200000, "start": 0x10000000, - "startup": True - } + "startup": True, + }, }, "memory_bank_config": { "IRAM1": { # Configure size only - "size": 0xa0000, + "size": 
0xA0000 }, "IROM1": { # Configure size and address - "size": 0x1f0000, - "start": 0x10010000 - } - } + "size": 0x1F0000, + "start": 0x10010000, + }, + }, } process_memory_banks(config) assert config["memory_bank_macros"] == { # New style definitions (ROM) - 'MBED_ROM_BANK_IROM1_START=0x10000000', - 'MBED_ROM_BANK_IROM1_SIZE=0x200000', - 'MBED_CONFIGURED_ROM_BANK_IROM1_START=0x10010000', - 'MBED_CONFIGURED_ROM_BANK_IROM1_SIZE=0x1f0000', - + "MBED_ROM_BANK_IROM1_START=0x10000000", + "MBED_ROM_BANK_IROM1_SIZE=0x200000", + "MBED_CONFIGURED_ROM_BANK_IROM1_START=0x10010000", + "MBED_CONFIGURED_ROM_BANK_IROM1_SIZE=0x1f0000", # Old style definitions (ROM) - 'MBED_ROM_START=0x10000000', - 'MBED_ROM_SIZE=0x200000', - 'MBED_CONFIGURED_ROM_START=0x10010000', - 'MBED_CONFIGURED_ROM_SIZE=0x1f0000', - + "MBED_ROM_START=0x10000000", + "MBED_ROM_SIZE=0x200000", + "MBED_CONFIGURED_ROM_START=0x10010000", + "MBED_CONFIGURED_ROM_SIZE=0x1f0000", # New style definitions (RAM) - 'MBED_RAM_BANK_IRAM1_START=0x8000000', - 'MBED_RAM_BANK_IRAM1_SIZE=0x100000', - 'MBED_CONFIGURED_RAM_BANK_IRAM1_START=0x8000000', - 'MBED_CONFIGURED_RAM_BANK_IRAM1_SIZE=0xa0000', - + "MBED_RAM_BANK_IRAM1_START=0x8000000", + "MBED_RAM_BANK_IRAM1_SIZE=0x100000", + "MBED_CONFIGURED_RAM_BANK_IRAM1_START=0x8000000", + "MBED_CONFIGURED_RAM_BANK_IRAM1_SIZE=0xa0000", # Old style definitions (RAM) - 'MBED_RAM_START=0x8000000', - 'MBED_RAM_SIZE=0x100000', - 'MBED_CONFIGURED_RAM_START=0x8000000', - 'MBED_CONFIGURED_RAM_SIZE=0xa0000', - } + "MBED_RAM_START=0x8000000", + "MBED_RAM_SIZE=0x100000", + "MBED_CONFIGURED_RAM_START=0x8000000", + "MBED_CONFIGURED_RAM_SIZE=0xa0000", + } def test_two_ram_banks(self): """ @@ -177,12 +171,12 @@ def test_two_ram_banks(self): "peripheral": False, "read": True, "secure": False, - "write": True + "write": True, }, "default": True, "size": 0x100000, "start": 0x08000000, - "startup": False + "startup": False, }, "IRAM2": { "access": { @@ -192,12 +186,12 @@ def test_two_ram_banks(self): 
"peripheral": False, "read": True, "secure": False, - "write": True + "write": True, }, "default": False, "size": 0x400000, "start": 0x08100000, - "startup": False + "startup": False, }, "IROM1": { "access": { @@ -207,13 +201,13 @@ def test_two_ram_banks(self): "peripheral": False, "read": True, "secure": False, - "write": False + "write": False, }, "default": True, "size": 0x200000, "start": 0x10000000, - "startup": True - } + "startup": True, + }, } } @@ -223,38 +217,33 @@ def test_two_ram_banks(self): # in the dictionary assert config["memory_bank_macros"] == { # New style definitions (ROM) - 'MBED_ROM_BANK_IROM1_START=0x10000000', - 'MBED_ROM_BANK_IROM1_SIZE=0x200000', - 'MBED_CONFIGURED_ROM_BANK_IROM1_START=0x10000000', - 'MBED_CONFIGURED_ROM_BANK_IROM1_SIZE=0x200000', - + "MBED_ROM_BANK_IROM1_START=0x10000000", + "MBED_ROM_BANK_IROM1_SIZE=0x200000", + "MBED_CONFIGURED_ROM_BANK_IROM1_START=0x10000000", + "MBED_CONFIGURED_ROM_BANK_IROM1_SIZE=0x200000", # Old style definitions (ROM) - 'MBED_ROM_START=0x10000000', - 'MBED_ROM_SIZE=0x200000', - 'MBED_CONFIGURED_ROM_START=0x10000000', - 'MBED_CONFIGURED_ROM_SIZE=0x200000', - + "MBED_ROM_START=0x10000000", + "MBED_ROM_SIZE=0x200000", + "MBED_CONFIGURED_ROM_START=0x10000000", + "MBED_CONFIGURED_ROM_SIZE=0x200000", # New style definitions (RAM) - 'MBED_RAM_BANK_IRAM1_START=0x8000000', - 'MBED_RAM_BANK_IRAM1_SIZE=0x100000', - 'MBED_CONFIGURED_RAM_BANK_IRAM1_START=0x8000000', - 'MBED_CONFIGURED_RAM_BANK_IRAM1_SIZE=0x100000', - + "MBED_RAM_BANK_IRAM1_START=0x8000000", + "MBED_RAM_BANK_IRAM1_SIZE=0x100000", + "MBED_CONFIGURED_RAM_BANK_IRAM1_START=0x8000000", + "MBED_CONFIGURED_RAM_BANK_IRAM1_SIZE=0x100000", # Old style definitions (RAM) - 'MBED_RAM_START=0x8000000', - 'MBED_RAM_SIZE=0x100000', - 'MBED_CONFIGURED_RAM_START=0x8000000', - 'MBED_CONFIGURED_RAM_SIZE=0x100000', - + "MBED_RAM_START=0x8000000", + "MBED_RAM_SIZE=0x100000", + "MBED_CONFIGURED_RAM_START=0x8000000", + "MBED_CONFIGURED_RAM_SIZE=0x100000", # New style 
definitions (RAM 2) - 'MBED_RAM_BANK_IRAM2_START=0x8100000', - 'MBED_RAM_BANK_IRAM2_SIZE=0x400000', - 'MBED_CONFIGURED_RAM_BANK_IRAM2_START=0x8100000', - 'MBED_CONFIGURED_RAM_BANK_IRAM2_SIZE=0x400000', - + "MBED_RAM_BANK_IRAM2_START=0x8100000", + "MBED_RAM_BANK_IRAM2_SIZE=0x400000", + "MBED_CONFIGURED_RAM_BANK_IRAM2_START=0x8100000", + "MBED_CONFIGURED_RAM_BANK_IRAM2_SIZE=0x400000", # Old style definitions (RAM 2) - 'MBED_RAM1_START=0x8100000', - 'MBED_RAM1_SIZE=0x400000', - 'MBED_CONFIGURED_RAM1_START=0x8100000', - 'MBED_CONFIGURED_RAM1_SIZE=0x400000', - } + "MBED_RAM1_START=0x8100000", + "MBED_RAM1_SIZE=0x400000", + "MBED_CONFIGURED_RAM1_START=0x8100000", + "MBED_CONFIGURED_RAM1_SIZE=0x400000", + } diff --git a/tools/python_tests/mbed_tools/cli/test_configure.py b/tools/python_tests/mbed_tools/cli/test_configure.py index 43ade6acb18..960761e043f 100644 --- a/tools/python_tests/mbed_tools/cli/test_configure.py +++ b/tools/python_tests/mbed_tools/cli/test_configure.py @@ -22,7 +22,9 @@ def test_generate_config_called_with_correct_arguments(self, program, generate_c @mock.patch("mbed_tools.cli.configure.generate_config") @mock.patch("mbed_tools.cli.configure.MbedProgram") def test_generate_config_called_with_mbed_os_path(self, program, generate_config): - CliRunner().invoke(configure, ["-m", "k64f", "-t", "gcc_arm", "--mbed-os-path", "./extern/mbed-os", "-o", "some_output_dir"]) + CliRunner().invoke( + configure, ["-m", "k64f", "-t", "gcc_arm", "--mbed-os-path", "./extern/mbed-os", "-o", "some_output_dir"] + ) generate_config.assert_called_once_with("K64F", "GCC_ARM", program.from_existing()) @@ -32,7 +34,8 @@ def test_custom_targets_location_used_when_passed(self, program, generate_config program = program.from_existing() custom_targets_json_path = pathlib.Path("custom", "custom_targets.json") CliRunner().invoke( - configure, ["-t", "gcc_arm", "-m", "k64f", "--custom-targets-json", custom_targets_json_path, "-o", "some_output_dir"] + configure, + ["-t", "gcc_arm", 
"-m", "k64f", "--custom-targets-json", custom_targets_json_path, "-o", "some_output_dir"], ) generate_config.assert_called_once_with("K64F", "GCC_ARM", program) @@ -59,12 +62,7 @@ def test_output_dir_passed(self, program, generate_config): toolchain = "gcc_arm" target = "k64f" - CliRunner().invoke( - configure, ["-t", toolchain, "-m", target, "-o", "some_output_dir"] - ) + CliRunner().invoke(configure, ["-t", toolchain, "-m", target, "-o", "some_output_dir"]) - program.from_existing.assert_called_once_with( - pathlib.Path("."), - pathlib.Path("some_output_dir") - ) + program.from_existing.assert_called_once_with(pathlib.Path("."), pathlib.Path("some_output_dir")) generate_config.assert_called_once_with("K64F", "GCC_ARM", test_program) diff --git a/tools/python_tests/mbed_tools/cli/test_list_connected_devices.py b/tools/python_tests/mbed_tools/cli/test_list_connected_devices.py index 38402ed9bd1..92a30b27fed 100644 --- a/tools/python_tests/mbed_tools/cli/test_list_connected_devices.py +++ b/tools/python_tests/mbed_tools/cli/test_list_connected_devices.py @@ -246,8 +246,8 @@ def test_build_targets_included(self, get_connected_devices): f"{board.board_type}_{board.build_variant[0]}", f"{board.board_type}_{board.build_variant[1]}", f"{board.board_type}", - ], - }, + ] + } ] assert result.exit_code == 0 @@ -268,14 +268,14 @@ def test_identifiers_appended_when_identical_boards_found(self, get_connected_de f"{board.board_type}_{board.build_variant[0]}[0]", f"{board.board_type}_{board.build_variant[1]}[0]", f"{board.board_type}[0]", - ], + ] }, { "build_targets": [ f"{board.board_type}_{board.build_variant[0]}[1]", f"{board.board_type}_{board.build_variant[1]}[1]", f"{board.board_type}[1]", - ], + ] }, ] diff --git a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_device_detector.py b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_device_detector.py index 1bf3bf2e968..d6f60173f81 100644 --- 
a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_device_detector.py +++ b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_device_detector.py @@ -50,9 +50,7 @@ def test_builds_candidate_using_assembled_data(self, _assemble_candidate_data): } _assemble_candidate_data.return_value = device_data - self.assertEqual( - _build_candidate(device_data), CandidateDevice(**device_data), - ) + self.assertEqual(_build_candidate(device_data), CandidateDevice(**device_data)) @mock.patch("mbed_tools.devices._internal.darwin.device_detector.CandidateDevice") def test_raises_if_candidate_cannot_be_built(self, CandidateDevice, _assemble_candidate_data): @@ -65,11 +63,7 @@ def test_raises_if_candidate_cannot_be_built(self, CandidateDevice, _assemble_ca @mock.patch("mbed_tools.devices._internal.darwin.device_detector._get_mount_points") class TestAssembleCandidateDeviceData(TestCase): def test_glues_device_data_from_various_sources(self, _get_mount_points, _get_serial_port): - device_data = { - "vendor_id": "0xff", - "product_id": "0x24", - "serial_num": "123456", - } + device_data = {"vendor_id": "0xff", "product_id": "0x24", "serial_num": "123456"} _get_serial_port.return_value = "port-1" _get_mount_points.return_value = ["/Volumes/A"] @@ -106,14 +100,11 @@ class TestGetSerialPort(TestCase): @mock.patch("mbed_tools.devices._internal.darwin.device_detector.ioreg", spec_set=ioreg) def test_returns_retrieved_io_dialin_device(self, ioreg): """Given enough data, it constructs an ioreg device name and fetches serial port information.""" - device_data = { - "location_id": "0x12345 / 2", - "_name": "SomeDevice", - } + device_data = {"location_id": "0x12345 / 2", "_name": "SomeDevice"} serial_port = "/dev/tty.usb1234" ioreg.get_io_dialin_device.return_value = serial_port ioreg_device_name = _build_ioreg_device_name( - device_name=device_data["_name"], location_id=device_data["location_id"], + device_name=device_data["_name"], location_id=device_data["location_id"] ) 
self.assertEqual(_get_serial_port(device_data), serial_port) @@ -127,6 +118,6 @@ def test_returns_none_when_cant_determine_ioreg_name(self, ioreg): class TestBuildIoregDeviceName(TestCase): def test_builds_ioreg_device_name_from_system_profiler_data(self): self.assertEqual( - _build_ioreg_device_name(device_name="VeryNiceDevice Really", location_id="0x14420000 / 2",), + _build_ioreg_device_name(device_name="VeryNiceDevice Really", location_id="0x14420000 / 2"), "VeryNiceDevice Really@14420000", ) diff --git a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py index e1887b199b0..e42592875a9 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py +++ b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py @@ -5,10 +5,7 @@ import plistlib from unittest import TestCase, mock -from mbed_tools.devices._internal.darwin.system_profiler import ( - get_all_usb_devices_data, - get_end_usb_devices_data, -) +from mbed_tools.devices._internal.darwin.system_profiler import get_all_usb_devices_data, get_end_usb_devices_data class TestGetAllUSBDevicesData(TestCase): diff --git a/tools/python_tests/mbed_tools/devices/_internal/linux/test_linux_device_detector.py b/tools/python_tests/mbed_tools/devices/_internal/linux/test_linux_device_detector.py index e36fdddc48a..89a8b2e90f3 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/linux/test_linux_device_detector.py +++ b/tools/python_tests/mbed_tools/devices/_internal/linux/test_linux_device_detector.py @@ -61,7 +61,7 @@ def test_handles_filesystem_mountpoint_error_and_skips_device(self, mock_find_fs mock_find_fs_mounts.return_value = [] devs = [ mock_device_factory( - ID_SERIAL_SHORT="2090290209", ID_VENDOR_ID="0x45", ID_MODEL_ID="0x48", DEVNAME="/dev/sdabcde", + ID_SERIAL_SHORT="2090290209", ID_VENDOR_ID="0x45", ID_MODEL_ID="0x48", DEVNAME="/dev/sdabcde" ) 
] mock_udev_context().list_devices.return_value = devs diff --git a/tools/python_tests/mbed_tools/devices/_internal/test_detect_candidate_devices.py b/tools/python_tests/mbed_tools/devices/_internal/test_detect_candidate_devices.py index a8af0e7602a..54439939b36 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/test_detect_candidate_devices.py +++ b/tools/python_tests/mbed_tools/devices/_internal/test_detect_candidate_devices.py @@ -8,10 +8,7 @@ from python_tests.mbed_tools.devices.markers import windows_only, darwin_only, linux_only from mbed_tools.devices._internal.base_detector import DeviceDetector from mbed_tools.devices.exceptions import UnknownOSError -from mbed_tools.devices._internal.detect_candidate_devices import ( - detect_candidate_devices, - _get_detector_for_current_os, -) +from mbed_tools.devices._internal.detect_candidate_devices import detect_candidate_devices, _get_detector_for_current_os class TestDetectCandidateDevices: diff --git a/tools/python_tests/mbed_tools/devices/_internal/windows/test_disk_identifier.py b/tools/python_tests/mbed_tools/devices/_internal/windows/test_disk_identifier.py index c03143d00c3..b7eb4c4652c 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/windows/test_disk_identifier.py +++ b/tools/python_tests/mbed_tools/devices/_internal/windows/test_disk_identifier.py @@ -67,9 +67,7 @@ def test_uid_linking_between_usb_and_disk(self): from mbed_tools.devices._internal.windows.usb_device_identifier import UsbIdentifier, WindowsUID disk_uid = WindowsUID( - uid="000440112138", - raw_uid="9&DBDECF6&0&000440112138&0", - serial_number=" 134657890", + uid="000440112138", raw_uid="9&DBDECF6&0&000440112138&0", serial_number=" 134657890" ) usb_uid = UsbIdentifier( diff --git a/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_component.py b/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_component.py index cd5281492bb..f8ff4faa4bc 100644 --- 
a/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_component.py +++ b/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_component.py @@ -48,7 +48,6 @@ def test_parameters(self): self.assertListEqual([name for name in ComponentDefinition._fields], get_test_class()().field_names) def test_set_values(self): - a_component = get_test_class()() self.assertTrue(is_undefined_data_object(a_component.to_tuple())) valid_values = {k: generate_valid_values() for k in a_component.field_names} diff --git a/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_identifier.py b/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_identifier.py index ecf174955c7..bae27cd00c4 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_identifier.py +++ b/tools/python_tests/mbed_tools/devices/_internal/windows/test_windows_identifier.py @@ -11,7 +11,7 @@ def generateUID() -> WindowsUID: return WindowsUID( - uid=str(uuid.uuid4()), raw_uid=f"/{uuid.uuid4()}&001", serial_number=f"SN{str(uuid.uuid4()).replace('-','')}" + uid=str(uuid.uuid4()), raw_uid=f"/{uuid.uuid4()}&001", serial_number=f"SN{str(uuid.uuid4()).replace('-', '')}" ) diff --git a/tools/python_tests/mbed_tools/devices/markers.py b/tools/python_tests/mbed_tools/devices/markers.py index 94abce4029c..e48624f305d 100644 --- a/tools/python_tests/mbed_tools/devices/markers.py +++ b/tools/python_tests/mbed_tools/devices/markers.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Shared pytest functionality.""" + import platform import unittest diff --git a/tools/python_tests/mbed_tools/devices/test_mbed_devices.py b/tools/python_tests/mbed_tools/devices/test_mbed_devices.py index 40dad932192..2d25fa9c351 100644 --- a/tools/python_tests/mbed_tools/devices/test_mbed_devices.py +++ b/tools/python_tests/mbed_tools/devices/test_mbed_devices.py @@ -15,11 +15,7 @@ from mbed_tools.devices.device import Device from 
mbed_tools.devices._internal.exceptions import NoBoardForCandidate, ResolveBoardError -from mbed_tools.devices.devices import ( - get_connected_devices, - find_connected_device, - find_all_connected_devices, -) +from mbed_tools.devices.devices import get_connected_devices, find_connected_device, find_all_connected_devices from mbed_tools.devices.exceptions import DeviceLookupFailed, NoDevicesFound diff --git a/tools/python_tests/mbed_tools/project/_internal/test_project_data.py b/tools/python_tests/mbed_tools/project/_internal/test_project_data.py index f0d8c9fd6bf..6955dbb0219 100644 --- a/tools/python_tests/mbed_tools/project/_internal/test_project_data.py +++ b/tools/python_tests/mbed_tools/project/_internal/test_project_data.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Tests for project_data.py.""" + import pathlib import pytest @@ -12,9 +13,13 @@ MbedProgramFiles, MbedOS, MAIN_CPP_FILE_NAME, - APP_CONFIG_FILE_NAME_JSON + APP_CONFIG_FILE_NAME_JSON, +) +from python_tests.mbed_tools.project.factories import ( + make_mbed_lib_reference, + make_mbed_program_files, + make_mbed_os_files, ) -from python_tests.mbed_tools.project.factories import make_mbed_lib_reference, make_mbed_program_files, make_mbed_os_files class TestMbedProgramFiles: @@ -76,7 +81,6 @@ def test_from_existing_finds_existing_program_data_app_json(self, tmp_path): assert program.cmakelists_file.exists() - class TestMbedLibReference: def test_is_resolved_returns_true_if_source_code_dir_exists(self, tmp_path): root = pathlib.Path(tmp_path, "foo") diff --git a/tools/python_tests/mbed_tools/project/test_mbed_program.py b/tools/python_tests/mbed_tools/project/test_mbed_program.py index 69945d10f0c..d7d6239f253 100644 --- a/tools/python_tests/mbed_tools/project/test_mbed_program.py +++ b/tools/python_tests/mbed_tools/project/test_mbed_program.py @@ -40,7 +40,9 @@ def test_from_new_local_dir_generates_valid_program_creating_directory(self, tmp program = 
from_new_set_target_toolchain(program_root) - assert program.files == MbedProgramFiles.from_existing(program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR) + assert program.files == MbedProgramFiles.from_existing( + program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR + ) def test_from_new_local_dir_generates_valid_program_creating_directory_in_cwd(self, tmp_path): old_cwd = os.getcwd() @@ -52,7 +54,9 @@ def test_from_new_local_dir_generates_valid_program_creating_directory_in_cwd(se program = from_new_set_target_toolchain(program_root) - assert program.files == MbedProgramFiles.from_existing(program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR) + assert program.files == MbedProgramFiles.from_existing( + program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR + ) finally: os.chdir(old_cwd) @@ -64,7 +68,9 @@ def test_from_new_local_dir_generates_valid_program_existing_directory(self, tmp program = from_new_set_target_toolchain(program_root) - assert program.files == MbedProgramFiles.from_existing(program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR) + assert program.files == MbedProgramFiles.from_existing( + program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR + ) def test_from_existing_raises_if_path_is_not_a_program(self, tmp_path): fs_root = pathlib.Path(tmp_path, "foo") diff --git a/tools/python_tests/mbed_tools/regression/test_configure.py b/tools/python_tests/mbed_tools/regression/test_configure.py index eabb93111f4..de7592ad130 100644 --- a/tools/python_tests/mbed_tools/regression/test_configure.py +++ b/tools/python_tests/mbed_tools/regression/test_configure.py @@ -56,6 +56,8 @@ def test_generate_config_called_with_correct_arguments(self): pathlib.Path(tmpDirPath / "cmake-build-debug").mkdir() result = CliRunner().invoke( - configure, ["-m", "Target", "-t", "gcc_arm", "-p", tmpDir, "-o", str(tmpDirPath / "cmake-build-debug")], catch_exceptions=False + configure, + ["-m", "Target", "-t", "gcc_arm", "-p", 
tmpDir, "-o", str(tmpDirPath / "cmake-build-debug")], + catch_exceptions=False, ) self.assertIn("mbed_config.cmake has been generated and written to", result.output) diff --git a/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py b/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py index 37f96f8ce78..21aaceb4cd3 100644 --- a/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py +++ b/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Tests for parsing the attributes for targets in targets.json that accumulate.""" + from unittest import TestCase, mock import copy @@ -40,8 +41,7 @@ def test_expected_list(self): class TestGetAccumulatingAttributes(TestCase): @mock.patch( - "mbed_tools.targets._internal.targets_json_parsers." - "accumulating_attribute_parser._targets_accumulate_hierarchy" + "mbed_tools.targets._internal.targets_json_parsers.accumulating_attribute_parser._targets_accumulate_hierarchy" ) @mock.patch( "mbed_tools.targets._internal.targets_json_parsers." 
@@ -121,10 +121,7 @@ def test_combination_multiple_attributes(self): {ALL_ACCUMULATING_ATTRIBUTES[0]: ["1"]}, {ALL_ACCUMULATING_ATTRIBUTES[1]: ["A", "B", "C"]}, ] - expected_attributes = { - ALL_ACCUMULATING_ATTRIBUTES[0]: ["1", "2", "3"], - ALL_ACCUMULATING_ATTRIBUTES[1]: ["A"], - } + expected_attributes = {ALL_ACCUMULATING_ATTRIBUTES[0]: ["1", "2", "3"], ALL_ACCUMULATING_ATTRIBUTES[1]: ["A"]} result = _determine_accumulated_attributes(accumulation_order) self.assertEqual(result, expected_attributes) @@ -136,10 +133,7 @@ def test_combination_later_check_no_unwanted_overrides(self): {ALL_ACCUMULATING_ATTRIBUTES[1]: ["A", "B", "C"]}, {ALL_ACCUMULATING_ATTRIBUTES[1]: []}, ] - expected_attributes = { - ALL_ACCUMULATING_ATTRIBUTES[0]: ["1", "2", "3"], - ALL_ACCUMULATING_ATTRIBUTES[1]: ["A"], - } + expected_attributes = {ALL_ACCUMULATING_ATTRIBUTES[0]: ["1", "2", "3"], ALL_ACCUMULATING_ATTRIBUTES[1]: ["A"]} result = _determine_accumulated_attributes(accumulation_order) self.assertEqual(result, expected_attributes) @@ -159,6 +153,7 @@ def test_determine_accumulated_attributes_basic_add(self): self.assertEqual(orig_accumulation_order, accumulation_order) + class TestElementMatches(TestCase): def test_element_matches_exactly(self): element_to_remove = "SOMETHING" diff --git a/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_overriding_attribute_parser.py b/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_overriding_attribute_parser.py index 8fc09ab15b8..6d00cde485b 100644 --- a/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_overriding_attribute_parser.py +++ b/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_overriding_attribute_parser.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Tests for parsing the attributes for targets in targets.json that override.""" + from unittest import TestCase, mock from 
mbed_tools.targets._internal.targets_json_parsers.overriding_attribute_parser import ( @@ -18,11 +19,10 @@ class TestGetOverridingAttributes(TestCase): @mock.patch( - "mbed_tools.targets._internal.targets_json_parsers." "overriding_attribute_parser._targets_override_hierarchy" + "mbed_tools.targets._internal.targets_json_parsers.overriding_attribute_parser._targets_override_hierarchy" ) @mock.patch( - "mbed_tools.targets._internal.targets_json_parsers." - "overriding_attribute_parser._determine_overridden_attributes" + "mbed_tools.targets._internal.targets_json_parsers.overriding_attribute_parser._determine_overridden_attributes" ) def test_correctly_calls(self, _determine_overridden_attributes, _targets_override_hierarchy): target_name = "Target_Name" @@ -133,9 +133,7 @@ def test_multiple_inheritance(self): self.assertSetEqual(result, expected_result) def test_no_inheritance(self): - all_targets_data = { - "A": {"attribute_3": ["some things"]}, - } + all_targets_data = {"A": {"attribute_3": ["some things"]}} target_name = "A" expected_result = {"A"} result = get_labels_for_target(all_targets_data, target_name) diff --git a/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py b/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py index 7f04151bc95..c602241ac7f 100644 --- a/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py +++ b/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py @@ -22,12 +22,12 @@ def test_401(self, caplog, requests_mock): requests_mock.get(board_database._BOARD_API, status_code=401, text="Who are you?") with pytest.raises(board_database.BoardAPIError): board_database.get_online_board_data() - assert any( - x for x in caplog.records if x.levelno == logging.WARNING and "MBED_API_AUTH_TOKEN" in x.msg - ), "Auth token should be mentioned" - assert any( - x for x in caplog.records if x.levelno == logging.DEBUG and "Who are you?" 
in x.msg - ), "Message content should be in the debug message" + assert any(x for x in caplog.records if x.levelno == logging.WARNING and "MBED_API_AUTH_TOKEN" in x.msg), ( + "Auth token should be mentioned" + ) + assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "Who are you?" in x.msg), ( + "Message content should be in the debug message" + ) def test_404(self, caplog, requests_mock): """Given a 404 error code, TargetAPIError is raised.""" @@ -35,12 +35,12 @@ def test_404(self, caplog, requests_mock): requests_mock.get(board_database._BOARD_API, status_code=404, text="Not Found") with pytest.raises(board_database.BoardAPIError): board_database.get_online_board_data() - assert any( - x for x in caplog.records if x.levelno == logging.WARNING and "404" in x.msg - ), "HTTP status code should be mentioned" - assert any( - x for x in caplog.records if x.levelno == logging.DEBUG and "Not Found" in x.msg - ), "Message content should be in the debug message" + assert any(x for x in caplog.records if x.levelno == logging.WARNING and "404" in x.msg), ( + "HTTP status code should be mentioned" + ) + assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "Not Found" in x.msg), ( + "Message content should be in the debug message" + ) def test_200_invalid_json(self, caplog, requests_mock): """Given a valid response but invalid json, JSONDecodeError is raised.""" @@ -48,12 +48,12 @@ def test_200_invalid_json(self, caplog, requests_mock): requests_mock.get(board_database._BOARD_API, text="some text") with pytest.raises(board_database.ResponseJSONError): board_database.get_online_board_data() - assert any( - x for x in caplog.records if x.levelno == logging.WARNING and "Invalid JSON" in x.msg - ), "Invalid JSON should be mentioned" - assert any( - x for x in caplog.records if x.levelno == logging.DEBUG and "some text" in x.msg - ), "Message content should be in the debug message" + assert any(x for x in caplog.records if x.levelno == 
logging.WARNING and "Invalid JSON" in x.msg), ( + "Invalid JSON should be mentioned" + ) + assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "some text" in x.msg), ( + "Message content should be in the debug message" + ) def test_200_no_data_field(self, caplog, requests_mock): """Given a valid response but no data field, ResponseJSONError is raised.""" @@ -61,12 +61,12 @@ def test_200_no_data_field(self, caplog, requests_mock): requests_mock.get(board_database._BOARD_API, json={"notdata": [], "stillnotdata": {}}) with pytest.raises(board_database.ResponseJSONError): board_database.get_online_board_data() - assert any( - x for x in caplog.records if x.levelno == logging.WARNING and "missing the 'data' field" in x.msg - ), "Data field should be mentioned" - assert any( - x for x in caplog.records if x.levelno == logging.DEBUG and "notdata, stillnotdata" in x.msg - ), "JSON keys from message should be in the debug message" + assert any(x for x in caplog.records if x.levelno == logging.WARNING and "missing the 'data' field" in x.msg), ( + "Data field should be mentioned" + ) + assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "notdata, stillnotdata" in x.msg), ( + "JSON keys from message should be in the debug message" + ) def test_200_value_data(self, requests_mock): """Given a valid response, target data is set from the returned json.""" diff --git a/tools/python_tests/mbed_tools/targets/_internal/test_target_attributes.py b/tools/python_tests/mbed_tools/targets/_internal/test_target_attributes.py index 2ca79eedcf6..865e9b63387 100644 --- a/tools/python_tests/mbed_tools/targets/_internal/test_target_attributes.py +++ b/tools/python_tests/mbed_tools/targets/_internal/test_target_attributes.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Tests for `mbed_tools.targets.target_attributes`.""" + from unittest import TestCase, mock from mbed_tools.targets._internal.target_attributes import ( @@ -16,28 +17,19 @@ class 
TestExtractTargetAttributes(TestCase): def test_no_target_found(self): - all_targets_data = { - "Target_1": "some attributes", - "Target_2": "some more attributes", - } + all_targets_data = {"Target_1": "some attributes", "Target_2": "some more attributes"} with self.assertRaises(TargetNotFoundError): _extract_target_attributes(all_targets_data, "Unlisted_Target", False) def test_target_found(self): target_attributes = {"attribute1": "something"} - all_targets_data = { - "Target_1": target_attributes, - "Target_2": "some more attributes", - } + all_targets_data = {"Target_1": target_attributes, "Target_2": "some more attributes"} # When not explicitly included public is assumed to be True self.assertEqual(_extract_target_attributes(all_targets_data, "Target_1", False), target_attributes) def test_target_public(self): - all_targets_data = { - "Target_1": {"attribute1": "something", "public": True}, - "Target_2": "some more attributes", - } + all_targets_data = {"Target_1": {"attribute1": "something", "public": True}, "Target_2": "some more attributes"} # The public attribute affects visibility but is removed from result self.assertEqual(_extract_target_attributes(all_targets_data, "Target_1", False), {"attribute1": "something"}) diff --git a/tools/python_tests/mbed_tools/targets/test_get_board.py b/tools/python_tests/mbed_tools/targets/test_get_board.py index cc7d9aca17e..93351651d9a 100644 --- a/tools/python_tests/mbed_tools/targets/test_get_board.py +++ b/tools/python_tests/mbed_tools/targets/test_get_board.py @@ -3,6 +3,7 @@ # SPDX-License-Identifier: Apache-2.0 # """Tests for `mbed_tools.targets.get_board`.""" + import pytest from unittest import mock @@ -11,11 +12,7 @@ # Import from top level as this is the expected interface for users from mbed_tools.targets import get_board_by_online_id, get_board_by_product_code, get_board_by_jlink_slug -from mbed_tools.targets.get_board import ( - _DatabaseMode, - _get_database_mode, - get_board, -) +from 
mbed_tools.targets.get_board import _DatabaseMode, _get_database_mode, get_board from mbed_tools.targets.env import env from mbed_tools.targets.exceptions import UnknownBoard, UnsupportedMode from python_tests.mbed_tools.targets.factories import make_board diff --git a/tools/python_tests/memap/__init__.py b/tools/python_tests/memap/__init__.py index 848835c6c77..d211719ea09 100644 --- a/tools/python_tests/memap/__init__.py +++ b/tools/python_tests/memap/__init__.py @@ -14,4 +14,4 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -""" \ No newline at end of file +""" diff --git a/tools/python_tests/memap/memap_test.py b/tools/python_tests/memap/memap_test.py index 37af53d2122..b6111404139 100644 --- a/tools/python_tests/memap/memap_test.py +++ b/tools/python_tests/memap/memap_test.py @@ -15,6 +15,7 @@ See the License for the specific language governing permissions and limitations under the License. 
""" + import sys from os.path import isfile, join import json @@ -29,6 +30,7 @@ Tests for test_api.py """ + @pytest.fixture def memap_parser(): """ @@ -45,23 +47,23 @@ def memap_parser(): ".bss": 3, ".heap": 0, ".stack": 0, - ".interrupts_ram":0, - ".init":0, - ".ARM.extab":0, - ".ARM.exidx":0, - ".ARM.attributes":0, - ".eh_frame":0, - ".init_array":0, - ".fini_array":0, - ".jcr":0, - ".stab":0, - ".stabstr":0, - ".ARM.exidx":0, - ".ARM":0, - ".interrupts":0, - ".flash_config":0, - "unknown":0, - "OUTPUT":0, + ".interrupts_ram": 0, + ".init": 0, + ".ARM.extab": 0, + ".ARM.exidx": 0, + ".ARM.attributes": 0, + ".eh_frame": 0, + ".init_array": 0, + ".fini_array": 0, + ".jcr": 0, + ".stab": 0, + ".stabstr": 0, + ".ARM.exidx": 0, + ".ARM": 0, + ".interrupts": 0, + ".flash_config": 0, + "unknown": 0, + "OUTPUT": 0, }, "[lib]/libc.a/lib_a-printf.o": { ".text": 4, @@ -69,23 +71,23 @@ def memap_parser(): ".bss": 6, ".heap": 0, ".stack": 0, - ".interrupts_ram":0, - ".init":0, - ".ARM.extab":0, - ".ARM.exidx":0, - ".ARM.attributes":0, - ".eh_frame":0, - ".init_array":0, - ".fini_array":0, - ".jcr":0, - ".stab":0, - ".stabstr":0, - ".ARM.exidx":0, - ".ARM":0, - ".interrupts":0, - ".flash_config":0, - "unknown":0, - "OUTPUT":0, + ".interrupts_ram": 0, + ".init": 0, + ".ARM.extab": 0, + ".ARM.exidx": 0, + ".ARM.attributes": 0, + ".eh_frame": 0, + ".init_array": 0, + ".fini_array": 0, + ".jcr": 0, + ".stab": 0, + ".stabstr": 0, + ".ARM.exidx": 0, + ".ARM": 0, + ".interrupts": 0, + ".flash_config": 0, + "unknown": 0, + "OUTPUT": 0, }, "main.o": { ".text": 7, @@ -93,23 +95,23 @@ def memap_parser(): ".bss": 0, ".heap": 0, ".stack": 0, - ".interrupts_ram":0, - ".init":0, - ".ARM.extab":0, - ".ARM.exidx":0, - ".ARM.attributes":0, - ".eh_frame":0, - ".init_array":0, - ".fini_array":0, - ".jcr":0, - ".stab":0, - ".stabstr":0, - ".ARM.exidx":0, - ".ARM":0, - ".interrupts":0, - ".flash_config":0, - "unknown":0, - "OUTPUT":0, + ".interrupts_ram": 0, + ".init": 0, + ".ARM.extab": 0, + 
".ARM.exidx": 0, + ".ARM.attributes": 0, + ".eh_frame": 0, + ".init_array": 0, + ".fini_array": 0, + ".jcr": 0, + ".stab": 0, + ".stabstr": 0, + ".ARM.exidx": 0, + ".ARM": 0, + ".interrupts": 0, + ".flash_config": 0, + "unknown": 0, + "OUTPUT": 0, }, "test.o": { ".text": 0, @@ -117,41 +119,33 @@ def memap_parser(): ".bss": 0, ".heap": 0, ".stack": 0, - ".interrupts_ram":0, - ".init":0, - ".ARM.extab":0, - ".ARM.exidx":0, - ".ARM.attributes":0, - ".eh_frame":0, - ".init_array":0, - ".fini_array":0, - ".jcr":0, - ".stab":0, - ".stabstr":0, - ".ARM.exidx":0, - ".ARM":0, - ".interrupts":0, - ".flash_config":0, - "unknown":0, - "OUTPUT":0, + ".interrupts_ram": 0, + ".init": 0, + ".ARM.extab": 0, + ".ARM.exidx": 0, + ".ARM.attributes": 0, + ".eh_frame": 0, + ".init_array": 0, + ".fini_array": 0, + ".jcr": 0, + ".stab": 0, + ".stabstr": 0, + ".ARM.exidx": 0, + ".ARM": 0, + ".interrupts": 0, + ".flash_config": 0, + "unknown": 0, + "OUTPUT": 0, }, } memap_parser.memory_banks = { - "RAM": [ - MemoryBankInfo(name="IRAM1", start_addr=0x20000000, total_size=32768, used_size=2000) - ], - "ROM": [ - MemoryBankInfo(name="IROM1", start_addr=0x20000000, total_size=65536, used_size=10000) - ] + "RAM": [MemoryBankInfo(name="IRAM1", start_addr=0x20000000, total_size=32768, used_size=2000)], + "ROM": [MemoryBankInfo(name="IROM1", start_addr=0x20000000, total_size=65536, used_size=10000)], } memap_parser.old_memory_banks = { - "RAM": [ - MemoryBankInfo(name="IRAM1", start_addr=0x20000000, total_size=32768, used_size=2014) - ], - "ROM": [ - MemoryBankInfo(name="IROM1", start_addr=0x20000000, total_size=65536, used_size=9000) - ] + "RAM": [MemoryBankInfo(name="IRAM1", start_addr=0x20000000, total_size=32768, used_size=2014)], + "ROM": [MemoryBankInfo(name="IROM1", start_addr=0x20000000, total_size=65536, used_size=9000)], } return memap_parser @@ -172,11 +166,10 @@ def generate_test_helper(memap_parser, format, depth, sep, file_output=None): memap.sep = sep 
memap_parser.generate_output(format, depth, file_output=file_output) - assert memap_parser.modules == old_modules,\ - "generate_output modified the 'modules' property" + assert memap_parser.modules == old_modules, "generate_output modified the 'modules' property" for file_name in memap_parser.short_modules: - assert(len(file_name.split(memap.sep)) <= depth) + assert len(file_name.split(memap.sep)) <= depth @pytest.mark.parametrize("depth", [1, 2, 20]) @@ -189,7 +182,7 @@ def test_report_computed(memap_parser, depth, sep): :param depth: the detail of the output """ - memap_parser.generate_output('table', depth, sep) + memap_parser.generate_output("table", depth, sep) # Report is created after generating output assert memap_parser.mem_summary @@ -204,7 +197,7 @@ def test_generate_output_table(memap_parser, depth, sep): :param memap_parser: Mocked parser :param depth: the detail of the output """ - generate_test_helper(memap_parser, 'table', depth, sep) + generate_test_helper(memap_parser, "table", depth, sep) @pytest.mark.parametrize("depth", [1, 2, 20]) @@ -216,8 +209,8 @@ def test_generate_output_json(memap_parser, tmpdir, depth, sep): :param tmpdir: a unique location to place an output file :param depth: the detail of the output """ - file_name = str(tmpdir.join('output.json').realpath()) - generate_test_helper(memap_parser, 'json', depth, sep, file_name) + file_name = str(tmpdir.join("output.json").realpath()) + generate_test_helper(memap_parser, "json", depth, sep, file_name) assert isfile(file_name), "Failed to create json file" json.load(open(file_name)) @@ -232,8 +225,8 @@ def test_generate_output_csv_ci(memap_parser, tmpdir, depth, sep): :param tmpdir: a unique location to place an output file :param depth: the detail of the output """ - file_name = str(tmpdir.join('output.csv').realpath()) - generate_test_helper(memap_parser, 'csv-ci', depth, sep, file_name) + file_name = str(tmpdir.join("output.csv").realpath()) + generate_test_helper(memap_parser, 
"csv-ci", depth, sep, file_name) assert isfile(file_name), "Failed to create csv-ci file" @@ -242,7 +235,7 @@ def test_memory_bank_summary(memap_parser: MemapParser): Test that the memory bank summary has the expected information in it. """ - memap_parser.generate_output('table', 1, "/") + memap_parser.generate_output("table", 1, "/") assert memap_parser.memory_bank_summary["RAM"].keys() == {"IRAM1"} assert memap_parser.memory_bank_summary["ROM"].keys() == {"IROM1"} @@ -252,4 +245,4 @@ def test_memory_bank_summary(memap_parser: MemapParser): assert memap_parser.memory_bank_summary["ROM"]["IROM1"]["total_size"] == 65536 assert memap_parser.memory_bank_summary["ROM"]["IROM1"]["delta_bytes_used"] == 1000 assert memap_parser.memory_bank_summary["ROM"]["IROM1"]["percent_used"] == pytest.approx(15.3, abs=0.1) - assert memap_parser.memory_bank_summary["ROM"]["IROM1"]["delta_percent_used"] == pytest.approx(1.5, abs=0.1) \ No newline at end of file + assert memap_parser.memory_bank_summary["ROM"]["IROM1"]["delta_percent_used"] == pytest.approx(1.5, abs=0.1) diff --git a/tools/python_tests/memap/parse_test.py b/tools/python_tests/memap/parse_test.py index 71b6debccf1..5c27851497d 100644 --- a/tools/python_tests/memap/parse_test.py +++ b/tools/python_tests/memap/parse_test.py @@ -13,7 +13,7 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and -limitations +limitations """ import sys @@ -29,14 +29,15 @@ PARSED_GCC_DATA = { - "startup/startup.o": defaultdict(int, {".text": 0xc0}), + "startup/startup.o": defaultdict(int, {".text": 0xC0}), "[lib]/d16M_tlf.a/__main.o": defaultdict(int, {".text": 8}), "[lib]/misc/foo.o": defaultdict(int, {".text": 8}), "irqs/irqs.o": defaultdict(int, {".text": 0x98}), - "data/data.o":defaultdict(int, {".data": 0x18, ".bss": 0x198}), + "data/data.o": defaultdict(int, {".data": 0x18, ".bss": 0x198}), "main.o": defaultdict(int, {".text": 0x36}), } + def test_parse_gcc(): memap = MemapParser() @@ -45,7 +46,7 @@ def test_parse_gcc(): parsed_data_os_agnostic = dict() for k in PARSED_GCC_DATA: - parsed_data_os_agnostic[k.replace('/', sep)] = PARSED_GCC_DATA[k] + parsed_data_os_agnostic[k.replace("/", sep)] = PARSED_GCC_DATA[k] # Sum of everything in .text and .data assert memap.memory_banks["ROM"][0].used_size == 0x1B6 @@ -60,16 +61,16 @@ def test_add_symbol_missing_info(): memap = _GccParser() old_symbols = deepcopy(memap.modules) memap.add_symbol(".data.some_func", "", 8, 10, ".data", 1000) - assert(old_symbols == memap.modules) + assert old_symbols == memap.modules memap.add_symbol(".data.some_func", "foo.o", 8, 0, ".data", 1000) - assert(old_symbols == memap.modules) + assert old_symbols == memap.modules def test_add_full_module(): memap = _GccParser() old_modules = deepcopy(memap.modules) memap.add_symbol(".data.foo", "main.o", 5, 8, ".data", 1000) - assert(old_modules != memap.modules) - assert("main.o" in memap.modules) - assert(".data" in memap.modules["main.o"]) - assert(memap.modules["main.o"][".data"] == 8) + assert old_modules != memap.modules + assert "main.o" in memap.modules + assert ".data" in memap.modules["main.o"] + assert memap.modules["main.o"][".data"] == 8 diff --git a/tools/python_tests/scancode_evaluate/__init__.py b/tools/python_tests/scancode_evaluate/__init__.py index 
2bae17afc88..04d33f049d1 100644 --- a/tools/python_tests/scancode_evaluate/__init__.py +++ b/tools/python_tests/scancode_evaluate/__init__.py @@ -1,4 +1,4 @@ # # Copyright (c) 2020-2023 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 -# \ No newline at end of file +# diff --git a/tools/python_tests/scancode_evaluate/scancode_evaluate_test.py b/tools/python_tests/scancode_evaluate/scancode_evaluate_test.py index 8448cd16d43..ea202ef9c4a 100644 --- a/tools/python_tests/scancode_evaluate/scancode_evaluate_test.py +++ b/tools/python_tests/scancode_evaluate/scancode_evaluate_test.py @@ -7,41 +7,40 @@ import pytest from scancode_evaluate.scancode_evaluate import license_check -STUBS_PATH = os.path.join( - os.path.abspath(os.path.join(os.path.dirname(__file__))), "scancode_test" -) +STUBS_PATH = os.path.join(os.path.abspath(os.path.join(os.path.dirname(__file__))), "scancode_test") HEADER_WITHOUT_SPDX = "/* Copyright (C) Arm Limited, Inc - All Rights Reserved\ * Unauthorized copying of this. 
file, via any medium is strictly prohibited\ * Proprietary and confidential\ */" -HEADER_WITH_SPDX = "/* mbed Microcontroller Library\ +HEADER_WITH_SPDX = '/* mbed Microcontroller Library\ * Copyright (c) 2006-2013 ARM Limited\ *\ * SPDX-License-Identifier: Apache-2.0\ - * Licensed under the Apache License, Version 2.0 (the \"License\");\ + * Licensed under the Apache License, Version 2.0 (the "License");\ * you may not use this file except in compliance with the License.\ * You may obtain a copy of the License at\ *\ * http://www.apache.org/licenses/LICENSE-2.0\ *\ * Unless required by applicable law or agreed to in writing, software\ - * distributed under the License is distributed on an \"AS IS\" BASIS,\ + * distributed under the License is distributed on an "AS IS" BASIS,\ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\ * See the License for the specific language governing permissions and\ * limitations under the License.\ - */" + */' -HEADER_WITH_BINARY_LICENSE = "/*\ +HEADER_WITH_BINARY_LICENSE = '/*\ * Copyright (c) 2019, Arm Limited, All Rights Reserved\ * SPDX-License-Identifier: LicenseRef-PBL\ *\ * This file and the related binary are licensed under the\ - * Permissive Binary License, Version 1.0 (the \"License\");\ + * Permissive Binary License, Version 1.0 (the "License");\ * you may not use these files except in compliance with the License.\ *\ - */" + */' + @pytest.fixture() def create_scanned_files(): @@ -55,7 +54,7 @@ def create_scanned_files(): os.path.join(STUBS_PATH, "test3.h"), os.path.join(STUBS_PATH, "test4.h"), os.path.join(STUBS_PATH, "test5.h"), - os.path.join(STUBS_PATH, "test6.h") + os.path.join(STUBS_PATH, "test6.h"), ] for file_path in file_paths: with open(file_path, "w") as new_file: @@ -71,39 +70,38 @@ def create_scanned_files(): class TestScancodeEvaluate: - def test_missing_files_attribute(self): - """ Missing `files` attribute in JSON. 
- @inputs scancode_test/scancode_test_1.json - @outputs -1 + """Missing `files` attribute in JSON. + @inputs scancode_test/scancode_test_1.json + @outputs -1 """ assert license_check(os.path.join(STUBS_PATH, "scancode_test_1.json")) == -1 def test_various_combinations_permissive_license_with_spdx(self): - """ Various combinations where at least one license in - a file is permissive and has spdx in the match.identifier - attribute. - @inputs scancode_test/scancode_test_2.json - @outputs 0 + """Various combinations where at least one license in + a file is permissive and has spdx in the match.identifier + attribute. + @inputs scancode_test/scancode_test_2.json + @outputs 0 """ assert license_check(os.path.join(STUBS_PATH, "scancode_test_2.json")) == 0 def test_missing_license_permissive_license_and_spdx(self, create_scanned_files): - """ Test four files scanned with various issues. - test.h: Missing license text (error count += 1) - test3.h: Missing `Permissive` license text and `spdx` in match.identifier and not in file tested by ScanCode (error count += 1) - test4.h: Missing `Permissive` license text and `spdx` in match.identifier but found in file tested by ScanCode (error count += 1) - test5.h: Missing `spdx` in match.identifier but found in file tested by ScanCode. (error count += 0) - test6.h: Matching `spdx` in match.identifier but Permissive Binary License header (error count += 0) - @inputs scancode_test/scancode_test_2.json - @output 3 + """Test four files scanned with various issues. + test.h: Missing license text (error count += 1) + test3.h: Missing `Permissive` license text and `spdx` in match.identifier and not in file tested by ScanCode (error count += 1) + test4.h: Missing `Permissive` license text and `spdx` in match.identifier but found in file tested by ScanCode (error count += 1) + test5.h: Missing `spdx` in match.identifier but found in file tested by ScanCode. 
(error count += 0) + test6.h: Matching `spdx` in match.identifier but Permissive Binary License header (error count += 0) + @inputs scancode_test/scancode_test_2.json + @output 3 """ assert license_check(os.path.join(STUBS_PATH, "scancode_test_3.json")) == 3 def test_permissive_license_no_spdx(self, create_scanned_files): - """ Multiple `Permissive` licenses in one file but none with `spdx` in - match.identifier and not in file tested by ScanCode (error count += 0) - @inputs scancode_test/scancode_test_2.json - @outputs 0 + """Multiple `Permissive` licenses in one file but none with `spdx` in + match.identifier and not in file tested by ScanCode (error count += 0) + @inputs scancode_test/scancode_test_2.json + @outputs 0 """ - assert license_check(os.path.join(STUBS_PATH, "scancode_test_4.json")) == 0 \ No newline at end of file + assert license_check(os.path.join(STUBS_PATH, "scancode_test_4.json")) == 0 diff --git a/tools/run_python_linters.sh b/tools/run_python_linters.sh new file mode 100644 index 00000000000..aa0cecfba79 --- /dev/null +++ b/tools/run_python_linters.sh @@ -0,0 +1,31 @@ +#!/bin/bash -e + +# +# Copyright (c) 2025 Jamie Smith. +# SPDX-License-Identifier: Apache-2.0 +# + +# Script which runs Python formatters and linters +# This is executed by the GitHub Actions CI build but also can be run locally. + +# Go to script directory +cd "$(dirname $0)" + +# Activate Mbed OS virtualenv +if command -v mbedhtrun >/dev/null 2>&1; then + echo "Mbed OS python environment appears to already be activated." +elif [ -e "../venv/Scripts/activate" ]; then + source "../venv/Scripts/activate" +elif [ -e "../venv/bin/activate" ]; then + source "../venv/bin/activate" +else + echo "Failed to find Mbed OS virtualenv in ../venv and Python packages not installed to global environment." + exit 1 +fi + +if ! command -v ruff >/dev/null 2>&1; then + echo "Linters optional dependency of Mbed not installed. Please run 'mbed-os/venv/bin/pip install mbed-os/tools[linters]'." 
+fi + +echo ">> Formatting with Ruff..." +ruff format \ No newline at end of file diff --git a/tools/test/__init__.py b/tools/test/__init__.py index 0119582f3fc..d99c41d2b48 100644 --- a/tools/test/__init__.py +++ b/tools/test/__init__.py @@ -13,5 +13,5 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and -limitations +limitations """ diff --git a/tools/test/examples/elf_float_checker.py b/tools/test/examples/elf_float_checker.py index bc60e218069..200b3d0facc 100644 --- a/tools/test/examples/elf_float_checker.py +++ b/tools/test/examples/elf_float_checker.py @@ -12,6 +12,7 @@ import sys from enum import Enum + class ReturnCode(Enum): """Return codes.""" @@ -26,6 +27,7 @@ class ReturnCode(Enum): FLOATING_POINT_SYMBOL_REGEX = r"__aeabi_(cd.+|cf.+|h2f.+|d.+|f.+|.+2d|.+2f)" OBJECT_FILE_ANALYSIS_CMD = ["objdump", "-t"] + class SymbolParser: """Parse the given ELF format file.""" @@ -46,9 +48,7 @@ def get_symbols_from_table(self, symbol_table, symbol_regex): def get_symbol_table(self, elf_file): """Get the symbol table from an ELF format file.""" - log.debug( - "Get the symbol table for ELF format file '{}'".format(elf_file) - ) + log.debug("Get the symbol table for ELF format file '{}'".format(elf_file)) cmd = [OBJECT_FILE_ANALYSIS_CMD[0], OBJECT_FILE_ANALYSIS_CMD[1], elf_file] log.debug("command: '{}'".format(cmd)) @@ -56,16 +56,14 @@ def get_symbol_table(self, elf_file): process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) except OSError as error: raise SymbolTableError( - "Getting symbol table for ELF format file '{}' failed," - " error: {}".format(elf_file, error) + "Getting symbol table for ELF format file '{}' failed, error: {}".format(elf_file, error) ) stdout, _ = process.communicate() if process.returncode: raise SymbolTableError( - "Getting symbol table for ELF format file 
'{}' failed," - " error: {}".format(elf_file, stdout.decode()) + "Getting symbol table for ELF format file '{}' failed, error: {}".format(elf_file, stdout.decode()) ) symbol_table = stdout.decode() @@ -82,6 +80,7 @@ class SymbolTableError(Exception): class FloatSymbolsFound(Exception): """An exception generated when floating point symbols are found.""" + class ArgumentParserWithDefaultHelp(argparse.ArgumentParser): """Subclass that always shows the help message on invalid arguments.""" @@ -109,9 +108,7 @@ def check_float_symbols(elf_file): parser = SymbolParser() symbol_table = parser.get_symbol_table(elf_file) - float_symbols = parser.get_symbols_from_table( - symbol_table, FLOATING_POINT_SYMBOL_REGEX - ) + float_symbols = parser.get_symbols_from_table(symbol_table, FLOATING_POINT_SYMBOL_REGEX) return float_symbols @@ -131,25 +128,16 @@ def check_action(args): def parse_args(): """Parse the command line args.""" parser = ArgumentParserWithDefaultHelp( - description="ELF floats checker", - formatter_class=argparse.ArgumentDefaultsHelpFormatter, + description="ELF floats checker", formatter_class=argparse.ArgumentDefaultsHelpFormatter ) parser.add_argument( "elf_file", type=str, - help=( - "the Executable and Linkable Format (ELF) file to check" - " for floating point instruction inclusion." 
- ), + help=("the Executable and Linkable Format (ELF) file to check for floating point instruction inclusion."), ) - parser.add_argument( - "-v", - "--verbose", - action="store_true", - help="increase verbosity of status information.", - ) + parser.add_argument("-v", "--verbose", action="store_true", help="increase verbosity of status information.") parser.set_defaults(func=check_action) @@ -186,5 +174,6 @@ def _main(): else: return ReturnCode.SUCCESS.value + if __name__ == "__main__": sys.exit(_main()) diff --git a/tools/test/examples/examples.py b/tools/test/examples/examples.py index 8a87ff43893..45a74ccc423 100644 --- a/tools/test/examples/examples.py +++ b/tools/test/examples/examples.py @@ -15,7 +15,7 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and -limitations +limitations """ from argparse import ArgumentParser @@ -38,95 +38,96 @@ import examples_lib as lib from examples_lib import SUPPORTED_TOOLCHAINS, SUPPORTED_IDES -def parse_args(): + +def parse_args(): """Parse the arguments passed to the script.""" official_targets = get_mbed_official_release("5") official_target_names = [x[0] for x in official_targets] - parser = ArgumentParser() parser.add_argument("-c", dest="config", default="examples.json") - parser.add_argument("-e", "--example", - help=("filter the examples used in the script"), - type=argparse_many(lambda x: x), - default=[]) + parser.add_argument( + "-e", "--example", help=("filter the examples used in the script"), type=argparse_many(lambda x: x), default=[] + ) subparsers = parser.add_subparsers() - import_cmd = subparsers.add_parser("import", help="import of examples in config file" ) + import_cmd = subparsers.add_parser("import", help="import of examples in config file") import_cmd.set_defaults(fn=do_import) - clone_cmd = subparsers.add_parser("clone", help="clone examples in 
config file" ) + clone_cmd = subparsers.add_parser("clone", help="clone examples in config file") clone_cmd.set_defaults(fn=do_clone) - list_cmd = subparsers.add_parser("list", help="list examples in config file in a table") + list_cmd = subparsers.add_parser("list", help="list examples in config file in a table") list_cmd.set_defaults(fn=do_list) symlink_cmd = subparsers.add_parser("symlink", help="create symbolic link to given mbed-os PATH") symlink_cmd.add_argument("PATH", help=" path of mbed-os to be symlinked") symlink_cmd.set_defaults(fn=do_symlink) - deploy_cmd = subparsers.add_parser("deploy", help="mbed deploy for examples in config file" ) + deploy_cmd = subparsers.add_parser("deploy", help="mbed deploy for examples in config file") deploy_cmd.set_defaults(fn=do_deploy) version_cmd = subparsers.add_parser("update", help="update mbed-os to sepcific tags") version_cmd.add_argument("TAG", help=" tag of mbed-os") version_cmd.set_defaults(fn=do_update) - compile_cmd = subparsers.add_parser("compile", help="compile of examples" ) - compile_cmd.set_defaults(fn=do_compile), + compile_cmd = subparsers.add_parser("compile", help="compile of examples") + (compile_cmd.set_defaults(fn=do_compile),) + ( + compile_cmd.add_argument( + "toolchains", + nargs="*", + default=SUPPORTED_TOOLCHAINS, + type=argparse_force_uppercase_type(SUPPORTED_TOOLCHAINS, "toolchain"), + ), + ) + compile_cmd.add_argument( + "-m", + "--mcu", + help=("build for the given MCU (%s)" % ", ".join(official_target_names)), + metavar="MCU", + type=argparse_many(argparse_force_uppercase_type(official_target_names, "MCU")), + default=official_target_names, + ) + + compile_cmd.add_argument("--profiles", nargs="+", metavar="profile", help="build profile(s)") + + compile_cmd.add_argument( + "-j", + "--jobs", + dest="jobs", + metavar="NUMBER", + type=int, + default=0, + help="Number of concurrent jobs. 
Default: 0/auto (based on host machine's number of CPUs)", + ) + + compile_cmd.add_argument( + "--cmake", action="store_true", dest="cmake", default=False, help="Use Cmake to build example" + ) compile_cmd.add_argument( - "toolchains", nargs="*", default=SUPPORTED_TOOLCHAINS, - type=argparse_force_uppercase_type(SUPPORTED_TOOLCHAINS, - "toolchain")), - compile_cmd.add_argument("-m", "--mcu", - help=("build for the given MCU (%s)" % - ', '.join(official_target_names)), - metavar="MCU", - type=argparse_many( - argparse_force_uppercase_type( - official_target_names, "MCU")), - default=official_target_names) - - compile_cmd.add_argument("--profiles", - nargs='+', - metavar="profile", - help="build profile(s)") - - compile_cmd.add_argument("-j", "--jobs", - dest='jobs', - metavar="NUMBER", - type=int, - default=0, - help="Number of concurrent jobs. Default: 0/auto (based on host machine's number of CPUs)") - - compile_cmd.add_argument("--cmake", action="store_true", dest="cmake", default=False, help="Use Cmake to build example") - compile_cmd.add_argument("-v", "--verbose", - action="store_true", - dest="verbose", - default=False, - help="Verbose diagnostic output") + "-v", "--verbose", action="store_true", dest="verbose", default=False, help="Verbose diagnostic output" + ) export_cmd = subparsers.add_parser("export", help="export of examples") export_cmd.set_defaults(fn=do_export) export_cmd.add_argument( - "ide", nargs="*", default=SUPPORTED_IDES, - type=argparse_force_uppercase_type(SUPPORTED_IDES, - "ide")) - export_cmd.add_argument("-m", "--mcu", - help=("build for the given MCU (%s)" % - ', '.join(official_target_names)), - metavar="MCU", - type=argparse_many( - argparse_force_uppercase_type( - official_target_names, "MCU")), - default=official_target_names) + "ide", nargs="*", default=SUPPORTED_IDES, type=argparse_force_uppercase_type(SUPPORTED_IDES, "ide") + ) + export_cmd.add_argument( + "-m", + "--mcu", + help=("build for the given MCU (%s)" % ", 
".join(official_target_names)), + metavar="MCU", + type=argparse_many(argparse_force_uppercase_type(official_target_names, "MCU")), + default=official_target_names, + ) return parser.parse_args() def main(): """Entry point""" args = parse_args() - config = json.load(open(os.path.join(os.path.dirname(__file__), - args.config))) + config = json.load(open(os.path.join(os.path.dirname(__file__), args.config))) all_examples = [] - for example in config['examples']: - name = basename(example['github']) - if name != example['name']: - exit("ERROR : repo basename '%s' and example name '%s' not match " % (name, example['name'])) + for example in config["examples"]: + name = basename(example["github"]) + if name != example["name"]: + exit("ERROR : repo basename '%s' and example name '%s' not match " % (name, example["name"])) all_examples.append(name) exp_filter = [x for x in all_examples if x in args.example] if args.example else all_examples @@ -159,27 +160,42 @@ def do_deploy(_, config, examples): def do_compile(args, config, examples): """Do the compile step""" - results = lib.compile_repos(config, args.toolchains, args.mcu, args.profiles, args.verbose, examples, args.cmake, args.jobs) + results = lib.compile_repos( + config, args.toolchains, args.mcu, args.profiles, args.verbose, examples, args.cmake, args.jobs + ) failures = lib.get_build_summary(results) - return failures - + return failures + + def do_update(args, config, examples): - """ Test update the example to the version specified by the tag """ + """Test update the example to the version specified by the tag""" return lib.update_example_version(config, args.TAG, examples) + def do_list(_, config, examples): """List the examples in the config file""" exp_table = PrettyTable() - exp_table.hrules = 1 + exp_table.hrules = 1 exp_table.field_names = ["Name", "Subs", "Feature", "Targets", "Compile", "Test"] for example in config["examples"]: - exp_table.add_row([example['name'], 
'\n'.join(example['subs']),'\n'.join(example['features']),'\n'.join(example['targets']),example['compile'],example['test']]) + exp_table.add_row( + [ + example["name"], + "\n".join(example["subs"]), + "\n".join(example["features"]), + "\n".join(example["targets"]), + example["compile"], + example["test"], + ] + ) print(exp_table) return 0 + def do_symlink(args, config, examples): """Create Symbolic link for given mbed-os PATH""" return lib.symlink_mbedos(config, args.PATH, examples) + if __name__ == "__main__": sys.exit(main()) diff --git a/tools/test/examples/examples_lib.py b/tools/test/examples/examples_lib.py index 13cc1c97961..7c1de93da17 100644 --- a/tools/test/examples/examples_lib.py +++ b/tools/test/examples/examples_lib.py @@ -15,8 +15,9 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and -limitations +limitations """ + import os from os.path import dirname, abspath, basename, join, normpath import os.path @@ -28,7 +29,7 @@ import json import logging -logging.basicConfig(level=logging.DEBUG, format='[EXAMPLES]> %(levelname)-8s %(message)s') +logging.basicConfig(level=logging.DEBUG, format="[EXAMPLES]> %(levelname)-8s %(message)s") """ Import and bulid a bunch of example programs @@ -49,9 +50,9 @@ from tools.utils import write_json_to_file from prettytable import PrettyTable -SUPPORTED_TOOLCHAINS = list(TOOLCHAINS - set(u'uARM')) -SUPPORTED_IDES = [exp for exp in list(EXPORTERS) + list(EXPORTER_ALIASES) - if exp != "cmsis" and exp != "zip"] +SUPPORTED_TOOLCHAINS = list(TOOLCHAINS - set("uARM")) +SUPPORTED_IDES = [exp for exp in list(EXPORTERS) + list(EXPORTER_ALIASES) if exp != "cmsis" and exp != "zip"] + def get_table_from_pretty_table(pretty_table): rows = [] @@ -64,12 +65,13 @@ def get_table_from_pretty_table(pretty_table): rows.append(row) return rows + def get_build_summary(results): """Prints to 
screen the complication results of example programs. Args: results - results of the compilation stage. which is the output of compile_repos() - + Returns: Numbers of failed results """ pass_table = PrettyTable() @@ -83,10 +85,10 @@ def get_build_summary(results): pass_table.add_row([summary["name"], summary["target"], summary["toolchain"], summary["test"], "PASSED"]) for summary in status[3]: fail_table.add_row([summary["name"], summary["target"], summary["toolchain"], summary["test"], "FAILED"]) - failure_counter+=1 + failure_counter += 1 print("\n\nPassed Example Compilation:") print(pass_table) - if (failure_counter > 0): + if failure_counter > 0: print("\n\nFailed Example Compilation:") print(fail_table) print("Number of failures = %d" % failure_counter) @@ -96,40 +98,42 @@ def get_build_summary(results): json.dump(rows, write_file, indent=4, sort_keys=True) return failure_counter + def get_export_summary(results): """Prints to screen the exporting results of example programs. Args: results - results of the compilation stage. 
which is the output of and export_repos() - + Returns: Numbers of failed results """ pass_table = PrettyTable() pass_table.field_names = ["EXAMPLE NAME", "TARGET", "IDE", "EXPORT RESULT", "BUILD RESULT"] pass_table.align["EXAMPLE NAME"] = "l" fail_table = copy.deepcopy(pass_table) - + failure_counter = 0 for exp, status in list(results.items()): for summary in status[2]: pass_table.add_row([summary["name"], summary["target"], summary["ide"], "PASSED", "PASSED"]) for summary in status[3]: fail_table.add_row([summary["name"], summary["target"], summary["ide"], "FAILED", ""]) - failure_counter+=1 + failure_counter += 1 for summary in status[4]: fail_table.add_row([summary["name"], summary["target"], summary["ide"], "PASSED", "FAILED"]) - failure_counter+=1 + failure_counter += 1 for summary in status[5]: pass_table.add_row([summary["name"], summary["target"], summary["ide"], "PASSED", "SKIPPED"]) - + print("\n\nPassed Example Exporting:") print(pass_table) - if (failure_counter > 0): + if failure_counter > 0: print("\n\nFailed Example Exporting:") print(fail_table) print("Number of failures = %d" % failure_counter) return failure_counter + def valid_choices(allowed_choices, all_choices): if len(allowed_choices) > 0: return [t for t in all_choices if t in allowed_choices] @@ -150,8 +154,7 @@ def target_cross_toolchain(allowed_targets, allowed_toolchains, features=[]): """ for target in allowed_targets: for toolchain in allowed_toolchains: - if all(feature in TARGET_MAP[target].features - for feature in features): + if all(feature in TARGET_MAP[target].features for feature in features): yield target, toolchain @@ -168,25 +171,27 @@ def target_cross_ide(allowed_targets, allowed_ides, features=[], toolchains=[]): """ for target in allowed_targets: for ide in allowed_ides: - if (EXPORTERS[ide].is_target_supported(target) and - (not toolchains or EXPORTERS[ide].TOOLCHAIN in toolchains) and - all(feature in TARGET_MAP[target].features - for feature in features)): + if ( + 
EXPORTERS[ide].is_target_supported(target) + and (not toolchains or EXPORTERS[ide].TOOLCHAIN in toolchains) + and all(feature in TARGET_MAP[target].features for feature in features) + ): yield target, ide def get_sub_examples_list(example): - """ Get the names of sub examples. if no sub examples, return the name of main example""" + """Get the names of sub examples. if no sub examples, return the name of main example""" sub_examples = [] - if example['sub-repo-example']: - for sub in example['subs']: + if example["sub-repo-example"]: + for sub in example["subs"]: sub_examples.append("%s/%s" % (example["name"], sub)) else: sub_examples.append(example["name"]) return sub_examples + def source_repos(config, exp_filter): - """ Imports each of the repos and its dependencies (.lib files) associated + """Imports each of the repos and its dependencies (.lib files) associated with the specific examples name from the json config file. Note if there is already a clone of the repo then it will first be removed to ensure a clean, up to date cloning. @@ -195,22 +200,23 @@ def source_repos(config, exp_filter): """ print("\nImporting example repos....\n") - for example in config['examples']: - name = example['name'] + for example in config["examples"]: + name = example["name"] if name in exp_filter: if os.path.exists(name): logging.warning("'%s' example directory already exists. Deleting..." % name) rmtree(name) - - cmd = "mbed-cli import %s" % example['github'] + + cmd = "mbed-cli import %s" % example["github"] logging.info("Executing command '%s'..." % cmd) result = subprocess.call(cmd, shell=True) if result: return result - return 0 + return 0 -def clone_repos(config, exp_filter , retry = 3): - """ Clones each of the repos associated with the specific examples name from the + +def clone_repos(config, exp_filter, retry=3): + """Clones each of the repos associated with the specific examples name from the json config file. 
Note if there is already a clone of the repo then it will first be removed to ensure a clean, up to date cloning. Args: @@ -218,33 +224,34 @@ def clone_repos(config, exp_filter , retry = 3): """ print("\nCloning example repos....\n") - for example in config['examples']: - name = example['name'] + for example in config["examples"]: + name = example["name"] if name in exp_filter: if os.path.exists(name): logging.warning("'%s' example directory already exists. Deleting..." % name) rmtree(name, onerror=remove_readonly) - cmd = "git clone %s" % example['github'] + cmd = "git clone %s" % example["github"] for i in range(0, retry): logging.info("Executing command '%s'..." % cmd) - if not subprocess.call(cmd, shell=True): + if not subprocess.call(cmd, shell=True): break else: logging.error("unable to clone the repo '%s'" % name) return 1 return 0 + def deploy_repos(config, exp_filter): - """ If the example directory exists as provided by the json config file, + """If the example directory exists as provided by the json config file, pull in the examples dependencies by using `mbed-cli deploy`. Args: config - the json object imported from the file. """ print("\nDeploying example repos....\n") - for example in config['examples']: - name = example['name'] + for example in config["examples"]: + name = example["name"] if name in exp_filter: if os.path.exists(name): os.chdir(name) @@ -255,41 +262,42 @@ def deploy_repos(config, exp_filter): os.chdir(CWD) if result: logging.error("mbed-cli deploy command failed for '%s'" % name) - return result + return result else: logging.info("'%s' example directory doesn't exist. Skipping..." % name) return 1 - return 0 + return 0 + def export_repos(config, ides, targets, exp_filter): """Exports and builds combinations of example programs, targets and IDEs. 
- The results are returned in a [key: value] dictionary format: - Where key = The example name from the json config file - value = a list containing: pass_status, successes, export failures, build_failures, - and build_skips - - where pass_status = The overall pass status for the export of the full - set of example programs comprising the example suite. - IE they must build and export) True if all examples pass, false otherwise - successes = list of examples that exported and built (if possible) - If the exporter has no build functionality, then it is a pass - if exported - export_failures = list of examples that failed to export. - build_failures = list of examples that failed to build - build_skips = list of examples that cannot build - - Both successes and failures contain the example name, target and IDE - - Args: - config - the json object imported from the file. - ides - List of IDES to export to + The results are returned in a [key: value] dictionary format: + Where key = The example name from the json config file + value = a list containing: pass_status, successes, export failures, build_failures, + and build_skips + + where pass_status = The overall pass status for the export of the full + set of example programs comprising the example suite. + IE they must build and export) True if all examples pass, false otherwise + successes = list of examples that exported and built (if possible) + If the exporter has no build functionality, then it is a pass + if exported + export_failures = list of examples that failed to export. + build_failures = list of examples that failed to build + build_skips = list of examples that cannot build + + Both successes and failures contain the example name, target and IDE + + Args: + config - the json object imported from the file. 
+ ides - List of IDES to export to """ results = {} print("\nExporting example repos....\n") - for example in config['examples']: - if example['name'] not in exp_filter: + for example in config["examples"]: + if example["name"] not in exp_filter: continue export_failures = [] build_failures = [] @@ -297,53 +305,55 @@ def export_repos(config, ides, targets, exp_filter): successes = [] exported = True pass_status = True - if example['export']: - for name in get_sub_examples_list(example): + if example["export"]: + for name in get_sub_examples_list(example): os.chdir(name) logging.info("In folder '%s'" % name) # Check that the target, IDE, and features combinations are valid and return a # list of valid combinations to work through - for target, ide in target_cross_ide(valid_choices(example['targets'], targets), - valid_choices(example['exporters'], ides), - example['features'], example['toolchains']): - example_summary = {"name" : name, "target" : target, "ide" : ide } + for target, ide in target_cross_ide( + valid_choices(example["targets"], targets), + valid_choices(example["exporters"], ides), + example["features"], + example["toolchains"], + ): + example_summary = {"name": name, "target": target, "ide": ide} summary_string = "%s %s %s" % (name, target, ide) - logging.info("Exporting %s" % summary_string) - + logging.info("Exporting %s" % summary_string) + cmd = ["mbed-cli", "export", "-i", ide, "-m", target] logging.info("Executing command '%s'..." 
% " ".join(cmd)) proc = subprocess.Popen(cmd) proc.wait() if proc.returncode: export_failures.append(example_summary) - logging.error("FAILURE exporting %s" % summary_string) + logging.error("FAILURE exporting %s" % summary_string) else: - logging.info("SUCCESS exporting %s" % summary_string) - logging.info("Building %s" % summary_string) + logging.info("SUCCESS exporting %s" % summary_string) + logging.info("Building %s" % summary_string) try: if EXPORTERS[ide].build(name, cleanup=False): - logging.error("FAILURE building %s" % summary_string) + logging.error("FAILURE building %s" % summary_string) build_failures.append(example_summary) else: - logging.info("SUCCESS building %s" % summary_string) + logging.info("SUCCESS building %s" % summary_string) successes.append(example_summary) except TypeError: successes.append(example_summary) build_skips.append(example_summary) os.chdir(CWD) - if len(build_failures+export_failures) > 0: - pass_status= False + if len(build_failures + export_failures) > 0: + pass_status = False else: exported = False - results[example['name']] = [exported, pass_status, successes, - export_failures, build_failures, build_skips] + results[example["name"]] = [exported, pass_status, successes, export_failures, build_failures, build_skips] return results -def compile_repos(config, toolchains, targets, profiles, verbose, exp_filter, cmake=False ,jobs=0): +def compile_repos(config, toolchains, targets, profiles, verbose, exp_filter, cmake=False, jobs=0): """Compiles combinations of example programs, targets and compile chains. 
The results are returned in a [key: value] dictionary format: @@ -369,37 +379,43 @@ def compile_repos(config, toolchains, targets, profiles, verbose, exp_filter, cm """ results = {} - test_json = {"builds":{}} + test_json = {"builds": {}} base_path = os.getcwd() print("\nCompiling example repos....\n") - for example in config['examples']: - if example['name'] not in exp_filter: + for example in config["examples"]: + if example["name"] not in exp_filter: continue failures = [] successes = [] compiled = True pass_status = True - if example['test']: - if not ('baud_rate' in example and 'compare_log'in example): + if example["test"]: + if not ("baud_rate" in example and "compare_log" in example): logging.warning("'baud_rate' or 'compare_log' keys are missing from config json file") - example['test'] = False - if example['compile']: + example["test"] = False + if example["compile"]: for name in get_sub_examples_list(example): os.chdir(name) logging.info("In folder '%s'" % name) # Check that the target, toolchain and features combinations are valid and return a # list of valid combinations to work through - for target, toolchain in target_cross_toolchain(valid_choices(example['targets'], targets), - valid_choices(example['toolchains'], toolchains), - example['features']): - example_summary = {"name" : name, "target" : target, "toolchain" : toolchain, "test": "UNSET"} + for target, toolchain in target_cross_toolchain( + valid_choices(example["targets"], targets), + valid_choices(example["toolchains"], toolchains), + example["features"], + ): + example_summary = {"name": name, "target": target, "toolchain": toolchain, "test": "UNSET"} summary_string = "%s %s %s" % (name, target, toolchain) logging.info("Compiling %s" % summary_string) if cmake: build_command_seq = ["mbed-tools compile -t {} -m {} -c".format(toolchain, target)] else: - build_command_seq = ["mbed-cli compile -t {} -m {} -j {} {}".format(toolchain, target, str(jobs), '-vv' if verbose else '') ] + 
build_command_seq = [ + "mbed-cli compile -t {} -m {} -j {} {}".format( + toolchain, target, str(jobs), "-vv" if verbose else "" + ) + ] if profiles: for profile in profiles: build_command_seq[0] += " --profile {}".format(profile) @@ -412,39 +428,44 @@ def compile_repos(config, toolchains, targets, profiles, verbose, exp_filter, cm std_out, std_err = proc.communicate() std_out = std_out.decode() std_err = std_err.decode() - print ("\n#### STDOUT ####\n%s\n#### STDERR ####\n%s\n#### End of STDOUT/STDERR ####\n" % (std_out,std_err)) + print( + "\n#### STDOUT ####\n%s\n#### STDERR ####\n%s\n#### End of STDOUT/STDERR ####\n" + % (std_out, std_err) + ) if proc.returncode: failures.append(example_summary) failed_flag = True break - - + if not failed_flag: - if example['test']: - log = example['compare_log'].pop(0) - # example['compare_log'] is a list of log file/files, which matches each examples/sub-examples from same repo. + if example["test"]: + log = example["compare_log"].pop(0) + # example['compare_log'] is a list of log file/files, which matches each examples/sub-examples from same repo. 
# pop the log file out of list regardless the compilation for each example pass of fail - image = fetch_output_image(std_out,cmake) + image = fetch_output_image(std_out, cmake) if image: - image_info = [{"binary_type": "bootable","path": normpath(join(name,image)),"compare_log":log}] - test_group = "{}-{}-{}".format(target, toolchain, example['baud_rate']) - if not test_group in test_json['builds']: - test_json['builds'][test_group] = { - "platform":target , - "toolchain": toolchain , - "base_path": base_path , - "baud_rate": int(example['baud_rate']), - "tests":{} } - test_json['builds'][test_group]['tests'][name]={"binaries":image_info} + image_info = [ + {"binary_type": "bootable", "path": normpath(join(name, image)), "compare_log": log} + ] + test_group = "{}-{}-{}".format(target, toolchain, example["baud_rate"]) + if not test_group in test_json["builds"]: + test_json["builds"][test_group] = { + "platform": target, + "toolchain": toolchain, + "base_path": base_path, + "baud_rate": int(example["baud_rate"]), + "tests": {}, + } + test_json["builds"][test_group]["tests"][name] = {"binaries": image_info} example_summary["test"] = "TEST_ON" - + else: logging.warning("could not find built image for example %s" % name) - example_summary["test"] = "NO_IMAGE" + example_summary["test"] = "NO_IMAGE" else: logging.warning("Test for %s will not be generated." 
% name) - example_summary["test"] = "TEST_OFF" + example_summary["test"] = "TEST_OFF" successes.append(example_summary) os.chdir(CWD) @@ -455,14 +476,14 @@ def compile_repos(config, toolchains, targets, profiles, verbose, exp_filter, cm else: compiled = False - results[example['name']] = [compiled, pass_status, successes, failures] + results[example["name"]] = [compiled, pass_status, successes, failures] write_json_to_file(test_json, "test_spec.json") return results def update_example_version(config, tag, exp_filter): - """ For each example repo identified in the config json object, update the version of + """For each example repo identified in the config json object, update the version of example to that specified by the supplied GitHub tag. This function assumes that each example repo has already been cloned. @@ -472,12 +493,12 @@ def update_example_version(config, tag, exp_filter): """ print("\nUpdating example to version(branch) '%s'\n" % tag) - for example in config['examples']: - name = example['name'] + for example in config["examples"]: + name = example["name"] if name in exp_filter: os.chdir(name) logging.info("In folder '%s'" % name) - cmd = "git checkout -B %s origin/%s" %(tag, tag) + cmd = "git checkout -B %s origin/%s" % (tag, tag) logging.info("Executing command '%s'..." 
% cmd) result = subprocess.call(cmd, shell=True) os.chdir(CWD) @@ -485,18 +506,19 @@ def update_example_version(config, tag, exp_filter): return result return 0 + def symlink_mbedos(config, path, exp_filter): - """ Create a symbolic link in each example folder to given path - If a mbed-os.lib can be found in the folder, it will be removed + """Create a symbolic link in each example folder to given path + If a mbed-os.lib can be found in the folder, it will be removed """ print("\nCreating mbed-os Symbolic link to '%s'\n" % path) - for example in config['examples']: - if example['name'] not in exp_filter: + for example in config["examples"]: + if example["name"] not in exp_filter: continue # traverse the path and find directories with "mbed-os.lib" dirs_with_mbed_os_lib = [] - for root, dirs, files in os.walk(example['name']): + for root, dirs, files in os.walk(example["name"]): for the_file in files: if the_file.endswith("mbed-os.lib"): dirs_with_mbed_os_lib.append(root) @@ -507,7 +529,7 @@ def symlink_mbedos(config, path, exp_filter): logging.info("In folder '%s'" % name) if os.path.exists("mbed-os.lib"): logging.info("Replacing 'mbed-os.lib' with empty file in '%s'" % name) - open("mbed-os.lib", 'w').close() + open("mbed-os.lib", "w").close() else: logging.warning("No 'mbed-os.lib' found in '%s'" % name) if os.path.exists("mbed-os"): @@ -515,17 +537,18 @@ def symlink_mbedos(config, path, exp_filter): else: logging.info("Creating Symbolic link '%s'->'mbed-os'" % path) os.symlink(path, "mbed-os") - #Cmake tool currently require 'mbed-os.lib' to be present to perform build. - #Add a empty 'mbed-os.lib' as a workaround - open('mbed-os.lib', 'a').close() + # Cmake tool currently require 'mbed-os.lib' to be present to perform build. 
+ # Add a empty 'mbed-os.lib' as a workaround + open("mbed-os.lib", "a").close() os.chdir(CWD) return 0 -def fetch_output_image(output,cmake): + +def fetch_output_image(output, cmake): """Find the build image from the last 30 lines of a given log""" lines = output.splitlines() - last_index = -31 if len(lines)>29 else (-1 - len(lines)) - for index in range(-1,last_index,-1): + last_index = -31 if len(lines) > 29 else (-1 - len(lines)) + for index in range(-1, last_index, -1): if cmake: if lines[index].startswith("-- built:") and lines[index].endswith(".bin"): image = lines[index][10:] @@ -538,4 +561,3 @@ def fetch_output_image(output,cmake): if os.path.isfile(image): return image return False - diff --git a/tools/test/examples/test_elf_float_checker.py b/tools/test/examples/test_elf_float_checker.py index 2359da59fb6..a087ef6c50b 100644 --- a/tools/test/examples/test_elf_float_checker.py +++ b/tools/test/examples/test_elf_float_checker.py @@ -39,19 +39,19 @@ ] SYMBOL_TABLE_WITH_FLOATS = ( - " Symbol table '.symtab' contains 2723 entries:\n"+ - "Num: Value Size Type Bind Vis Ndx Name\n"+ - " 0: 00000000 0 NOTYPE LOCAL DEFAULT UND \n"+ - " 1: 000045fd 16 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[0])+ - " 2: 00004609 16 FUNC GLOBAL HIDDEN 3 lp_ticker_disable_interrupt\n"+ - " 3: 00004615 16 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[1])+ - " 4: 00004625 36 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[2])+ - " 5: 00004645 8 FUNC GLOBAL HIDDEN 3 lp_ticker_get_info\n"+ - " 6: 0000464d 116 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[3])+ - " 7: 000046bd 20 FUNC GLOBAL HIDDEN 3 lp_ticker_irq_handler\n"+ - " 8: 000046d1 16 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[4])+ - " 9: 000046e1 52 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[5])+ - " 10: 000046f1 52 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[6]) + " Symbol table '.symtab' contains 2723 entries:\n" + + "Num: Value Size Type Bind Vis Ndx Name\n" + + " 0: 00000000 0 NOTYPE LOCAL DEFAULT 
UND \n" + + " 1: 000045fd 16 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[0]) + + " 2: 00004609 16 FUNC GLOBAL HIDDEN 3 lp_ticker_disable_interrupt\n" + + " 3: 00004615 16 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[1]) + + " 4: 00004625 36 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[2]) + + " 5: 00004645 8 FUNC GLOBAL HIDDEN 3 lp_ticker_get_info\n" + + " 6: 0000464d 116 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[3]) + + " 7: 000046bd 20 FUNC GLOBAL HIDDEN 3 lp_ticker_irq_handler\n" + + " 8: 000046d1 16 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[4]) + + " 9: 000046e1 52 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[5]) + + " 10: 000046f1 52 FUNC GLOBAL HIDDEN 3 {}\n".format(FLOAT_SYMBOLS[6]) ) @@ -59,9 +59,10 @@ OBJECT_FILE_ANALYSIS_CMD = [ TARGET.OBJECT_FILE_ANALYSIS_CMD[0], TARGET.OBJECT_FILE_ANALYSIS_CMD[1], - "{}".format(ELF_FORMAT_FILE) + "{}".format(ELF_FORMAT_FILE), ] + class TestElfFloatChecker: """Test class""" @@ -69,7 +70,8 @@ class TestElfFloatChecker: def setup_class(cls): # Create a dummy ELF format file if not os.path.exists(ELF_FORMAT_FILE): - with open(ELF_FORMAT_FILE, 'w'): pass + with open(ELF_FORMAT_FILE, "w"): + pass @classmethod def teardown_class(cls): @@ -78,45 +80,25 @@ def teardown_class(cls): os.remove(ELF_FORMAT_FILE) @mock.patch("subprocess.Popen") - def test_correctly_detect_absence_of_float_symbols( - self, mock_subprocess_popen - ): + def test_correctly_detect_absence_of_float_symbols(self, mock_subprocess_popen): """Test that no false positive occur.""" process_mock = mock.Mock() - attrs = { - "communicate.return_value":( - SYMBOL_TABLE_WITHOUT_FLOATS.encode(), None - ), - "returncode": 0, - } + attrs = {"communicate.return_value": (SYMBOL_TABLE_WITHOUT_FLOATS.encode(), None), "returncode": 0} process_mock.configure_mock(**attrs) mock_subprocess_popen.return_value = process_mock assert [] == TARGET.check_float_symbols(ELF_FORMAT_FILE) mock_subprocess_popen.assert_called_with( - OBJECT_FILE_ANALYSIS_CMD, - 
stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, + OBJECT_FILE_ANALYSIS_CMD, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) @mock.patch("subprocess.Popen") - def test_correctly_detect_presence_of_float_symbols( - self, mock_subprocess_popen - ): + def test_correctly_detect_presence_of_float_symbols(self, mock_subprocess_popen): """Test that float symbols can be discovered in a symbol table.""" process_mock = mock.Mock() - attrs = { - "communicate.return_value":( - SYMBOL_TABLE_WITH_FLOATS.encode(), None - ), - "returncode": 0, - } + attrs = {"communicate.return_value": (SYMBOL_TABLE_WITH_FLOATS.encode(), None), "returncode": 0} process_mock.configure_mock(**attrs) mock_subprocess_popen.return_value = process_mock - assert FLOAT_SYMBOLS == TARGET.check_float_symbols( - ELF_FORMAT_FILE - ) + assert FLOAT_SYMBOLS == TARGET.check_float_symbols(ELF_FORMAT_FILE) mock_subprocess_popen.assert_called_with( - OBJECT_FILE_ANALYSIS_CMD, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, + OBJECT_FILE_ANALYSIS_CMD, stdout=subprocess.PIPE, stderr=subprocess.STDOUT ) diff --git a/tools/test/pylint.py b/tools/test/pylint.py index 74031257fd1..d1289ad52dc 100644 --- a/tools/test/pylint.py +++ b/tools/test/pylint.py @@ -13,7 +13,7 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and -limitations +limitations """ import subprocess @@ -22,8 +22,7 @@ """A test that all code scores above a 9.25 in pylint""" -SCORE_REGEXP = re.compile( - r'^Your\ code\ has\ been\ rated\ at\ (\-?[0-9\.]+)/10') +SCORE_REGEXP = re.compile(r"^Your\ code\ has\ been\ rated\ at\ (\-?[0-9\.]+)/10") TOOLS_ROOT = os.path.dirname(os.path.dirname(__file__)) @@ -38,28 +37,22 @@ def parse_score(pylint_output): return float(match.group(1)) return 0.0 + def execute_pylint(filename): - """Execute a pylint process and collect it's output - """ + """Execute a pylint process and collect it's output""" process = subprocess.Popen( - ["pylint", filename], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True + ["pylint", filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True ) stout, sterr = process.communicate() status = process.poll() return status, stout, sterr + FILES = ["python/memap/memap.py", "test/pylint.py"] if __name__ == "__main__": for python_module in FILES: - _, stdout, stderr = execute_pylint(os.path.join(TOOLS_ROOT, - python_module)) + _, stdout, stderr = execute_pylint(os.path.join(TOOLS_ROOT, python_module)) score = parse_score(stdout) if score < 9.25: print(stdout) - - - diff --git a/tools/test_configs/__init__.py b/tools/test_configs/__init__.py index 5d40db40665..ab3d8b14b85 100644 --- a/tools/test_configs/__init__.py +++ b/tools/test_configs/__init__.py @@ -13,7 +13,7 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and -limitations +limitations """ from os.path import dirname, abspath, join, exists @@ -26,20 +26,22 @@ CONFIG_MAP = json_file_to_dict(join(CONFIG_DIR, "config_paths.json")) TARGET_CONFIGS = json_file_to_dict(join(CONFIG_DIR, "target_configs.json")) + def get_valid_configs(target_name): if target_name in TARGET_CONFIGS: target_config = TARGET_CONFIGS[target_name] - elif (target_name in TARGET_MAP and 'EMAC' in TARGET_MAP[target_name].device_has): - target_config = { "default_test_configuration": "ETHERNET", "test_configurations": ["ETHERNET"] } + elif target_name in TARGET_MAP and "EMAC" in TARGET_MAP[target_name].device_has: + target_config = {"default_test_configuration": "ETHERNET", "test_configurations": ["ETHERNET"]} else: return {} config_dict = {} for attr in CONFIG_MAP: - if attr in target_config['test_configurations']: + if attr in target_config["test_configurations"]: config_dict[attr] = CONFIG_MAP[attr] return config_dict + def get_config_path(conf_name, target_name): configs = get_valid_configs(target_name) if configs and conf_name.upper() in configs: @@ -47,15 +49,16 @@ def get_config_path(conf_name, target_name): else: return None + def get_default_config(source_dir, target_name): if target_name in TARGET_CONFIGS: - config_name = TARGET_CONFIGS[target_name]['default_test_configuration'] + config_name = TARGET_CONFIGS[target_name]["default_test_configuration"] if config_name == "NONE": return None return join(CONFIG_DIR, CONFIG_MAP[config_name]) elif Config.find_app_config(source_dir): return None - elif (target_name in TARGET_MAP and 'EMAC' in TARGET_MAP[target_name].device_has): + elif target_name in TARGET_MAP and "EMAC" in TARGET_MAP[target_name].device_has: return join(CONFIG_DIR, CONFIG_MAP["ETHERNET"]) else: return None