diff --git a/.github/workflows/basic_checks.yml b/.github/workflows/basic_checks.yml index eecd993dda6..9e3d6f97483 100644 --- a/.github/workflows/basic_checks.yml +++ b/.github/workflows/basic_checks.yml @@ -151,6 +151,12 @@ jobs: run: | cd tools ruff format --diff + + - name: Check Python Lint + run: | + cd tools + ruff check --diff python/mbed_tools python/mbed_platformio + basedpyright docs-check: diff --git a/tools/pyproject.toml b/tools/pyproject.toml index 6e1b0ec7c97..a7bb5c6d792 100644 --- a/tools/pyproject.toml +++ b/tools/pyproject.toml @@ -20,11 +20,10 @@ dependencies = [ "jinja2>=2.11.3", "python-dotenv", "Click>=8.0", # Need at least this version for pathlib.Path support - "GitPython", "tqdm", "tabulate", "requests>=2.20", - "typing-extensions", + "typing-extensions>=4.4.0", "pyserial", "appdirs", "pyjson5>=1.6", @@ -34,12 +33,12 @@ dependencies = [ # Needed for downloading CMSIS MCU descriptions "cmsis-pack-manager>=0.5.0", - # USB device detection on Mac + # USB device detection on Windows "pywin32; platform_system=='Windows'", # USB device detection on Linux "psutil; platform_system=='Linux'", - "pyudev; platform_system=='Linux'", + "pyudev>=0.24; platform_system=='Linux'", # 0.23.x seems to have issues with its Six dependency # USB device detection on Mac "beautifulsoup4; sys_platform == 'darwin'", @@ -72,7 +71,16 @@ unit-tests = [ "lxml" ] linters = [ - "ruff" + "ruff", + "basedpyright", + + # To pass Pyright we need all the platform-dependent packages + "platformio", + "SCons", + "psutil", + "pyudev>=0.24", + "beautifulsoup4", + "lxml" ] greentea = [ ## Additional requirements to install into the Mbed environment when running Greentea tests @@ -109,4 +117,155 @@ ambiq_svl = "ambiq_svl.svl:cli" [tool.ruff] line-length = 120 -src = ['python'] \ No newline at end of file +src = ['python'] + +[tool.ruff.lint] +select = [ + 'A', # Builtins + 'ANN', # Annotations + 'ARG', # Unused arguments + 'B', # Bugbear + 'BLE', # Blind except + 'C4', # Comprehensions + 'C90', # mccabe + 'COM', # Commas + 'D2', # Docstring conventions + 'DTZ', # Datetimes + 'EM', # Error messages + 'ERA', # Commented-out code + 'EXE', # Executable + 'F', # Pyflakes + 'FA', # __future__ annotations + 'FLY', # F-strings + 'FURB', # Refurb + 'G', # Logging format + 'I', # Isort + 'ICN', # Import conventions + 'INP', # Disallow PEP-420 (Implicit namespace packages) + 'INT', # gettext + 'ISC', # Implicit str concat + 'LOG', # Logging + 'N', # PEP-8 Naming + 'NPY', # Numpy + 'PERF', # Unnecessary performance costs + 'PGH', # Pygrep hooks + 'PIE', # Unnecessary code + 'PL', # Pylint + 'PT', # Pytest + 'PTH', # Use Pathlib + 'PYI', # Stub files + 'Q', # Quotes + 'RET', # Return + 'RUF', # Ruff + 'RSE', # Raise + 'S', # Bandit + 'SIM', # Code simplification + 'SLF', # Private member access + 'SLOT', # __slots__ + 'T10', # Debugger + 'T20', # Print + 'TCH', # Type checking + 'TID', # Tidy imports + 'TRY', # Exception handling + 'UP', # Pyupgrade + 'W', # Warnings + 'YTT', # sys.version +] +ignore = [ + 'D203', # incorrect-blank-line-before-class + 'D212', # surrounding-whitespace + 'Q000', # bad-quotes-inline-string - Allow using single or double quotes + 'D200', # unnecessary-multiline-docstring + 'RET505', # superfluous-else-return + 'RET506', # superfluous-else-raise + 'TRY003', # raise-vanilla-args - Redundant with EM101 + 'COM812', # missing-trailing-comma - incompatible with formatter + 'ISC001', # single-line-implicit-string-concatenation - incompatible with formatter + 'TRY300', # try-consider-else + 
'PLR2004', # magic-value-comparison + 'SIM102', # collapsible-if - Sometimes it's nice for readability + 'PERF203', # try-except-in-loop - Sometimes this is needed! + 'PERF401', # manual-list-comprehension - Sometimes this makes code easier to understand. + 'PLR5501', # collapsible-else-if - Stop collapsing my if statements! + 'TC006', # runtime-cast-value + 'G004', # logging-f-string + 'ANN401', # any-type + 'DTZ005', # call-datetime-now-without-tzinfo - If this lint is enabled, it seems difficult to actually work with datetime objects that should be in the local time zone. + 'S701', # jinja2-autoescape-false - autoescape not needed because we are not rendering HTML + 'BLE001', # blind-except + 'S603', # subprocess-without-shell-equals-true - without disabling this, subprocess cannot be used at all! + 'ERA001', # commented-out-code + 'T201', # print + + # For now allow old-style type annotations. Currently there's lots of code that uses them, and I am + # not sure if there is a way to upgrade them automatically. And since this project still supports + # back to Python 3.8, you can't actually use the new style of annotations without using the future annotations feature. + "FA100", # future-rewritable-type-annotation + "UP045", # non-pep604-annotation-optional + "UP006", # non-pep585-annotation + "UP007" # non-pep604-annotation-union +] + +# Allow "unsafe" fixes to be done automatically for the following: +extend-safe-fixes = [ + "W291", # trailing-whitespace + "W293", # blank-line-with-whitespace + "EM101", # raw-string-in-exception + "TC001", # typing-only-first-party-import + "TC003", # typing-only-standard-library-import +] + +[tool.ruff.lint.mccabe] +max-complexity = 15 + +[tool.ruff.lint.pylint] +max-args = 10 + +[tool.ruff.lint.flake8-annotations] +# Don't require a return type annotation for __init__ in most cases +mypy-init-return = true + +[tool.ruff.lint.isort] +# Still fold imports onto one line if possible, even if the last import ends with a comma +split-on-trailing-comma = false + +[tool.basedpyright] + +# Don't warn about things deprecated more recently than python 3.8 +pythonVersion = "3.8" + +include = [ + "python/mbed_tools/**", + "python/mbed_platformio/**" +] + +# For right now, we can configure basedpyright in relatively permissive mode. +# We will allow code where the types of things are partially unknown, as there is +# lots of legacy code in that category. +# Also the PlatformIO code has to work with SCons, which is fundamentally type +# annotation proof, so it can likely never pass these checks. +reportUnknownVariableType = false +reportUnknownMemberType = false +reportUnknownLambdaType = false +reportMissingTypeArgument = false +reportUnknownArgumentType = false +reportUnknownParameterType = false +reportAny = false +reportExplicitAny = false +reportMissingTypeStubs = false + +# Use "medium strict" member variable annotation rules, where the type checker +# is allowed to infer the types of class variables based on what gets assigned in __init__ +reportIncompatibleUnannotatedOverride = true +reportUnannotatedClassAttribute = false + +# Conflicts with Ruff rules +reportImplicitStringConcatenation = false + +# Allow isinstance() even when it seems unnecessary based on type annotations. +# This call is useful to check that the runtime types match the annotations. 
+reportUnnecessaryIsInstance = false + +# Some ignore comments are only needed on specific platforms, so failing due to unneeded ignore +# comments creates an impossible situation +reportUnnecessaryTypeIgnoreComment = false \ No newline at end of file diff --git a/tools/python/mbed_platformio/__init__.py b/tools/python/mbed_platformio/__init__.py index a9d0ccc7eeb..5a6d00aeaa4 100644 --- a/tools/python/mbed_platformio/__init__.py +++ b/tools/python/mbed_platformio/__init__.py @@ -1,4 +1,5 @@ """ Copyright (c) 2025 Jamie Smith + SPDX-License-Identifier: Apache-2.0 """ diff --git a/tools/python/mbed_platformio/build_mbed_ce.py b/tools/python/mbed_platformio/build_mbed_ce.py index e8776e8a9fb..9e001aa7a18 100644 --- a/tools/python/mbed_platformio/build_mbed_ce.py +++ b/tools/python/mbed_platformio/build_mbed_ce.py @@ -10,15 +10,19 @@ from __future__ import annotations -import pathlib -from pathlib import Path import json +import pathlib import sys +import typing +from pathlib import Path +from typing import TYPE_CHECKING, Any -from SCons.Script import DefaultEnvironment, ARGUMENTS -from SCons.Environment import Base as Environment +from click.parser import split_arg_string from platformio.proc import exec_command -import click +from SCons.Script import ARGUMENTS, DefaultEnvironment + +if TYPE_CHECKING: + from SCons.Environment import Base as Environment env: Environment = DefaultEnvironment() platform = env.PioPlatform() @@ -26,9 +30,9 @@ # Directories FRAMEWORK_DIR = Path(platform.get_package_dir("framework-mbed-ce")) -BUILD_DIR = Path(env.subst("$BUILD_DIR")) -PROJECT_DIR = Path(env.subst("$PROJECT_DIR")) -PROJECT_SRC_DIR = Path(env.subst("$PROJECT_SRC_DIR")) +BUILD_DIR = Path(typing.cast(str, env.subst("$BUILD_DIR"))) +PROJECT_DIR = Path(typing.cast(str, env.subst("$PROJECT_DIR"))) +PROJECT_SRC_DIR = Path(typing.cast(str, env.subst("$PROJECT_SRC_DIR"))) CMAKE_API_DIR = BUILD_DIR / ".cmake" / "api" / "v1" CMAKE_API_QUERY_DIR = CMAKE_API_DIR / "query" CMAKE_API_REPLY_DIR = CMAKE_API_DIR / "reply" @@ -46,7 +50,6 @@ # This script is run by SCons so it does not have access to any other Python modules by default. 
sys.path.append(str(FRAMEWORK_DIR / "tools" / "python")) -from mbed_platformio.pio_variants import PIO_VARIANT_TO_MBED_TARGET from mbed_platformio.cmake_to_scons_converter import ( build_library, extract_defines, @@ -55,22 +58,22 @@ extract_link_args, find_included_files, ) +from mbed_platformio.pio_variants import PIO_VARIANT_TO_MBED_TARGET -def get_mbed_target(): - board_type = env.subst("$BOARD") +def get_mbed_target() -> str: + board_type = typing.cast(str, env.subst("$BOARD")) variant = PIO_VARIANT_TO_MBED_TARGET[board_type] if board_type in PIO_VARIANT_TO_MBED_TARGET else board_type.upper() return board.get("build.mbed_variant", variant) -def is_proper_mbed_ce_project(): +def is_proper_mbed_ce_project() -> bool: return all(path.is_file() for path in (PROJECT_MBED_APP_JSON5,)) -def create_default_project_files(): - print("Mbed CE: Creating default project files") +def create_default_project_files() -> None: if not PROJECT_MBED_APP_JSON5.exists(): - PROJECT_MBED_APP_JSON5.write_text( + _ = PROJECT_MBED_APP_JSON5.write_text( """ { "target_overrides": { @@ -87,13 +90,13 @@ def create_default_project_files(): ) -def is_cmake_reconfigure_required(): +def is_cmake_reconfigure_required() -> bool: cmake_cache_file = BUILD_DIR / "CMakeCache.txt" cmake_config_files = [PROJECT_MBED_APP_JSON5, PROJECT_CMAKELISTS_TXT] ninja_buildfile = BUILD_DIR / "build.ninja" if not cmake_cache_file.exists(): - print(f"Mbed CE: Reconfigure required because CMake cache does not exist") + print("Mbed CE: Reconfigure required because CMake cache does not exist") return True if not CMAKE_API_REPLY_DIR.is_dir() or not any(CMAKE_API_REPLY_DIR.iterdir()): print("Mbed CE: Reconfigure required because CMake API reply dir is missing") @@ -116,11 +119,14 @@ def is_cmake_reconfigure_required(): return False -def run_tool(command_and_args: list[str] | None = None): +def run_tool(command_and_args: list[str] | None = None) -> None: result = exec_command(command_and_args) - if result["returncode"] != 0: - sys.stderr.write(result["out"] + "\n") - sys.stderr.write(result["err"] + "\n") + + # Note: Pyright seems to think that this will always fail due to missing type annotations + # for exec_command(), but I believe the actual code is fine. 
+ if result["returncode"] != 0: # pyright: ignore[reportUnnecessaryComparison] + print(result["out"], file=sys.stderr) + print(result["err"], file=sys.stderr) env.Exit(1) if int(ARGUMENTS.get("PIOVERBOSE", 0)): @@ -148,33 +154,35 @@ def get_cmake_code_model(cmake_args: list) -> dict: (BUILD_DIR / "CMakeCache.txt").touch() if not CMAKE_API_REPLY_DIR.is_dir() or not any(CMAKE_API_REPLY_DIR.iterdir()): - sys.stderr.write("Error: Couldn't find CMake API response file\n") + print("Error: Couldn't find CMake API response file", file=sys.stderr) env.Exit(1) codemodel = {} for target in CMAKE_API_REPLY_DIR.iterdir(): if target.name.startswith("codemodel-v2"): - with open(target, "r") as fp: + with target.open(encoding="utf-8") as fp: codemodel = json.load(fp) - assert codemodel["version"]["major"] == 2 + if codemodel["version"]["major"] != 2: + print("Warning: Unexpected CMake code model version, reading compilation data may fail!", file=sys.stderr) + return codemodel -def get_target_config(project_configs: dict, target_index): - target_json = project_configs.get("targets")[target_index].get("jsonFile", "") +def get_target_config(project_configs: dict, target_index: int) -> dict[str, Any]: + target_json = project_configs["targets"][target_index].get("jsonFile", "") target_config_file = CMAKE_API_REPLY_DIR / target_json if not target_config_file.is_file(): - sys.stderr.write("Error: Couldn't find target config %s\n" % target_json) + print(f"Error: Couldn't find target config {target_json}", file=sys.stderr) env.Exit(1) - with open(target_config_file) as fp: + with target_config_file.open(encoding="utf-8") as fp: return json.load(fp) def load_target_configurations(cmake_codemodel: dict) -> dict: configs = {} - project_configs = cmake_codemodel.get("configurations")[0] + project_configs = cmake_codemodel["configurations"][0] for config in project_configs.get("projects", []): for target_index in config.get("targetIndexes", []): target_config = get_target_config(project_configs, target_index) @@ -216,20 +224,20 @@ def get_components_map( result = {} for config in get_targets_by_type(target_configs, target_types, ignore_components): if "nameOnDisk" not in config: - config["nameOnDisk"] = "lib%s.a" % config["name"] + config["nameOnDisk"] = "lib{}.a".format(config["name"]) result[config["id"]] = {"config": config} return result -def build_components(env: Environment, components_map: dict, project_src_dir: pathlib.Path): +def build_components(env: Environment, components_map: dict, project_src_dir: pathlib.Path) -> None: for k, v in components_map.items(): components_map[k]["lib"] = build_library( env, v["config"], project_src_dir, FRAMEWORK_DIR, pathlib.Path("$BUILD_DIR/mbed-os") ) -def get_app_defines(app_config: dict): +def get_app_defines(app_config: dict) -> list[tuple[str, str]]: return extract_defines(app_config["compileGroups"][0]) @@ -250,12 +258,13 @@ def get_app_defines(app_config: dict): "-DPLATFORMIO_PROJECT_PATH=" + str(PROJECT_DIR.as_posix()), "-DMBED_TARGET=" + get_mbed_target(), "-DUPLOAD_METHOD=NONE", # Disable Mbed CE upload method system as PlatformIO has its own + # Add in any extra options from higher layers + *split_arg_string(board.get("build.cmake_extra_args", "")), ] - + click.parser.split_arg_string(board.get("build.cmake_extra_args", "")) ) if not project_codemodel: - sys.stderr.write("Error: Couldn't find code model generated by CMake\n") + print("Error: Couldn't find code model generated by CMake", file=sys.stderr) env.Exit(1) print("Mbed CE: Reading CMake configuration...") 
@@ -279,7 +288,7 @@ def get_app_defines(app_config: dict): # within this archive. mbed_ce_lib_path = pathlib.Path("$BUILD_DIR") / "mbed-os" / "libmbed-os.a" link_args = ["-Wl,--whole-archive", '"' + str(mbed_ce_lib_path) + '"', "-Wl,--no-whole-archive"] -env.Depends("$BUILD_DIR/$PROGNAME$PROGSUFFIX", str(mbed_ce_lib_path)) +_ = env.Depends("$BUILD_DIR/$PROGNAME$PROGSUFFIX", str(mbed_ce_lib_path)) # Get other linker flags from Mbed. We want these to appear after the application objects and Mbed libraries # because they contain the C/C++ library link flags. @@ -288,7 +297,7 @@ def get_app_defines(app_config: dict): # The CMake build system adds a flag in mbed_set_post_build() to output a map file. # We need to do that here. map_file = BUILD_DIR / "firmware.map" -link_args.append(f"-Wl,-Map={str(map_file)}") +link_args.append(f"-Wl,-Map={map_file!s}") ## Build environment configuration ------------------------------------------------------------------------------------- @@ -307,7 +316,7 @@ def get_app_defines(app_config: dict): # So, we have to do this after we are done using the results of said query. print("Mbed CE: Generating linker script...") project_ld_script = generate_project_ld_script() -env.Depends("$BUILD_DIR/$PROGNAME$PROGSUFFIX", str(project_ld_script)) +_ = env.Depends("$BUILD_DIR/$PROGNAME$PROGSUFFIX", str(project_ld_script)) env.Append(LDSCRIPT_PATH=str(project_ld_script)) print("Mbed CE: Build environment configured.") diff --git a/tools/python/mbed_platformio/cmake_to_scons_converter.py b/tools/python/mbed_platformio/cmake_to_scons_converter.py index cf920d5b825..e59fe968dc8 100644 --- a/tools/python/mbed_platformio/cmake_to_scons_converter.py +++ b/tools/python/mbed_platformio/cmake_to_scons_converter.py @@ -8,14 +8,19 @@ from __future__ import annotations import collections - -from SCons.Environment import Base as Environment import pathlib -import click +import typing +from typing import TYPE_CHECKING, Sequence + +from click.parser import split_arg_string + +if TYPE_CHECKING: + from SCons.Environment import Base as Environment + from SCons.Node import NodeList def extract_defines(compile_group: dict) -> list[tuple[str, str]]: - def _normalize_define(define_string): + def _normalize_define(define_string: str) -> tuple[str, str]: define_string = define_string.strip() if "=" in define_string: define, value = define_string.split("=", maxsplit=1) @@ -24,7 +29,10 @@ def _normalize_define(define_string): elif '"' in value and not value.startswith("\\"): value = value.replace('"', '\\"') return define, value - return define_string + + # If a define is passed without a value on the command line it gets set equal to 1 by the compiler. + # We can replicate that behavior here to turn every define into a key-value pair. + return define_string, "1" result = [_normalize_define(d.get("define", "")) for d in compile_group.get("defines", []) if d] @@ -46,8 +54,8 @@ def prepare_build_envs(target_json: dict, default_env: Environment) -> list[Envi target_compile_groups = target_json.get("compileGroups", []) if not target_compile_groups: print( - "Warning! The `%s` component doesn't register any source files. " - "Check if sources are set in component's CMakeLists.txt!" % target_json["name"] + f"Warning! The `{target_json['name']}` component doesn't register any source files. " + "Check if sources are set in component's CMakeLists.txt!" ) for cg in target_compile_groups: @@ -83,6 +91,7 @@ def compile_source_files( ) -> list: """ Generates SCons rules to compile the source files in a target. 
+ Returns list of object files to build. :param framework_dir: Path to the Mbed CE framework source @@ -106,9 +115,8 @@ def compile_source_files( elif src_path.is_relative_to(framework_dir): obj_path = (framework_obj_dir / src_path.relative_to(framework_dir)).with_suffix(".o") else: - raise RuntimeError( - f"Source path {src_path!s} outside of project source dir and framework dir, don't know where to save object file!" - ) + msg = f"Source path {src_path!s} outside of project source dir and framework dir, don't know where to save object file!" + raise RuntimeError(msg) env = build_envs[compile_group_idx] @@ -128,7 +136,7 @@ def build_library( project_src_dir: pathlib.Path, framework_dir: pathlib.Path, framework_obj_dir: pathlib.Path, -): +) -> NodeList: lib_name = lib_config["nameOnDisk"] lib_path = lib_config["paths"]["build"] lib_objects = compile_source_files(lib_config, default_env, project_src_dir, framework_dir, framework_obj_dir) @@ -147,15 +155,15 @@ def _get_flags_for_compile_group(compile_group_json: dict) -> list[str]: fragment = ccfragment.get("fragment", "").strip() if not fragment or fragment.startswith("-D"): continue - flags.extend(click.parser.split_arg_string(fragment)) + flags.extend(split_arg_string(fragment)) return flags -def extract_flags(target_json: dict) -> dict[str, list[str]]: +def extract_flags(target_json: dict) -> dict[str, list[str] | None]: """ Returns a dictionary with flags for SCons based on a given CMake target """ - default_flags = collections.defaultdict(list) + default_flags: dict[str, list[str]] = collections.defaultdict(list) for cg in target_json["compileGroups"]: default_flags[cg["language"]].extend(_get_flags_for_compile_group(cg)) @@ -169,12 +177,12 @@ def extract_flags(target_json: dict) -> dict[str, list[str]]: def find_included_files(environment: Environment) -> set[str]: """ - Process a list of flags produced by extract_flags() to find files manually included by '-include' + Process an environment produced by prepare_build_envs() to find files manually included by '-include' """ result = set() for flag_var in ["CFLAGS", "CXXFLAGS", "CCFLAGS"]: - language_flags = environment.get(flag_var) - for index in range(0, len(language_flags)): + language_flags = typing.cast(Sequence[str], environment.get(flag_var)) + for index in range(len(language_flags)): if language_flags[index] == "-include" and index < len(language_flags) - 1: result.add(language_flags[index + 1]) return result @@ -201,7 +209,6 @@ def extract_link_args(target_json: dict) -> list[str]: """ Extract the linker flags from a CMake target """ - result = [] for f in target_json.get("link", {}).get("commandFragments", []): @@ -209,7 +216,7 @@ def extract_link_args(target_json: dict) -> list[str]: fragment_role = f.get("role", "").strip() if not fragment or not fragment_role: continue - args = click.parser.split_arg_string(fragment) + args = split_arg_string(fragment) if fragment_role == "flags": result.extend(args) diff --git a/tools/python/mbed_platformio/pio_variants.py b/tools/python/mbed_platformio/pio_variants.py index 7d01713ef96..9f76f485f84 100644 --- a/tools/python/mbed_platformio/pio_variants.py +++ b/tools/python/mbed_platformio/pio_variants.py @@ -1,5 +1,7 @@ """ -Maps PIO variant name to Mbed target name, in the situation where the Mbed target name is different +Maps PIO variant name to Mbed target name. 
+ +This is for the situation where the Mbed target name is different from the uppercased version of the variant name Copyright (c) 2025 Jamie Smith diff --git a/tools/python/mbed_tools/build/__init__.py b/tools/python/mbed_tools/build/__init__.py index 40c5c22f8d4..c91a74a8823 100644 --- a/tools/python/mbed_tools/build/__init__.py +++ b/tools/python/mbed_tools/build/__init__.py @@ -2,15 +2,12 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Provides the core build system for Mbed OS, which relies on CMake and Ninja as underlying technologies. - -The functionality covered in this package includes the following: +""" +Provides the core Python parts of the build system for Mbed OS. -- Execution of Mbed Pre-Build stages to determine appropriate configuration for Mbed OS and the build process. -- Invocation of the build process for the command line tools and online build service. -- Export of build instructions to third party command line tools and IDEs. +This module contains the configuration generation functionality, which processes JSON files into a set of target labels, +flags, and options. """ -from mbed_tools.build.build import build_project, generate_build_system -from mbed_tools.build.config import generate_config -from mbed_tools.build.flash import flash_binary +from mbed_tools.build.config import generate_config as generate_config +from mbed_tools.build.flash import flash_binary as flash_binary diff --git a/tools/python/mbed_tools/build/_internal/cmake_file.py b/tools/python/mbed_tools/build/_internal/cmake_file.py index b86bcdcb62f..18edf800f4d 100644 --- a/tools/python/mbed_tools/build/_internal/cmake_file.py +++ b/tools/python/mbed_tools/build/_internal/cmake_file.py @@ -5,7 +5,6 @@ """Module in charge of CMake file generation.""" import pathlib - from typing import Any import jinja2 @@ -17,7 +16,8 @@ def render_mbed_config_cmake_template(config: Config, toolchain_name: str, target_name: str) -> str: - """Renders the mbed_config jinja template with the target and project config settings. + """ + Renders the mbed_config jinja template with the target and project config settings. Args: config: Config object holding information parsed from the mbed config system. 
@@ -30,7 +30,7 @@ def render_mbed_config_cmake_template(config: Config, toolchain_name: str, targe env = jinja2.Environment(loader=jinja2.PackageLoader("mbed_tools.build", str(TEMPLATES_DIRECTORY))) env.filters["to_hex"] = to_hex template = env.get_template(TEMPLATE_NAME) - config["supported_c_libs"] = [x for x in config["supported_c_libs"][toolchain_name.lower()]] + config["supported_c_libs"] = list(config["supported_c_libs"][toolchain_name.lower()]) context = { "target_name": target_name, diff --git a/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py b/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py index 79e2bd360e1..511987b9c25 100644 --- a/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py +++ b/tools/python/mbed_tools/build/_internal/config/assemble_build_config.py @@ -4,22 +4,19 @@ # """Configuration assembly algorithm.""" -import itertools - from dataclasses import dataclass from pathlib import Path from typing import Iterable, List, Optional, Set -from setuptools.build_meta import build_editable - -from mbed_tools.project import MbedProgram -from mbed_tools.build._internal.config.config import Config from mbed_tools.build._internal.config import source +from mbed_tools.build._internal.config.config import Config from mbed_tools.build._internal.find_files import LabelFilter, RequiresFilter, filter_files, find_files +from mbed_tools.project import MbedProgram def assemble_config(target_attributes: dict, program: MbedProgram) -> Config: - """Assemble config for given target and program directory. + """ + Assemble config for given target and program directory. Mbed library and application specific config parameters are parsed from mbed_lib.json and mbed_app.json files located in the project source tree. @@ -53,7 +50,7 @@ def assemble_config(target_attributes: dict, program: MbedProgram) -> Config: config.json_sources.append(program.files.custom_targets_json) # Make all JSON sources relative paths to the program root - def make_relative_if_possible(path: Path): + def make_relative_if_possible(path: Path) -> Path: # Sadly, Pathlib did not gain a better way to do this until newer python versions. 
try: return path.relative_to(program.root) @@ -68,7 +65,7 @@ def make_relative_if_possible(path: Path): def _assemble_config_from_sources( target_attributes: dict, mbed_lib_files: List[Path], mbed_app_file: Optional[Path] = None ) -> Config: - config = Config(source.prepare(target_attributes, source_name="target")) + config = Config(**source.prepare(target_attributes, source_name="target")) previous_filter_data = None app_data = None if mbed_app_file: diff --git a/tools/python/mbed_tools/build/_internal/config/config.py b/tools/python/mbed_tools/build/_internal/config/config.py index 5fd5fcea6f0..0b938201994 100644 --- a/tools/python/mbed_tools/build/_internal/config/config.py +++ b/tools/python/mbed_tools/build/_internal/config/config.py @@ -4,19 +4,25 @@ # """Build configuration representation.""" -import logging +from __future__ import annotations +import logging from collections import UserDict -from typing import Any, Iterable, Hashable, List -import pathlib +from typing import Any, Hashable, Iterable, List + +import typing_extensions -from mbed_tools.build._internal.config.source import Override, ConfigSetting +if typing_extensions.TYPE_CHECKING: + import pathlib + + from mbed_tools.build._internal.config.source import ConfigSetting, Override logger = logging.getLogger(__name__) class Config(UserDict): - """Mapping of config settings. + """ + Mapping of config settings. This object understands how to populate the different 'config sections' which all have different rules for how the settings are collected. @@ -26,8 +32,13 @@ class Config(UserDict): # List of JSON files used to create this config. Dumped to CMake at the end of configuration # so that it can regenerate configuration if the JSONs change. # All paths will be relative to the Mbed program root directory, or absolute if outside said directory. - json_sources: List[pathlib.Path] = [] + json_sources: List[pathlib.Path] + + def __init__(self, **kwargs: dict[str, Any]) -> None: + self.json_sources = [] + super().__init__(**kwargs) + @typing_extensions.override def __setitem__(self, key: Hashable, item: Any) -> None: """Set an item based on its key.""" if key == CONFIG_SECTION: @@ -86,9 +97,10 @@ def _update_config_section(self, config_settings: List[ConfigSetting]) -> None: for setting in config_settings: logger.debug("Adding config setting: '%s.%s'", setting.namespace, setting.name) if setting in self.data.get(CONFIG_SECTION, []): - raise ValueError( + msg = ( f"Setting {setting.namespace}.{setting.name} already defined. You cannot duplicate config settings!" 
) + raise ValueError(msg) self.data[CONFIG_SECTION] = self.data.get(CONFIG_SECTION, []) + config_settings diff --git a/tools/python/mbed_tools/build/_internal/config/source.py b/tools/python/mbed_tools/build/_internal/config/source.py index f0b538b8da5..df49a08b75c 100644 --- a/tools/python/mbed_tools/build/_internal/config/source.py +++ b/tools/python/mbed_tools/build/_internal/config/source.py @@ -6,12 +6,11 @@ import logging import pathlib - from dataclasses import dataclass -from typing import Iterable, Any, Optional, List +from typing import Any, Iterable, List, Optional +from mbed_tools.build.exceptions import InvalidConfigOverrideError from mbed_tools.lib.json_helpers import decode_json_file -from mbed_tools.build.exceptions import InvalidConfigOverride from mbed_tools.lib.python_helpers import flatten_nested logger = logging.getLogger(__name__) @@ -27,7 +26,8 @@ def from_file( def prepare( input_data: dict, source_name: Optional[str] = None, target_filters: Optional[Iterable[str]] = None ) -> dict: - """Prepare a config source for entry into the Config object. + """ + Prepare a config source for entry into the Config object. Extracts config and override settings from the source. Flattens these nested dictionaries out into lists of objects which are namespaced in the way the Mbed config system expects. @@ -63,7 +63,8 @@ def prepare( @dataclass class ConfigSetting: - """Representation of a config setting. + """ + Representation of a config setting. Auto converts any list values to sets for faster operations and de-duplication of values. """ @@ -81,7 +82,8 @@ def __post_init__(self) -> None: @dataclass class Override: - """Representation of a config override. + """ + Representation of a config override. Checks for _add or _remove modifiers and splits them from the name. """ @@ -130,10 +132,10 @@ def _extract_config_settings(namespace: str, config_data: dict) -> List[ConfigSe def _extract_target_overrides( namespace: str, override_data: dict, allowed_target_labels: Iterable[str] ) -> List[Override]: - valid_target_data = dict() - for target_type in override_data: + valid_target_data = {} + for target_type, override in override_data.items(): if target_type == "*" or target_type in allowed_target_labels: - valid_target_data.update(override_data[target_type]) + valid_target_data.update(override) return _extract_overrides(namespace, valid_target_data) @@ -144,11 +146,12 @@ def _extract_overrides(namespace: str, override_data: dict) -> List[Override]: try: override_namespace, override_name = name.split(".") if override_namespace and override_namespace not in [namespace, "target"] and namespace != "app": - raise InvalidConfigOverride( + msg = ( "It is only possible to override config settings defined in an mbed_lib.json from mbed_app.json. " f"An override was defined by the lib `{namespace}` that attempts to override " f"`{override_namespace}.{override_name}`." ) + raise InvalidConfigOverrideError(msg) except ValueError: override_namespace = namespace override_name = name @@ -159,7 +162,8 @@ def _extract_overrides(namespace: str, override_data: dict) -> List[Override]: def _sanitise_value(val: Any) -> Any: - """Convert list values to sets and return scalar values and strings unchanged. + """ + Convert list values to sets and return scalar values and strings unchanged. For whatever reason, we allowed config settings to have values of any type available in the JSON spec. The value type can be a list, nested list, str, int, you name it. 
diff --git a/tools/python/mbed_tools/build/_internal/find_files.py b/tools/python/mbed_tools/build/_internal/find_files.py index c7b931e1e8d..1d73637592b 100644 --- a/tools/python/mbed_tools/build/_internal/find_files.py +++ b/tools/python/mbed_tools/build/_internal/find_files.py @@ -4,9 +4,9 @@ # """Find files in MbedOS program directory.""" -from pathlib import Path import fnmatch -from typing import Callable, Iterable, Optional, List, Tuple +from pathlib import Path +from typing import Callable, Iterable, List, Optional, Tuple from mbed_tools.lib.json_helpers import decode_json_file @@ -23,7 +23,8 @@ def find_files(filename: str, directory: Path) -> List[Path]: def _find_files(filename: str, directory: Path, filters: Optional[List[Callable]] = None) -> List[Path]: - """Recursively find files by name under a given directory. + """ + Recursively find files by name under a given directory. This function automatically applies rules from .mbedignore files found during traversal. @@ -47,22 +48,21 @@ def _find_files(filename: str, directory: Path, filters: Optional[List[Callable] # as it might contain rules for currently processed directory, as well as its descendants. mbedignore = Path(directory, ".mbedignore") if mbedignore in children: - filters = filters + [MbedignoreFilter.from_file(mbedignore)] + filters = [*filters, MbedignoreFilter.from_file(mbedignore)] # Remove files and directories that don't match current set of filters filtered_children = filter_files(children, filters) for child in filtered_children: - if child.is_symlink(): - child = child.absolute().resolve() + resolved_child = child.absolute().resolve() if child.is_symlink() else child - if child.is_dir(): + if resolved_child.is_dir(): # If processed child is a directory, recurse with current set of filters - result += _find_files(filename, child, filters) + result += _find_files(filename, resolved_child, filters) - if child.is_file() and child.name == filename: + if resolved_child.is_file() and resolved_child.name == filename: # We've got a match - result.append(child) + result.append(resolved_child) return result @@ -73,15 +73,17 @@ def filter_files(files: Iterable[Path], filters: Iterable[Callable]) -> Iterable class RequiresFilter: - """Filter out mbed libraries not needed by application. + """ + Filter out mbed libraries not needed by application. The 'requires' config option in mbed_app.json can specify list of mbed libraries (mbed_lib.json) that application requires. Apply 'requires' filter to remove mbed_lib.json files not required by application. """ - def __init__(self, requires: Iterable[str]): - """Initialise the filter attributes. + def __init__(self, requires: Iterable[str]) -> None: + """ + Initialise the filter attributes. Args: requires: List of required mbed libraries. @@ -94,7 +96,8 @@ def __call__(self, path: Path) -> bool: class LabelFilter: - """Filter out given paths using path labelling rules. + """ + Filter out given paths using path labelling rules. If a path is labelled with given type, but contains label value which is not allowed, it will be filtered out. @@ -108,32 +111,35 @@ class LabelFilter: - "/path/FEATURE_FOO/FEATURE_BAR/somefile.txt" will be filtered out """ - def __init__(self, label_type: str, allowed_label_values: Iterable[str]): - """Initialise the filter attributes. + def __init__(self, label_type: str, allowed_label_values: Iterable[str]) -> None: + """ + Initialise the filter attributes. Args: label_type: Type of the label to filter with. In filtered paths, it prefixes the value. 
allowed_label_values: Values which are allowed for the given label type. """ self._label_type = label_type - self._allowed_labels = set(f"{label_type}_{label_value}" for label_value in allowed_label_values) + self._allowed_labels = {f"{label_type}_{label_value}" for label_value in allowed_label_values} def __call__(self, path: Path) -> bool: """Return True if given path contains only allowed labels - should not be filtered out.""" - labels = set(part for part in path.parts if self._label_type in part) + labels = {part for part in path.parts if self._label_type in part} return labels.issubset(self._allowed_labels) class MbedignoreFilter: - """Filter out given paths based on rules found in .mbedignore files. + """ + Filter out given paths based on rules found in .mbedignore files. Patterns in .mbedignore use unix shell-style wildcards (fnmatch). It means that functionality, although similar is different to that found in .gitignore and friends. """ - def __init__(self, patterns: Tuple[str, ...]): - """Initialise the filter attributes. + def __init__(self, patterns: Tuple[str, ...]) -> None: + """ + Initialise the filter attributes. Args: patterns: List of patterns from .mbedignore to filter against. @@ -147,7 +153,8 @@ def __call__(self, path: Path) -> bool: @classmethod def from_file(cls, mbedignore_path: Path) -> "MbedignoreFilter": - """Return new instance with patterns read from .mbedignore file. + """ + Return new instance with patterns read from .mbedignore file. Constructed patterns are rooted in the directory of .mbedignore file. """ diff --git a/tools/python/mbed_tools/build/_internal/memory_banks.py b/tools/python/mbed_tools/build/_internal/memory_banks.py index b37afc22a70..1419652abe3 100644 --- a/tools/python/mbed_tools/build/_internal/memory_banks.py +++ b/tools/python/mbed_tools/build/_internal/memory_banks.py @@ -8,18 +8,20 @@ from typing import TYPE_CHECKING if TYPE_CHECKING: - from typing import Dict, Any, Set, TypedDict, NotRequired + from typing import Any, TypedDict + + from typing_extensions import NotRequired + + from mbed_tools.build._internal.config.config import Config + from mbed_tools.project import MbedProgram import copy import logging import humanize -from mbed_tools.lib.json_helpers import decode_json_file -from mbed_tools.project import MbedProgram - from mbed_tools.build.exceptions import MbedBuildError -from mbed_tools.build._internal.config.config import Config +from mbed_tools.lib.json_helpers import decode_json_file logger = logging.getLogger(__name__) @@ -35,15 +37,15 @@ class MemoryBankInfo(TypedDict): start: int default: NotRequired[bool] startup: NotRequired[bool] - access: Dict[str, bool] + access: dict[str, bool] class BanksByType(TypedDict): """ Info about all memory banks, ROM and RAM """ - ROM: Dict[str, MemoryBankInfo] - RAM: Dict[str, MemoryBankInfo] + ROM: dict[str, MemoryBankInfo] + RAM: dict[str, MemoryBankInfo] # Deprecated memory configuration properties from old (Mbed CLI 1) configuration system @@ -62,14 +64,12 @@ class BanksByType(TypedDict): BANK_TYPES = ("RAM", "ROM") -def incorporate_memory_bank_data_from_cmsis(target_attributes: Dict[str, Any], program: MbedProgram) -> None: +def incorporate_memory_bank_data_from_cmsis(target_attributes: dict[str, Any], program: MbedProgram) -> None: """ - Incorporate the memory bank information from the CMSIS JSON file into - the target attributes. + Incorporate the memory bank information from the CMSIS JSON file into the target attributes. 
:param target_attributes: Merged targets.json content for this target """ - if "device_name" not in target_attributes: # No CMSIS device name for this target return @@ -77,15 +77,14 @@ def incorporate_memory_bank_data_from_cmsis(target_attributes: Dict[str, Any], p cmsis_mcu_descriptions = decode_json_file(program.mbed_os.cmsis_mcu_descriptions_json_file) if target_attributes["device_name"] not in cmsis_mcu_descriptions: - raise MbedBuildError( - f"""Target specifies device_name {target_attributes["device_name"]} but this device is not + msg = f"""Target specifies device_name {target_attributes["device_name"]} but this device is not listed in {program.mbed_os.cmsis_mcu_descriptions_json_file}. Perhaps you need to use the 'python -m mbed_tools.cli.main cmsis-mcu-descr fetch-missing' command to download the missing MCU description?""" - ) + raise MbedBuildError(msg) mcu_description = cmsis_mcu_descriptions[target_attributes["device_name"]] - mcu_memory_description: Dict[str, Dict[str, Any]] = mcu_description["memories"] + mcu_memory_description: dict[str, dict[str, Any]] = mcu_description["memories"] # If a memory bank is not already described in targets.json, import its description from the CMSIS # MCU description. @@ -96,29 +95,29 @@ def incorporate_memory_bank_data_from_cmsis(target_attributes: Dict[str, Any], p target_attributes["memory_banks"] = target_memory_banks_section -def _apply_configured_overrides(banks_by_type: BanksByType, bank_config: Dict[str, Dict[str, int]]) -> BanksByType: +def _apply_configured_overrides(banks_by_type: BanksByType, bank_config: dict[str, dict[str, int]]) -> BanksByType: """ - Apply overrides from configuration to the physical memory bank information, producing the configured - memory bank information. + Apply overrides from configuration to the physical memory bank information, producing the configured memory banks. + :param bank_config: memory_bank_config element from target JSON :param banks_by_type: Physical memory bank information """ - configured_memory_banks = copy.deepcopy(banks_by_type) for bank_name, bank_data in bank_config.items(): if bank_name not in configured_memory_banks["RAM"] and bank_name not in configured_memory_banks["ROM"]: - raise MbedBuildError(f"Attempt to configure memory bank {bank_name} which does not exist for this device.") + msg = f"Attempt to configure memory bank {bank_name} which does not exist for this device." 
+ raise MbedBuildError(msg) bank_type = "RAM" if bank_name in configured_memory_banks["RAM"] else "ROM" if len(set(bank_data.keys()) - {"size", "start"}): - raise MbedBuildError( - "Only the size and start properties of a memory bank can be configured in memory_bank_config" - ) + msg = "Only the size and start properties of a memory bank can be configured in memory_bank_config" + raise MbedBuildError(msg) for property_name, property_value in bank_data.items(): if not isinstance(property_value, int): - raise MbedBuildError(f"Memory bank '{bank_name}': configured {property_name} must be an integer") + msg = f"Memory bank '{bank_name}': configured {property_name} must be an integer" + raise MbedBuildError(msg) configured_memory_banks[bank_type][bank_name][property_name] = property_value @@ -128,10 +127,10 @@ def _apply_configured_overrides(banks_by_type: BanksByType, bank_config: Dict[st def _print_mem_bank_summary(banks_by_type: BanksByType, configured_banks_by_type: BanksByType) -> None: """ Print a summary of the memory banks to the console + :param banks_by_type: Physical memory bank information :param configured_banks_by_type: Configured memory bank information """ - print("Summary of available memory banks:") for bank_type in BANK_TYPES: banks = banks_by_type[bank_type] @@ -171,13 +170,14 @@ def _print_mem_bank_summary(banks_by_type: BanksByType, configured_banks_by_type print() -def _generate_macros_for_memory_banks(banks_by_type: BanksByType, configured_banks_by_type: BanksByType) -> Set[str]: +def _generate_macros_for_memory_banks(banks_by_type: BanksByType, configured_banks_by_type: BanksByType) -> set[str]: """ Generate a set of macros to define to pass the memory bank information into Mbed. + :param banks_by_type: Physical memory bank information :param configured_banks_by_type: Configured memory bank information """ - all_macros: Set[str] = set() + all_macros: set[str] = set() for bank_type in BANK_TYPES: banks = banks_by_type[bank_type] @@ -204,15 +204,15 @@ def _generate_macros_for_memory_banks(banks_by_type: BanksByType, configured_ban return all_macros -def process_memory_banks(config: Config) -> Dict[str, BanksByType]: +def process_memory_banks(config: Config) -> dict[str, BanksByType]: """ - Process memory bank information in the config. Reads the 'memory_banks' and - 'memory_bank_config' sections and adds the memory_bank_macros section accordingly. + Processes memory bank information in the config. + + Reads the 'memory_banks' and 'memory_bank_config' sections and adds the memory_bank_macros section accordingly. 
:param config: Config structure containing merged data from every JSON file (app, lib, and targets) :return: Memory bank information structure that shall be written to memory_banks.json """ - memory_banks = config.get("memory_banks", {}) # Check for deprecated properties @@ -230,9 +230,11 @@ def process_memory_banks(config: Config) -> Dict[str, BanksByType]: banks_by_type: BanksByType = {"ROM": {}, "RAM": {}} for bank_name, bank_data in memory_banks.items(): if "access" not in bank_data or "start" not in bank_data or "size" not in bank_data: - raise MbedBuildError(f"Memory bank '{bank_name}' must contain 'access', 'size', and 'start' elements") + msg = f"Memory bank '{bank_name}' must contain 'access', 'size', and 'start' elements" + raise MbedBuildError(msg) if not isinstance(bank_data["size"], int) or not isinstance(bank_data["start"], int): - raise MbedBuildError(f"Memory bank '{bank_name}': start and size must be integers") + msg = f"Memory bank '{bank_name}': start and size must be integers" + raise MbedBuildError(msg) if bank_data["access"]["read"] and bank_data["access"]["write"]: banks_by_type["RAM"][bank_name] = bank_data diff --git a/tools/python/mbed_tools/build/_internal/write_files.py b/tools/python/mbed_tools/build/_internal/write_files.py index d4e1d89efb8..b6ca26c1072 100644 --- a/tools/python/mbed_tools/build/_internal/write_files.py +++ b/tools/python/mbed_tools/build/_internal/write_files.py @@ -5,11 +5,13 @@ """Writes out files to specified locations.""" import pathlib -from mbed_tools.build.exceptions import InvalidExportOutputDirectory + +from mbed_tools.build.exceptions import InvalidExportOutputDirectoryError def write_file(file_path: pathlib.Path, file_contents: str) -> None: - """Writes out a string to a file. + """ + Writes out a string to a file. If the intermediate directories to the output directory don't exist, this function will create them. @@ -22,7 +24,8 @@ def write_file(file_path: pathlib.Path, file_contents: str) -> None: """ output_directory = file_path.parent if output_directory.is_file(): - raise InvalidExportOutputDirectory("Output directory cannot be a path to a file.") + msg = "Output directory cannot be a path to a file." + raise InvalidExportOutputDirectoryError(msg) output_directory.mkdir(parents=True, exist_ok=True) - file_path.write_text(file_contents) + _ = file_path.write_text(file_contents) diff --git a/tools/python/mbed_tools/build/build.py b/tools/python/mbed_tools/build/build.py deleted file mode 100644 index 4b4c08270b5..00000000000 --- a/tools/python/mbed_tools/build/build.py +++ /dev/null @@ -1,59 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -"""Configure and build a CMake project.""" - -import logging -import pathlib -import subprocess - -from typing import Optional - -from mbed_tools.build.exceptions import MbedBuildError - - -logger = logging.getLogger(__name__) - - -def build_project(build_dir: pathlib.Path, target: Optional[str] = None) -> None: - """Build a project using CMake to invoke Ninja. - - Args: - build_dir: Path to the CMake build tree. - target: The CMake target to build (e.g 'install') - """ - _check_ninja_found() - target_flag = ["--target", target] if target is not None else [] - _cmake_wrapper("--build", str(build_dir), *target_flag) - - -def generate_build_system(source_dir: pathlib.Path, build_dir: pathlib.Path, profile: str) -> None: - """Configure a project using CMake. - - Args: - source_dir: Path to the CMake source tree. 
- build_dir: Path to the CMake build tree. - profile: The Mbed build profile (develop, debug or release). - """ - _check_ninja_found() - _cmake_wrapper("-S", str(source_dir), "-B", str(build_dir), "-GNinja", f"-DCMAKE_BUILD_TYPE={profile}") - - -def _cmake_wrapper(*cmake_args: str) -> None: - try: - logger.debug("Running CMake with args: %s", cmake_args) - subprocess.run(["cmake", *cmake_args], check=True) - except FileNotFoundError: - raise MbedBuildError("Could not find CMake. Please ensure CMake is installed and added to PATH.") - except subprocess.CalledProcessError: - raise MbedBuildError("CMake invocation failed!") - - -def _check_ninja_found() -> None: - try: - subprocess.run(["ninja", "--version"], check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - except FileNotFoundError: - raise MbedBuildError( - "Could not find the 'Ninja' build program. Please ensure 'Ninja' is installed and added to PATH." - ) diff --git a/tools/python/mbed_tools/build/config.py b/tools/python/mbed_tools/build/config.py index 55db7fed93f..c96582201a1 100644 --- a/tools/python/mbed_tools/build/config.py +++ b/tools/python/mbed_tools/build/config.py @@ -4,19 +4,19 @@ # """Parses the Mbed configuration system and generates a CMake config script.""" +import json import pathlib - from typing import Any, Tuple -import json -from mbed_tools.lib.json_helpers import decode_json_file -from mbed_tools.project import MbedProgram -from mbed_tools.targets import get_target_by_name from mbed_tools.build._internal.cmake_file import render_mbed_config_cmake_template -from mbed_tools.build._internal.config.assemble_build_config import Config, assemble_config +from mbed_tools.build._internal.config.assemble_build_config import assemble_config +from mbed_tools.build._internal.config.config import Config from mbed_tools.build._internal.memory_banks import incorporate_memory_bank_data_from_cmsis, process_memory_banks from mbed_tools.build._internal.write_files import write_file from mbed_tools.build.exceptions import MbedBuildError +from mbed_tools.lib.json_helpers import decode_json_file +from mbed_tools.project import MbedProgram +from mbed_tools.targets import get_target_by_name CMAKE_CONFIG_FILE = "mbed_config.cmake" MEMORY_BANKS_JSON_FILE = "memory_banks.json" @@ -24,7 +24,8 @@ def generate_config(target_name: str, toolchain: str, program: MbedProgram) -> Tuple[Config, pathlib.Path]: - """Generate an Mbed config file after parsing the Mbed config system. + """ + Generate an Mbed config file after parsing the Mbed config system. Args: target_name: Name of the target to configure for. @@ -43,7 +44,9 @@ def generate_config(target_name: str, toolchain: str, program: MbedProgram) -> T # Process memory banks and save JSON data for other tools (e.g. 
memap) to use memory_banks_json_content = process_memory_banks(config) program.files.cmake_build_dir.mkdir(parents=True, exist_ok=True) - (program.files.cmake_build_dir / MEMORY_BANKS_JSON_FILE).write_text(json.dumps(memory_banks_json_content, indent=4)) + _ = (program.files.cmake_build_dir / MEMORY_BANKS_JSON_FILE).write_text( + json.dumps(memory_banks_json_content, indent=4) + ) cmake_file_contents = render_mbed_config_cmake_template( target_name=target_name, config=config, toolchain_name=toolchain @@ -61,11 +64,12 @@ def _load_raw_targets_data(program: MbedProgram) -> Any: custom_targets_data = decode_json_file(program.files.custom_targets_json) for custom_target in custom_targets_data: if custom_target in targets_data: - raise MbedBuildError( + msg = ( f"Error found in {program.files.custom_targets_json}.\n" f"A target with the name '{custom_target}' already exists in targets.json. " "Please give your custom target a unique name so it can be identified." ) + raise MbedBuildError(msg) targets_data.update(custom_targets_data) diff --git a/tools/python/mbed_tools/build/exceptions.py b/tools/python/mbed_tools/build/exceptions.py index 12c3defdb7d..6863dbcb9e1 100644 --- a/tools/python/mbed_tools/build/exceptions.py +++ b/tools/python/mbed_tools/build/exceptions.py @@ -11,7 +11,7 @@ class MbedBuildError(ToolsError): """Base public exception for the mbed-build package.""" -class InvalidExportOutputDirectory(MbedBuildError): +class InvalidExportOutputDirectoryError(MbedBuildError): """It is not possible to export to the provided output directory.""" @@ -23,5 +23,5 @@ class DeviceNotFoundError(MbedBuildError): """The requested device is not connected to your system.""" -class InvalidConfigOverride(MbedBuildError): +class InvalidConfigOverrideError(MbedBuildError): """A given config setting was invalid.""" diff --git a/tools/python/mbed_tools/build/flash.py b/tools/python/mbed_tools/build/flash.py index 8ee3282aa9c..5fa08c9c685 100644 --- a/tools/python/mbed_tools/build/flash.py +++ b/tools/python/mbed_tools/build/flash.py @@ -4,28 +4,30 @@ # """Flash binary onto the connected device.""" -import shutil import os import pathlib import platform +import shutil from mbed_tools.build.exceptions import BinaryFileNotFoundError def _flash_dev(disk: pathlib.Path, image_path: pathlib.Path) -> None: - """Flash device using copy method. + """ + Flash device using copy method. Args: disk: Device mount point. image_path: Image file to be copied to device. """ - shutil.copy(image_path, disk, follow_symlinks=False) - if not platform.system() == "Windows": + _ = shutil.copy(image_path, disk, follow_symlinks=False) + if platform.system() != "Windows": os.sync() def _build_binary_file_path(program_path: pathlib.Path, build_dir: pathlib.Path, hex_file: bool) -> pathlib.Path: - """Build binary file name. + """ + Build binary file name. Args: program_path: Path to the Mbed project. 
@@ -41,14 +43,16 @@ def _build_binary_file_path(program_path: pathlib.Path, build_dir: pathlib.Path, fw_fbase = build_dir / program_path.name fw_file = fw_fbase.with_suffix(".hex" if hex_file else ".bin") if not fw_file.exists(): - raise BinaryFileNotFoundError(f"Build program file (firmware) not found {fw_file}") + msg = f"Build program file (firmware) not found {fw_file}" + raise BinaryFileNotFoundError(msg) return fw_file def flash_binary( - mount_point: pathlib.Path, program_path: pathlib.Path, build_dir: pathlib.Path, mbed_target: str, hex_file: bool + mount_point: pathlib.Path, program_path: pathlib.Path, build_dir: pathlib.Path, hex_file: bool ) -> pathlib.Path: - """Flash binary onto a device. + """ + Flash binary onto a device. Look through the connected devices and flash the binary if the connected and built target matches. @@ -56,7 +60,6 @@ def flash_binary( mount_point: Mount point of the target device. program_path: Path to the Mbed project. build_dir: Path to the CMake build folder. - mbed_target: The name of the Mbed target to build for. hex_file: Use hex file. """ fw_file = _build_binary_file_path(program_path, build_dir, hex_file) diff --git a/tools/python/mbed_tools/cli/cmsis_mcu_descr.py b/tools/python/mbed_tools/cli/cmsis_mcu_descr.py index 13494e10f97..59e62e32c44 100644 --- a/tools/python/mbed_tools/cli/cmsis_mcu_descr.py +++ b/tools/python/mbed_tools/cli/cmsis_mcu_descr.py @@ -5,6 +5,7 @@ """ Subcommands to allow managing the list of CMSIS MCU descriptions that comes with Mbed. + The MCU description list is used both for generating docs, and for providing information to the code about the memory banks present on a device. @@ -14,26 +15,27 @@ This is needed since the index is missing certain MCUs and has wrong information about a few others. """ -from mbed_tools.lib.json_helpers import decode_json_file +from __future__ import annotations + +import datetime +import json +import logging +import os +import pathlib +import re +import sys +from typing import Any, Dict, Sequence, Set import click import cmsis_pack_manager import humanize -import pathlib -import os -import datetime -import logging -import json -import sys -import re -import argparse -from typing import Set, Dict, Any +from mbed_tools.lib.json_helpers import decode_json_file LOGGER = logging.getLogger(__name__) # Calculate path to Mbed OS JSON files -THIS_SCRIPT_DIR = pathlib.Path(os.path.dirname(__file__)) +THIS_SCRIPT_DIR = pathlib.Path(__file__).parent PROJECT_ROOT = THIS_SCRIPT_DIR.parent.parent.parent.parent.parent MBED_OS_DIR = THIS_SCRIPT_DIR.parent.parent.parent.parent TARGETS_JSON5_PATH = MBED_OS_DIR / "targets" / "targets.json5" @@ -42,7 +44,7 @@ # Top-level command @click.group(name="cmsis-mcu-descr", help="Manage CMSIS MCU description JSON file") -def cmsis_mcu_descr(): +def cmsis_mcu_descr() -> None: # Set up logger defaults LOGGER.setLevel(logging.INFO) @@ -51,25 +53,27 @@ def open_cmsis_cache(*, must_exist: bool = True) -> cmsis_pack_manager.Cache: """ Open an accessor to the CMSIS cache. Also prints how old the cache is. """ - cmsis_cache = cmsis_pack_manager.Cache(False, False) index_file_path = pathlib.Path(cmsis_cache.index_path) if not index_file_path.exists() and must_exist: - raise RuntimeError( - "CMSIS device descriptor cache does not exist! Run 'python -m mbed_tools.cli.main cmsis-mcu-descr reload-cache' to populate it!" - ) + msg = "CMSIS device descriptor cache does not exist! Run 'python -m mbed_tools.cli.main cmsis-mcu-descr reload-cache' to populate it!" 
+ raise RuntimeError(msg) if index_file_path.exists(): # Check how old the index file is - index_file_modified_time = datetime.datetime.fromtimestamp(index_file_path.stat().st_mtime) + index_file_modified_time = datetime.datetime.fromtimestamp( + index_file_path.stat().st_mtime, tz=datetime.timezone.utc + ) index_age = humanize.naturaltime(index_file_modified_time) LOGGER.info("CMSIS MCU description cache was last updated: %s", index_age) return cmsis_cache -def find_json_files(root_dir, exclude_dirs=[], file_pattern=r".*\.(json|json5)"): +def find_json_files( + root_dir: pathlib.Path, exclude_dirs: Sequence[str] | None = None, file_pattern: str = r".*\.(json|json5)" +) -> list[pathlib.Path]: """ Recursively searches for files matching the specified pattern in a given directory, excluding specified directories. @@ -81,6 +85,8 @@ def find_json_files(root_dir, exclude_dirs=[], file_pattern=r".*\.(json|json5)") Returns: A list of paths to found files. """ + if exclude_dirs is None: + exclude_dirs = [] json_files = [] for root, dirs, files in os.walk(root_dir): @@ -91,7 +97,7 @@ def find_json_files(root_dir, exclude_dirs=[], file_pattern=r".*\.(json|json5)") for file in files: if re.match(file_pattern, file): - json_files.append(pathlib.Path(os.path.join(root, file))) + json_files.append(pathlib.Path(root) / file) return json_files @@ -100,25 +106,19 @@ def get_mcu_names_used_by_targets_json5() -> Set[str]: """ Accumulate set of all `device_name` properties used by all targets defined in targets.json5 and custom_targets.json/json5. """ + LOGGER.info("Scanning targets.json5 for used MCU names...") + json_contents = decode_json_file(TARGETS_JSON5_PATH) # Search for files starting with "custom_targets" of type .json or .json5. Also exclude some folders like build and mbed-os exclude_dirs = ["build", "mbed-os", ".git"] file_pattern = r"custom_targets\.(json|json5)" - custom_targets_file = find_json_files(PROJECT_ROOT, exclude_dirs, file_pattern) + custom_targets_files = find_json_files(PROJECT_ROOT, exclude_dirs, file_pattern) - custom_targets_json_path = {} - for file in custom_targets_file: - if os.path.exists(file): - custom_targets_json_path = file - LOGGER.info(f"Custom_targets file detected - {custom_targets_json_path}") + for file in custom_targets_files: + LOGGER.info(f"Custom_targets file detected - {file}") + json_contents.update(decode_json_file(file)) used_mcu_names = set() - LOGGER.info("Scanning targets.json5 for used MCU names...") - json_contents = decode_json_file(TARGETS_JSON5_PATH) - if custom_targets_file: - LOGGER.info("Scanning custom_targets.json/json5. for used MCU names...") - json_contents.update(decode_json_file(custom_targets_json_path)) - for target_details in json_contents.values(): if "device_name" in target_details: used_mcu_names.add(target_details["device_name"]) @@ -126,9 +126,12 @@ def get_mcu_names_used_by_targets_json5() -> Set[str]: @cmsis_mcu_descr.command(short_help="Reload the cache of CMSIS MCU descriptions. This can take several minutes.") -def reload_cache(): +def reload_cache() -> None: """ - Reload the cache of CMSIS MCU descriptions. This can take several minutes. + Reload the cache of CMSIS MCU descriptions. + + This can take several minutes. + Note that it's possible for various MCU vendors' CMSIS pack servers to be down, and cmsis-pack-manager does not report any errors in this case (augh whyyyyy). 
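# Aside: a small standalone sketch of why find_json_files() above swaps its `exclude_dirs=[]`
# default for a None sentinel. A mutable default is created once, when the function is defined,
# so state leaks between calls. The names below are illustrative only.
from typing import Optional

def collect_bad(item: str, seen: list = []) -> list:  # noqa: B006 - the shared-list pitfall
    seen.append(item)
    return seen

def collect_good(item: str, seen: Optional[list] = None) -> list:
    if seen is None:
        seen = []  # fresh list on every call
    seen.append(item)
    return seen

assert collect_bad("a") == ["a"]
assert collect_bad("b") == ["a", "b"]  # the first call's item leaks into the second call
assert collect_good("b") == ["b"]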
@@ -143,9 +146,10 @@ def reload_cache(): @cmsis_mcu_descr.command(name="find-unused", short_help="Find MCU descriptions that are not used by targets.json5.") -def find_unused(): +def find_unused() -> None: """ Remove MCU descriptions that are not used by targets.json5. + Use this command after removing targets from Mbed to clean up old MCU definitions. """ used_mcu_names = get_mcu_names_used_by_targets_json5() @@ -164,13 +168,14 @@ def find_unused(): print("The following MCU descriptions are not used and should be pruned from cmsis_mcu_descriptions.json5") print("\n".join(removable_mcus)) + sys.exit(1) @cmsis_mcu_descr.command( name="check-missing", short_help="Check if there are any missing MCU descriptions used by targets.json5." ) -def check_missing(): +def check_missing() -> None: used_mcu_names = get_mcu_names_used_by_targets_json5() # Accumulate set of all keys in cmsis_mcu_descriptions.json @@ -193,16 +198,17 @@ def check_missing(): @cmsis_mcu_descr.command( name="fetch-missing", - short_help="Fetch any missing MCU descriptions used by targets.json5 or custom_targets.json/json5..", + short_help="Fetch any missing MCU descriptions used by targets.json5 or custom_targets.json/json5.", ) -def fetch_missing(): +def fetch_missing() -> None: """ - Scans through cmsis_mcu_descriptions.json5 for any missing MCU descriptions that are referenced by + Fetches any missing MCU descriptions used by targets.json5 or custom_targets.json/json5. + + This scans through cmsis_mcu_descriptions.json5 for any missing MCU descriptions that are referenced by targets.json5 or custom_targets.json/json5. If any are found, they are imported from the CMSIS cache. Note that downloaded descriptions should be checked for accuracy before they are committed. """ - used_mcu_names = get_mcu_names_used_by_targets_json5() # Accumulate set of all keys in cmsis_mcu_descriptions.json @@ -223,17 +229,19 @@ def fetch_missing(): for mcu in missing_mcu_names: if mcu not in cmsis_cache.index: - raise RuntimeError( + msg = ( f"MCU {mcu} is not present in the CMSIS MCU index ({cmsis_cache.index_path}). Maybe " f"wrong part number, or this MCU simply doesn't exist in the CMSIS index and has " f"to be added manually?" 
) + raise RuntimeError(msg) missing_mcus_dict[mcu] = cmsis_cache.index[mcu] LOGGER.info( "In case of Custom target remove 'device_name' from your custom_targets.json5 file and add\n" - + "just the 'memories' section as 'memory_banks' section from content below.\n" - + f"Otherwise add the whole following entries to {CMSIS_MCU_DESCRIPTIONS_JSON_PATH}:" + "just the 'memories' section as 'memory_banks' section from content below.\n" + f"Otherwise add the whole following entries to {CMSIS_MCU_DESCRIPTIONS_JSON_PATH}:" ) print(json.dumps(missing_mcus_dict, indent=4, sort_keys=True)) + sys.exit(1) diff --git a/tools/python/mbed_tools/cli/configure.py b/tools/python/mbed_tools/cli/configure.py index aaf39f08072..6ce1e024a3b 100644 --- a/tools/python/mbed_tools/cli/configure.py +++ b/tools/python/mbed_tools/cli/configure.py @@ -4,12 +4,17 @@ # """Command to generate the application CMake configuration script used by the build/compile system.""" +from __future__ import annotations + +import logging import pathlib import click -from mbed_tools.project import MbedProgram from mbed_tools.build import generate_config +from mbed_tools.project import MbedProgram + +logger = logging.getLogger(__name__) @click.command( @@ -44,12 +49,13 @@ def configure( toolchain: str, mbed_target: str, program_path: str, - mbed_os_path: str, + mbed_os_path: str | None, output_dir: pathlib.Path, - custom_targets_json: str, - app_config: str, + custom_targets_json: str | None, + app_config: str | None, ) -> None: - """Exports a mbed_config.cmake file to build directory in the program root. + """ + Exports a mbed_config.cmake file to build directory in the program root. The parameters set in the CMake file will be dependent on the combination of toolchain and Mbed target provided and these can then control which parts of @@ -78,6 +84,9 @@ def configure( if app_config is not None: program.files.app_config_file = pathlib.Path(app_config) + if program.files.app_config_file is None: + logger.info("This program does not contain an mbed_app.json config file.") + mbed_target = mbed_target.upper() _, output_path = generate_config(mbed_target, toolchain, program) - click.echo(f"mbed_config.cmake has been generated and written to '{str(output_path.resolve())}'") + click.echo(f"mbed_config.cmake has been generated and written to '{output_path.resolve()!s}'") diff --git a/tools/python/mbed_tools/cli/list_connected_devices.py b/tools/python/mbed_tools/cli/list_connected_devices.py index ae644c7b52c..a0ebc89fd1d 100644 --- a/tools/python/mbed_tools/cli/list_connected_devices.py +++ b/tools/python/mbed_tools/cli/list_connected_devices.py @@ -4,19 +4,26 @@ # """Command to list all Mbed enabled devices connected to the host computer.""" -import click import json from operator import attrgetter -from typing import Iterable, List, Optional, Tuple +from typing import Iterable, List, Literal, Optional, Tuple + +import click from tabulate import tabulate -from mbed_tools.devices import get_connected_devices, Device +from mbed_tools.devices import Device, get_connected_devices from mbed_tools.targets import Board @click.command() +# Note: We need to give --format an alias so it doesn't shadow a builtin @click.option( - "--format", type=click.Choice(["table", "json"]), default="table", show_default=True, help="Set output format." 
+ "--format", + "format_type", + type=click.Choice(["table", "json"]), + default="table", + show_default=True, + help="Set output format.", ) @click.option( "--show-all", @@ -25,7 +32,7 @@ default=False, help="Show all connected devices, even those which are not Mbed Boards.", ) -def list_connected_devices(format: str, show_all: bool) -> None: +def list_connected_devices(format_type: Literal["string", "json"], show_all: bool) -> None: """Prints connected devices.""" connected_devices = get_connected_devices() @@ -36,7 +43,7 @@ def list_connected_devices(format: str, show_all: bool) -> None: output_builders = {"table": _build_tabular_output, "json": _build_json_output} if devices: - output = output_builders[format](devices) + output = output_builders[format_type](devices) click.echo(output) else: click.echo("No connected Mbed devices found.") @@ -65,14 +72,14 @@ def _get_devices_ids(devices: Iterable[Device]) -> List[Tuple[Optional[int], Dev def _build_tabular_output(devices: Iterable[Device]) -> str: headers = ["Board name", "Serial number", "Serial port", "Mount point(s)", "Build target(s)", "Interface Version"] devices_data = [] - for id, device in _get_devices_ids(devices): + for dev_id, device in _get_devices_ids(devices): devices_data.append( [ device.mbed_board.board_name or "", device.serial_number, device.serial_port or "", "\n".join(str(mount_point) for mount_point in device.mount_points), - "\n".join(_get_build_targets(device.mbed_board, id)), + "\n".join(_get_build_targets(device.mbed_board, dev_id)), device.interface_version, ] ) @@ -81,7 +88,7 @@ def _build_tabular_output(devices: Iterable[Device]) -> str: def _build_json_output(devices: Iterable[Device]) -> str: devices_data = [] - for id, device in _get_devices_ids(devices): + for dev_id, device in _get_devices_ids(devices): board = device.mbed_board devices_data.append( { @@ -95,7 +102,7 @@ def _build_json_output(devices: Iterable[Device]) -> str: "board_name": board.board_name, "mbed_os_support": board.mbed_os_support, "mbed_enabled": board.mbed_enabled, - "build_targets": _get_build_targets(board, id), + "build_targets": _get_build_targets(board, dev_id), }, } ) diff --git a/tools/python/mbed_tools/cli/main.py b/tools/python/mbed_tools/cli/main.py index 01b755f3865..cd867595add 100644 --- a/tools/python/mbed_tools/cli/main.py +++ b/tools/python/mbed_tools/cli/main.py @@ -7,59 +7,39 @@ import logging import sys -from typing import Union, Any - import click +from typing_extensions import override -from mbed_tools.lib.logging import set_log_level, MbedToolsHandler - +from mbed_tools.cli.cmsis_mcu_descr import cmsis_mcu_descr from mbed_tools.cli.configure import configure from mbed_tools.cli.list_connected_devices import list_connected_devices -from mbed_tools.cli.project_management import new, import_, deploy from mbed_tools.cli.sterm import sterm -from mbed_tools.cli.cmsis_mcu_descr import cmsis_mcu_descr +from mbed_tools.lib.logging import MbedToolsHandler, set_log_level -CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"]) +CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]} LOGGER = logging.getLogger(__name__) class GroupWithExceptionHandling(click.Group): """A click.Group which handles ToolsErrors and logging.""" - def invoke(self, context: click.Context) -> None: - """Invoke the command group. + @override + def invoke(self, ctx: click.Context) -> None: + """ + Invoke the command group. Args: - context: The current click context. + ctx: The current click context. 
""" # Use the context manager to ensure tools exceptions (expected behaviour) are shown as messages to the user, # but all other exceptions (unexpected behaviour) are shown as errors. - with MbedToolsHandler(LOGGER, context.params["traceback"]) as handler: - super().invoke(context) + with MbedToolsHandler(LOGGER, ctx.params["traceback"]) as handler: + super().invoke(ctx) sys.exit(handler.exit_code) -def print_version(context: click.Context, param: Union[click.Option, click.Parameter], value: bool) -> Any: - """Print the version of mbed-tools.""" - if not value or context.resilient_parsing: - return - - # Mbed CE: changed this to be hardcoded for now. - version_string = "7.60.0" - click.echo(version_string) - context.exit() - - @click.group(cls=GroupWithExceptionHandling, context_settings=CONTEXT_SETTINGS) -@click.option( - "--version", - is_flag=True, - callback=print_version, - expose_value=False, - is_eager=True, - help="Display versions of all Mbed Tools packages.", -) @click.option( "-v", "--verbose", @@ -70,14 +50,15 @@ def print_version(context: click.Context, param: Union[click.Option, click.Param @click.option("-t", "--traceback", is_flag=True, show_default=True, help="Show a traceback when an error is raised.") def cli(verbose: int, traceback: bool) -> None: """Command line tool for interacting with Mbed OS.""" + # note: traceback parameter not used here but is accessed through + # click.context.params in GroupWithExceptionHandling + del traceback + set_log_level(verbose) cli.add_command(configure, "configure") cli.add_command(list_connected_devices, "detect") -cli.add_command(new, "new") -cli.add_command(deploy, "deploy") -cli.add_command(import_, "import") cli.add_command(sterm, "sterm") cli.add_command(cmsis_mcu_descr) diff --git a/tools/python/mbed_tools/cli/project_management.py b/tools/python/mbed_tools/cli/project_management.py deleted file mode 100644 index c2e2bbd9b2f..00000000000 --- a/tools/python/mbed_tools/cli/project_management.py +++ /dev/null @@ -1,105 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -"""Project management commands: new, import_, deploy and libs.""" - -import os -import pathlib - -from typing import Any, List - -import click -import tabulate - -from mbed_tools.project import initialise_project, import_project, get_known_libs, deploy_project -from mbed_tools.project._internal import git_utils - - -@click.command() -@click.option("--create-only", "-c", is_flag=True, show_default=True, help="Create a program without fetching mbed-os.") -@click.argument("path", type=click.Path(resolve_path=True)) -def new(path: str, create_only: bool) -> None: - """Creates a new Mbed project at the specified path. Downloads mbed-os and adds it to the project. - - PATH: Path to the destination directory for the project. Will be created if it does not exist. - """ - click.echo(f"Creating a new Mbed program at path '{path}'.") - if not create_only: - click.echo("Downloading mbed-os and adding it to the project.") - - initialise_project(pathlib.Path(path), create_only) - - -@click.command() -@click.argument("url") -@click.argument("path", type=click.Path(), default="") -@click.option( - "--skip-resolve-libs", - "-s", - is_flag=True, - show_default=True, - help="Skip resolving program library dependencies after cloning.", -) -def import_(url: str, path: Any, skip_resolve_libs: bool) -> None: - """Clone an Mbed project and library dependencies. 
- - URL: The git url of the remote project to clone. - - PATH: Destination path for the clone. If not given the destination path is set to the project name in the cwd. - """ - click.echo(f"Cloning Mbed program '{url}'") - if not skip_resolve_libs: - click.echo("Resolving program library dependencies.") - - if path: - click.echo(f"Destination path is '{path}'") - path = pathlib.Path(path) - - dst_path = import_project(url, path, not skip_resolve_libs) - if not skip_resolve_libs: - libs = get_known_libs(dst_path) - _print_dependency_table(libs) - - -@click.command() -@click.argument("path", type=click.Path(), default=os.getcwd()) -@click.option( - "--force", - "-f", - is_flag=True, - show_default=True, - help="Forces checkout of all library repositories at specified commit in the .lib file, overwrites local changes.", -) -def deploy(path: str, force: bool) -> None: - """Checks out Mbed program library dependencies at the revision specified in the ".lib" files. - - Ensures all dependencies are resolved and the versions are synchronised to the version specified in the library - reference. - - PATH: Path to the Mbed project [default: CWD] - """ - click.echo("Checking out all libraries to revisions specified in .lib files. Resolving any unresolved libraries.") - root_path = pathlib.Path(path) - deploy_project(root_path, force) - libs = get_known_libs(root_path) - _print_dependency_table(libs) - - -def _print_dependency_table(libs: List) -> None: - click.echo("The following library dependencies were fetched: \n") - table = [] - for lib in libs: - table.append( - [ - lib.reference_file.stem, - lib.get_git_reference().repo_url, - lib.source_code_path, - git_utils.get_default_branch(git_utils.get_repo(lib.source_code_path)) - if not lib.get_git_reference().ref - else lib.get_git_reference().ref, - ] - ) - - headers = ("Library Name", "Repository URL", "Path", "Git Reference") - click.echo(tabulate.tabulate(table, headers=headers)) diff --git a/tools/python/mbed_tools/cli/sterm.py b/tools/python/mbed_tools/cli/sterm.py index 2b7050771da..3cd65877ae1 100644 --- a/tools/python/mbed_tools/cli/sterm.py +++ b/tools/python/mbed_tools/cli/sterm.py @@ -4,6 +4,8 @@ # """Command to launch a serial terminal to a connected Mbed device.""" +from __future__ import annotations + from typing import Any, Optional, Tuple import click @@ -18,7 +20,8 @@ def _get_target_id(target: str) -> Tuple[str, Optional[int]]: target_name, target_id = target.replace("]", "").split("[", maxsplit=1) if target_id.isdigit() and int(target_id) >= 0: return (target_name, int(target_id)) - raise click.ClickException("When using the format mbed-target[ID], ID must be a positive integer or 0.") + msg = "When using the format mbed-target[ID], ID must be a positive integer or 0." + raise click.ClickException(msg) return (target, None) @@ -41,27 +44,33 @@ def _get_target_id(target: str) -> Tuple[str, Optional[int]]: help="Switch local echo on/off.", ) @click.option("-m", "--mbed-target", type=str, help="Mbed target to detect. 
Example: K64F, NUCLEO_F401RE, NRF51822...") -def sterm(port: str, baudrate: int, echo: str, mbed_target: str) -> None: +def sterm(port: str | None, baudrate: int, echo: str, mbed_target: str) -> None: """Launches a serial terminal to a connected device.""" if port is None: port = _find_target_serial_port_or_default(mbed_target) - terminal.run(port, baudrate, echo=True if echo == "on" else False) + terminal.run(port, baudrate, echo=echo == "on") def _get_connected_mbed_devices() -> Any: connected_devices = get_connected_devices() if not connected_devices.identified_devices: - raise MbedDevicesError("No Mbed enabled devices found.") + msg = "No Mbed enabled devices found." + raise MbedDevicesError(msg) return connected_devices.identified_devices -def _find_target_serial_port_or_default(target: Optional[str]) -> Any: +def _find_target_serial_port_or_default(target: Optional[str]) -> str: if target is None: # just return the first valid device found device, *_ = _get_connected_mbed_devices() else: target_name, target_id = _get_target_id(target) device = find_connected_device(target_name.upper(), target_id) + + if device.serial_port is None: + msg = f"Could not detect the serial port for {device.mbed_board} with serial number {device.serial_number}" + raise MbedDevicesError(msg) + return device.serial_port diff --git a/tools/python/mbed_tools/devices/__init__.py b/tools/python/mbed_tools/devices/__init__.py index 428683a4383..3f6e5e7aa7e 100644 --- a/tools/python/mbed_tools/devices/__init__.py +++ b/tools/python/mbed_tools/devices/__init__.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""API to detect any Mbed OS devices connected to the host computer. +""" +API to detect any Mbed OS devices connected to the host computer. It is expected that this package will be used by developers of Mbed OS tooling rather than by users of Mbed OS. This package uses the https://github.com/ARMmbed/mbed-targets interface to identify valid Mbed Enabled Devices. 
@@ -11,6 +12,8 @@ For the command line interface to the API see the package https://github.com/ARMmbed/mbed-tools """ -from mbed_tools.devices.devices import get_connected_devices, find_connected_device, find_all_connected_devices -from mbed_tools.devices.device import Device -from mbed_tools.devices import exceptions +from mbed_tools.devices import exceptions as exceptions +from mbed_tools.devices.device import Device as Device +from mbed_tools.devices.devices import find_all_connected_devices as find_all_connected_devices +from mbed_tools.devices.devices import find_connected_device as find_connected_device +from mbed_tools.devices.devices import get_connected_devices as get_connected_devices diff --git a/tools/python/mbed_tools/devices/_internal/base_detector.py b/tools/python/mbed_tools/devices/_internal/base_detector.py index 5adfeedee82..12d9e207d9d 100644 --- a/tools/python/mbed_tools/devices/_internal/base_detector.py +++ b/tools/python/mbed_tools/devices/_internal/base_detector.py @@ -16,4 +16,3 @@ class DeviceDetector(ABC): @abstractmethod def find_candidates(self) -> List[CandidateDevice]: """Returns CandidateDevices.""" - pass diff --git a/tools/python/mbed_tools/devices/_internal/candidate_device.py b/tools/python/mbed_tools/devices/_internal/candidate_device.py index 377798cbeb9..0cf9a064152 100644 --- a/tools/python/mbed_tools/devices/_internal/candidate_device.py +++ b/tools/python/mbed_tools/devices/_internal/candidate_device.py @@ -5,8 +5,8 @@ """Defines CandidateDevice model used for device detection.""" from dataclasses import dataclass -from typing import Optional, Tuple, Any, Union, cast from pathlib import Path +from typing import Any, Optional, Tuple, Union, cast class CandidateDeviceError(ValueError): @@ -24,6 +24,8 @@ class FilesystemMountpointError(CandidateDeviceError): class DataField: """CandidateDevice data attribute descriptor.""" + name: str # pyright: ignore[reportUninitializedInstanceVariable] + def __set_name__(self, owner: object, name: str) -> None: """Sets the descriptor name, this is called by magic in the owners.__new__ method.""" self.name = name @@ -40,8 +42,9 @@ def __set__(self, instance: object, value: Any) -> None: """Prevent setting the descriptor to an empty or invalid hex value.""" try: instance.__dict__[self.name] = _format_hex(value) - except ValueError: - raise USBDescriptorError(f"{self.name} cannot be an empty and must be valid hex.") + except ValueError as ex: + msg = f"{self.name} cannot be empty and must be valid hex." + raise USBDescriptorError(msg) from ex class USBDescriptorString(DataField): @@ -50,7 +53,8 @@ class USBDescriptorString(DataField): def __set__(self, instance: object, value: str) -> None: """Prevent setting the descriptor to a non-string or empty value.""" if not value or not isinstance(value, str): - raise USBDescriptorError(f"{self.name} cannot be an empty field and must be a string.") + msg = f"{self.name} cannot be an empty field and must be a string." + raise USBDescriptorError(msg) instance.__dict__[self.name] = value @@ -61,14 +65,16 @@ class FilesystemMountpoints(DataField): def __set__(self, instance: object, value: Union[tuple, list]) -> None: """Prevent setting the descriptor to a non-sequence or empty sequence value.""" if not value or not isinstance(value, (list, tuple)): - raise FilesystemMountpointError(f"{self.name} must be set to a non-empty list or tuple.") + msg = f"{self.name} must be set to a non-empty list or tuple."
+ raise FilesystemMountpointError(msg) instance.__dict__[self.name] = tuple(value) @dataclass(frozen=True, order=True) class CandidateDevice: - """Valid candidate device connected to the host computer. + """ + Valid candidate device connected to the host computer. We define a CandidateDevice as any USB mass storage device which mounts a filesystem. The device may or may not present a serial port. @@ -81,15 +87,19 @@ class CandidateDevice: serial_port: Serial port associated with the device, this could be None. """ - product_id: str = cast(str, USBDescriptorHex()) - vendor_id: str = cast(str, USBDescriptorHex()) - serial_number: str = cast(str, USBDescriptorString()) - mount_points: Tuple[Path, ...] = cast(Tuple[Path], FilesystemMountpoints()) + # Note: these classes (USBDescriptorHex, etc) are field classes that act like specific + # types but actually have additional validation triggered when they are set. + # So we have to lie to the type checker a bit. + product_id: str = cast(str, USBDescriptorHex()) # pyright: ignore[reportInvalidCast] + vendor_id: str = cast(str, USBDescriptorHex()) # pyright: ignore[reportInvalidCast] + serial_number: str = cast(str, USBDescriptorString()) # pyright: ignore[reportInvalidCast] + mount_points: Tuple[Path, ...] = cast(Tuple[Path], FilesystemMountpoints()) # pyright: ignore[reportInvalidCast] serial_port: Optional[str] = None def _format_hex(hex_value: str) -> str: - """Return hex value with a prefix. + """ + Return hex value with a prefix. Accepts hex_value in prefixed (0xff) and unprefixed (ff) formats. """ diff --git a/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py b/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py index 4eca620ea52..0cac9fe7e7f 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/device_detector.py @@ -7,12 +7,13 @@ import logging import pathlib import re -from typing import List, Tuple, Optional -from typing_extensions import TypedDict +from typing import List, Optional, Tuple + +from typing_extensions import TypedDict, override + from mbed_tools.devices._internal.base_detector import DeviceDetector from mbed_tools.devices._internal.candidate_device import CandidateDevice -from mbed_tools.devices._internal.darwin import system_profiler, ioreg, diskutil - +from mbed_tools.devices._internal.darwin import diskutil, ioreg, system_profiler logger = logging.getLogger(__name__) @@ -30,24 +31,23 @@ class CandidateDeviceData(TypedDict): class InvalidCandidateDeviceDataError(ValueError): """Raised when CandidateDevice was given invalid data and it cannot be built.""" - pass - class DarwinDeviceDetector(DeviceDetector): """Darwin specific implementation of device detection.""" + @override def find_candidates(self) -> List[CandidateDevice]: """Return a list of CandidateDevices.""" usb_devices_data = system_profiler.get_end_usb_devices_data() candidates = [] for device_data in usb_devices_data: - logging.debug(f"Building from: {device_data}.") + logger.debug(f"Building from: {device_data}.") try: candidate = _build_candidate(device_data) except InvalidCandidateDeviceDataError: pass else: - logging.debug(f"Built candidate: {candidate}.") + logger.debug(f"Built candidate: {candidate}.") candidates.append(candidate) return candidates @@ -57,8 +57,8 @@ def _build_candidate(device_data: system_profiler.USBDevice) -> CandidateDevice: try: return CandidateDevice(**assembled_data) except ValueError as e: - logging.debug(f"Unable to 
build candidate. {e}") - raise InvalidCandidateDeviceDataError + logger.debug(f"Unable to build candidate. {e}") + raise InvalidCandidateDeviceDataError from e def _assemble_candidate_data(device_data: system_profiler.USBDevice) -> CandidateDeviceData: @@ -72,7 +72,8 @@ def _assemble_candidate_data(device_data: system_profiler.USBDevice) -> Candidat def _format_vendor_id(vendor_id: str) -> str: - """Strips vendor name from vendor_id field. + """ + Strips vendor name from vendor_id field. Example: >>> _format_vendor_id("0x1234 (Nice Vendor Inc.)") # "0x1234" @@ -89,7 +90,7 @@ def _get_mount_points(device_data: system_profiler.USBDevice) -> Tuple[pathlib.P if mount_point: mount_points.append(pathlib.Path(mount_point)) else: - logging.debug(f"Couldn't determine mount point for device id: {storage_identifier}.") + logger.debug(f"Couldn't determine mount point for device id: {storage_identifier}.") return tuple(mount_points) @@ -97,21 +98,21 @@ def _get_serial_port(device_data: system_profiler.USBDevice) -> Optional[str]: """Returns serial port for a given device, None if serial port cannot be determined.""" device_name = device_data.get("_name") if not device_name: - logging.debug('Missing "_name" in "{device_data}", which is required for ioreg name.') + logger.debug(f'Missing "_name" in "{device_data}", which is required for ioreg name.') return None location_id = device_data.get("location_id") if not location_id: - logging.debug('Missing "location_id" in "{device_data}", which is required for ioreg name.') + logger.debug(f'Missing "location_id" in "{device_data}", which is required for ioreg name.') return None ioreg_name = _build_ioreg_device_name(device_name=device_name, location_id=location_id) - serial_port = ioreg.get_io_dialin_device(ioreg_name) - return serial_port + return ioreg.get_io_dialin_device(ioreg_name) def _build_ioreg_device_name(device_name: str, location_id: str) -> str: - """Converts extracted `_name` and `location_id` attributes from `system_profiler` to a valid ioreg device name. + """ + Converts extracted `_name` and `location_id` attributes from `system_profiler` to a valid ioreg device name. `system_profiler` utility returns location ids in the form of `0xNNNNNNN`, with an optional suffix of ` / N`.
diff --git a/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py b/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py index a0e6fab140e..27db2913db2 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/diskutil.py @@ -4,11 +4,13 @@ # """Interactions with `diskutil`.""" +from __future__ import annotations + import plistlib import subprocess from typing import Dict, Iterable, List, Optional, cast -from typing_extensions import TypedDict +from typing_extensions import TypedDict VolumeTree = Dict # mypy does not work with recursive types, which nested "Partitions" would require @@ -20,9 +22,9 @@ class Volume(TypedDict, total=False): DeviceIdentifier: str # example: disk2 -def get_all_external_disks_data() -> List[VolumeTree]: +def get_all_external_disks_data() -> List[Volume | VolumeTree]: """Returns parsed output of `diskutil` call, fetching only information of interest.""" - output = subprocess.check_output(["diskutil", "list", "-plist", "external"], stderr=subprocess.DEVNULL) + output = subprocess.check_output(["/usr/sbin/diskutil", "list", "-plist", "external"], stderr=subprocess.DEVNULL) if output: data: Dict = plistlib.loads(output) return data.get("AllDisksAndPartitions", []) @@ -30,7 +32,8 @@ def get_all_external_disks_data() -> List[VolumeTree]: def get_all_external_volumes_data() -> List[Volume]: - """Returns all external volumes data. + """ + Returns all external volumes data. Reduces structure returned by `diskutil` call to one which will only contain data about Volumes. Useful for determining MountPoints and DeviceIdentifiers. @@ -56,8 +59,9 @@ def get_mount_point(device_identifier: str) -> Optional[str]: return None -def _filter_volumes(data: Iterable[VolumeTree]) -> List[Volume]: - """Flattens the structure returned by `diskutil` call. +def _filter_volumes(data: Iterable[VolumeTree | Volume]) -> List[Volume]: + """ + Flattens the structure returned by `diskutil` call. Expected input will contain both partitioned an unpartitioned devices. 
Partitioned devices list mounted partitions under an arbitrary key, diff --git a/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py b/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py index 61d9d2fe337..bb6f19e8ffb 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/ioreg.py @@ -12,7 +12,7 @@ def get_data(device_name: str) -> List[Dict]: """Returns parsed output of `ioreg` call for a given device name.""" - output = subprocess.check_output(["ioreg", "-a", "-r", "-n", device_name, "-l"]) + output = subprocess.check_output(["/usr/sbin/ioreg", "-a", "-r", "-n", device_name, "-l"]) if output: try: return cast(List[Dict], plistlib.loads(output)) diff --git a/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py b/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py index 633ebf68e26..52a1b7b5ba4 100644 --- a/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py +++ b/tools/python/mbed_tools/devices/_internal/darwin/system_profiler.py @@ -4,10 +4,13 @@ # """Interactions with `system_profiler`.""" +from __future__ import annotations + import plistlib import re import subprocess -from typing import Dict, Iterable, List, cast +from typing import Dict, Iterable, List, Union, cast + from typing_extensions import TypedDict USBDeviceTree = Dict # mypy does not work with recursive types, which "_items" would require @@ -30,11 +33,11 @@ class USBDevice(TypedDict, total=False): Media: List[USBDeviceMedia] -def get_all_usb_devices_data() -> List[USBDeviceTree]: +def get_all_usb_devices_data() -> List[USBDeviceTree | USBDevice]: """Returns parsed output of `system_profiler` call.""" - output = subprocess.check_output(["system_profiler", "-xml", "SPUSBDataType"], stderr=subprocess.DEVNULL) + output = subprocess.check_output(["/usr/sbin/system_profiler", "-xml", "SPUSBDataType"], stderr=subprocess.DEVNULL) if output: - return cast(List[USBDeviceTree], plistlib.loads(output)) + return cast(List[Union[USBDeviceTree, USBDevice]], plistlib.loads(output)) return [] @@ -42,12 +45,12 @@ def get_end_usb_devices_data() -> List[USBDevice]: """Returns only end devices from the output of `system_profiler` call.""" data = get_all_usb_devices_data() leaf_devices = _extract_leaf_devices(data) - end_devices = _filter_end_devices(leaf_devices) - return end_devices + return _filter_end_devices(leaf_devices) -def _extract_leaf_devices(data: Iterable[USBDeviceTree]) -> List[USBDevice]: - """Flattens the structure returned by `system_profiler` call. +def _extract_leaf_devices(data: Iterable[USBDeviceTree | USBDevice]) -> List[USBDevice]: + """ + Flattens the structure returned by `system_profiler` call. Expected input will contain a tree-like structures, this function will return their leaf nodes. """ @@ -62,7 +65,8 @@ def _extract_leaf_devices(data: Iterable[USBDeviceTree]) -> List[USBDevice]: def _filter_end_devices(data: Iterable[USBDevice]) -> List[USBDevice]: - """Removes devices that don't look like end devices. + """ + Removes devices that don't look like end devices. An end device is a device that shouldn't have child devices. I.e.: a hub IS NOT an end device, a mouse IS an end device. 
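# Aside: an illustrative sketch of the leaf-extraction idea used by _extract_leaf_devices()
# above. system_profiler nests child devices under the "_items" key, and only nodes without
# children are treated as end devices. The sample data below is made up.
from typing import Dict, List

def leaves(nodes: List[Dict]) -> List[Dict]:
    found: List[Dict] = []
    for node in nodes:
        children = node.get("_items")
        if children:
            found.extend(leaves(children))  # descend through hubs
        else:
            found.append(node)              # no children: an end device
    return found

print(leaves([{"_name": "USB3.0 Hub", "_items": [{"_name": "DAPLink CMSIS-DAP"}]}]))
# [{'_name': 'DAPLink CMSIS-DAP'}]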
diff --git a/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py b/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py index 2302a616f42..ceddad090a0 100644 --- a/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py +++ b/tools/python/mbed_tools/devices/_internal/detect_candidate_devices.py @@ -7,8 +7,8 @@ import platform from typing import Iterable -from mbed_tools.devices._internal.candidate_device import CandidateDevice from mbed_tools.devices._internal.base_detector import DeviceDetector +from mbed_tools.devices._internal.candidate_device import CandidateDevice from mbed_tools.devices.exceptions import UnknownOSError @@ -21,19 +21,20 @@ def detect_candidate_devices() -> Iterable[CandidateDevice]: def _get_detector_for_current_os() -> DeviceDetector: """Returns DeviceDetector for current operating system.""" if platform.system() == "Windows": - from mbed_tools.devices._internal.windows.device_detector import WindowsDeviceDetector + from mbed_tools.devices._internal.windows.device_detector import WindowsDeviceDetector # noqa: PLC0415 return WindowsDeviceDetector() - if platform.system() == "Linux": - from mbed_tools.devices._internal.linux.device_detector import LinuxDeviceDetector + elif platform.system() == "Linux": + from mbed_tools.devices._internal.linux.device_detector import LinuxDeviceDetector # noqa: PLC0415 return LinuxDeviceDetector() - if platform.system() == "Darwin": - from mbed_tools.devices._internal.darwin.device_detector import DarwinDeviceDetector + elif platform.system() == "Darwin": + from mbed_tools.devices._internal.darwin.device_detector import DarwinDeviceDetector # noqa: PLC0415 return DarwinDeviceDetector() - - raise UnknownOSError( - f"We have detected the OS you are running is '{platform.system()}'. " - "Unfortunately we haven't implemented device detection support for this OS yet. Sorry!" - ) + else: + msg = ( + f"We have detected the OS you are running is '{platform.system()}'. " + "Unfortunately we haven't implemented device detection support for this OS yet. Sorry!" + ) + raise UnknownOSError(msg) diff --git a/tools/python/mbed_tools/devices/_internal/exceptions.py b/tools/python/mbed_tools/devices/_internal/exceptions.py index b81daf66b59..7104fd266d3 100644 --- a/tools/python/mbed_tools/devices/_internal/exceptions.py +++ b/tools/python/mbed_tools/devices/_internal/exceptions.py @@ -7,11 +7,11 @@ from mbed_tools.lib.exceptions import ToolsError -class SystemException(ToolsError): +class OperatingSystemError(ToolsError): """Exception with regards to the underlying operating system.""" -class NoBoardForCandidate(ToolsError): +class NoBoardForCandidateError(ToolsError): """Raised when board data cannot be determined for a candidate.""" diff --git a/tools/python/mbed_tools/devices/_internal/file_parser.py b/tools/python/mbed_tools/devices/_internal/file_parser.py index de0f9a4681d..1b6148464be 100644 --- a/tools/python/mbed_tools/devices/_internal/file_parser.py +++ b/tools/python/mbed_tools/devices/_internal/file_parser.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Parses files found on Mbed enabled devices. +""" +Parses files found on Mbed enabled devices. There are a number of data files stored on an mbed enabled device's USB mass storage. 
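# Aside: a hedged sketch of the per-OS dispatch pattern used by _get_detector_for_current_os()
# above. The platform-specific import happens inside the matching branch (hence the PLC0415
# suppressions) so that, for example, pywin32 never has to be importable on Linux. The stand-in
# modules below are illustrative, not the real detectors.
import platform

def load_path_backend() -> str:
    system = platform.system()
    if system == "Windows":
        import ntpath as backend  # noqa: PLC0415 - stand-in for a Windows-only dependency
    elif system in ("Linux", "Darwin"):
        import posixpath as backend  # noqa: PLC0415 - stand-in for a POSIX-only dependency
    else:
        msg = f"No backend implemented for '{system}'."
        raise NotImplementedError(msg)
    return backend.__name__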
@@ -93,16 +94,15 @@ import logging import pathlib import re - from dataclasses import dataclass -from typing import Optional, NamedTuple, Iterable, List - +from typing import Iterable, List, NamedTuple, Optional logger = logging.getLogger(__name__) class OnlineId(NamedTuple): - """Used to identify the target against the os.mbed.com website. + """ + Used to identify the target against the os.mbed.com website. The target type and slug are used in the URI for the board and together they can be used uniquely identify a board. @@ -123,7 +123,8 @@ class DeviceFileInfo: def read_device_files(directory_paths: Iterable[pathlib.Path]) -> DeviceFileInfo: - """Read data from files contained on an mbed enabled device's USB mass storage device. + """ + Read data from files contained on an mbed enabled device's USB mass storage device. If details.txt exists and it contains a product code, then we will use that code. If not then we try to grep the code from the mbed.htm file. We extract an OnlineID from mbed.htm as we also make use of that information to find a @@ -204,7 +205,8 @@ def _read_first_details_txt_contents(file_paths: Iterable[pathlib.Path]) -> dict def _read_details_txt(file_contents: str) -> dict: - """Parse the contents of a daplink-compatible device's details.txt. + """ + Parse the contents of a daplink-compatible device's details.txt. Args: file_contents: The contents of the details.txt file. @@ -215,7 +217,7 @@ def _read_details_txt(file_contents: str) -> dict: if line.startswith("#"): continue - key, sep, value = line.partition(":") + key, _sep, value = line.partition(":") if key and value: output[key.strip()] = value.strip() diff --git a/tools/python/mbed_tools/devices/_internal/linux/device_detector.py b/tools/python/mbed_tools/devices/_internal/linux/device_detector.py index c4623d87cbb..0688427232d 100644 --- a/tools/python/mbed_tools/devices/_internal/linux/device_detector.py +++ b/tools/python/mbed_tools/devices/_internal/linux/device_detector.py @@ -6,21 +6,22 @@ import logging from pathlib import Path -from typing import Tuple, List, Optional, cast +from typing import List, Optional, Tuple, cast import psutil import pyudev +from typing_extensions import override from mbed_tools.devices._internal.base_detector import DeviceDetector from mbed_tools.devices._internal.candidate_device import CandidateDevice, FilesystemMountpointError - logger = logging.getLogger(__name__) class LinuxDeviceDetector(DeviceDetector): """Linux specific implementation of device detection.""" + @override def find_candidates(self) -> List[CandidateDevice]: """Return a list of CandidateDevices.""" context = pyudev.Context() diff --git a/tools/python/mbed_tools/devices/_internal/resolve_board.py b/tools/python/mbed_tools/devices/_internal/resolve_board.py index 53919ada141..18792430eba 100644 --- a/tools/python/mbed_tools/devices/_internal/resolve_board.py +++ b/tools/python/mbed_tools/devices/_internal/resolve_board.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Resolve targets for `CandidateDevice`. +""" +Resolve targets for `CandidateDevice`. Resolving a target involves looking up an `MbedTarget` from the `mbed-targets` API, using data found in the "htm file" located on an "Mbed Enabled" device's USB MSD. 
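# Aside: a small usage sketch of the details.txt parsing approach shown above (skip '#'
# comment lines, split on the first ':'). The sample file contents are made up, but follow
# the DAPLink "key: value" layout the parser expects.
def parse_details(file_contents: str) -> dict:
    output = {}
    for line in file_contents.splitlines():
        if line.startswith("#"):
            continue
        key, _sep, value = line.partition(":")
        if key and value:
            output[key.strip()] = value.strip()
    return output

print(parse_details("# DAPLink Firmware\nUnique ID: 0240000034\nHIC ID: 97969900"))
# {'Unique ID': '0240000034', 'HIC ID': '97969900'}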
@@ -11,15 +12,12 @@ """ import logging - from typing import Optional -from mbed_tools.targets import Board, get_board_by_product_code, get_board_by_online_id, get_board_by_jlink_slug -from mbed_tools.targets.exceptions import UnknownBoard, MbedTargetsError - -from mbed_tools.devices._internal.exceptions import NoBoardForCandidate, ResolveBoardError +from mbed_tools.devices._internal.exceptions import NoBoardForCandidateError, ResolveBoardError from mbed_tools.devices._internal.file_parser import OnlineId - +from mbed_tools.targets import Board, get_board_by_jlink_slug, get_board_by_online_id, get_board_by_product_code +from mbed_tools.targets.exceptions import MbedTargetsError, UnknownBoardError logger = logging.getLogger(__name__) @@ -27,7 +25,8 @@ def resolve_board( product_code: Optional[str] = None, online_id: Optional[OnlineId] = None, serial_number: str = "" ) -> Board: - """Resolves a board object from the platform database. + """ + Resolves a board object from the platform database. We have multiple ways to identify boards from various metadata sources Mbed provides. This is because there are many supported Mbed device families, each with slightly different ways of identifying themselves as Mbed enabled. @@ -43,13 +42,13 @@ def resolve_board( if product_code: try: return get_board_by_product_code(product_code) - except UnknownBoard: - logger.error(f"Could not identify a board with the product code: '{product_code}'.") + except UnknownBoardError: + logger.exception(f"Could not identify a board with the product code: '{product_code}'.") except MbedTargetsError as e: - logger.error( - f"There was an error looking up the product code `{product_code}` from the target database.\nError: {e}" + logger.exception( + f"There was an error looking up the product code `{product_code}` from the target database." ) - raise ResolveBoardError() from e + raise ResolveBoardError from e if online_id: slug = online_id.slug @@ -59,29 +58,27 @@ def resolve_board( return get_board_by_jlink_slug(slug=slug) else: return get_board_by_online_id(slug=slug, target_type=target_type) - except UnknownBoard: - logger.error(f"Could not identify a board with the slug: '{slug}' and target type: '{target_type}'.") + except UnknownBoardError: + logger.exception(f"Could not identify a board with the slug: '{slug}' and target type: '{target_type}'.") except MbedTargetsError as e: - logger.error( - f"There was an error looking up the online ID `{online_id!r}` from the target database.\nError: {e}" - ) - raise ResolveBoardError() from e + logger.exception(f"There was an error looking up the online ID `{online_id!r}` from the target database.") + raise ResolveBoardError from e # Product code might be the first 4 characters of the serial number product_code = serial_number[:4] if product_code: try: return get_board_by_product_code(product_code) - except UnknownBoard: + except UnknownBoardError: # Most devices have a serial number so this may not be a problem logger.info( f"The device with the Serial Number: '{serial_number}' (Product Code: '{product_code}') " f"does not appear to be an Mbed development board." ) except MbedTargetsError as e: - logger.error( - f"There was an error looking up the product code `{product_code}` from the target database.\nError: {e}" + logger.exception( + f"There was an error looking up the product code `{product_code}` from the target database." 
) - raise ResolveBoardError() from e + raise ResolveBoardError from e - raise NoBoardForCandidate + raise NoBoardForCandidateError diff --git a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py index 48d5cad78e6..32907617f58 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py +++ b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor.py @@ -6,10 +6,11 @@ import logging from abc import ABC, abstractmethod -from typing import List, Any, Generator, Optional, NamedTuple, cast +from typing import Any, Generator, List, NamedTuple, Optional, cast -import pythoncom -import win32com.client +import pythoncom # pyright: ignore[reportMissingModuleSource] +import win32com.client # pyright: ignore[reportMissingModuleSource] +from typing_extensions import override from mbed_tools.devices._internal.windows.component_descriptor_utils import UNKNOWN_VALUE, is_undefined_data_object @@ -21,8 +22,9 @@ class ComponentDescriptor(ABC): """Win32 component descriptor.""" - def __init__(self, win32_definition: type, win32_class_name: str, win32_filter: Optional[str] = None): - """Initialiser. + def __init__(self, win32_definition: type, win32_class_name: str, win32_filter: Optional[str] = None) -> None: + """ + Initialiser. Args: win32_definition: definition of the Windows component as defined in MSDN. @@ -51,7 +53,7 @@ def win32_class_name(self) -> str: @property def field_names(self) -> List[str]: """Returns the names of all the fields of the descriptor.""" - return [k for k in getattr(self._win32_definition, NAMED_TUPLE_FIELDS_ATTRIBUTE)] + return list(getattr(self._win32_definition, NAMED_TUPLE_FIELDS_ATTRIBUTE)) @property @abstractmethod @@ -60,7 +62,8 @@ def component_id(self) -> str: @property def win32_filter(self) -> Optional[str]: - """Filter applied on a Win32 category. + """ + Filter applied on a Win32 category. For instance, the current component can be a subclass/subcategory of a component exposed by Win32. """ @@ -83,6 +86,7 @@ def get(self, field_name: str) -> Any: logger.debug(f"Attribute [{field_name}] is undefined on this instance {self}: {e}") return UNKNOWN_VALUE + @override def __str__(self) -> str: """String representation.""" values = {k: v for k, v in self.__dict__.items() if not k.startswith("_")} @@ -105,7 +109,7 @@ def __init__(self) -> None: def _read_cdispatch_fields(self, win32_element: Any, element_fields_list: List[str]) -> dict: """Reads all the fields from a cdispatch object returned by pywin32.""" if not win32_element: - return dict() + return {} return {k: self._read_cdispatch_field(win32_element, k) for k in element_fields_list} def _read_cdispatch_field(self, win32_element: Any, key: str) -> Any: @@ -124,9 +128,10 @@ def map_element(self, win32_element: Any, to_cls: type) -> ComponentDescriptor: def _get_list_iterator(self, win32_class_name: str, list_filter: Optional[str]) -> Generator[Any, None, None]: if list_filter: - query = f"Select * from {win32_class_name} where {list_filter}" - return self.wmi.ExecQuery(query) # type: ignore - return self.wmi.InstancesOf(win32_class_name) # type: ignore + # Note: This is a WQL query, not true SQL, and there does not appear to be a way to bind parameters. 
+ query = f"Select * from {win32_class_name} where {list_filter}" # noqa: S608 + return self.wmi.ExecQuery(query) + return self.wmi.InstancesOf(win32_class_name) def element_generator( self, to_cls: type, win32_class_name: str, list_filter: Optional[str] @@ -139,7 +144,7 @@ def element_generator( class ComponentDescriptorWrapper: """Wraps a component descriptor.""" - def __init__(self, cls: type): + def __init__(self, cls: type) -> None: """initialiser.""" self._cls = cls self._win32_wrapper = Win32Wrapper() diff --git a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py index c2468d831a3..0d90b767d26 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py +++ b/tools/python/mbed_tools/devices/_internal/windows/component_descriptor_utils.py @@ -4,8 +4,8 @@ # """Utilities with regards to Win32 component descriptors.""" -from typing import Any, NamedTuple, Union from collections import OrderedDict +from typing import Any, NamedTuple, Union UNKNOWN_VALUE = "Unknown" diff --git a/tools/python/mbed_tools/devices/_internal/windows/device_detector.py b/tools/python/mbed_tools/devices/_internal/windows/device_detector.py index 561d03b1405..ebe7c52ad76 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/device_detector.py +++ b/tools/python/mbed_tools/devices/_internal/windows/device_detector.py @@ -7,10 +7,12 @@ from pathlib import Path from typing import List +from typing_extensions import override + from mbed_tools.devices._internal.base_detector import DeviceDetector from mbed_tools.devices._internal.candidate_device import CandidateDevice from mbed_tools.devices._internal.windows.system_data_loader import SystemDataLoader -from mbed_tools.devices._internal.windows.usb_data_aggregation import SystemUsbData, AggregatedUsbData +from mbed_tools.devices._internal.windows.usb_data_aggregation import AggregatedUsbData, SystemUsbData class WindowsDeviceDetector(DeviceDetector): @@ -20,6 +22,7 @@ def __init__(self) -> None: """Initialiser.""" self._data_loader = SystemDataLoader() + @override def find_candidates(self) -> List[CandidateDevice]: """Return a generator of Candidates.""" return [ diff --git a/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py b/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py index 35a1687d0a3..ac048d5f1e1 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py +++ b/tools/python/mbed_tools/devices/_internal/windows/device_instance_id.py @@ -4,49 +4,59 @@ # """Utility in charge of finding the instance ID of a device.""" -import win32con -import win32api -from mbed_tools.devices._internal.exceptions import SystemException +from __future__ import annotations + import logging +from typing import TYPE_CHECKING, Any, Optional + +import win32api # pyright: ignore[reportMissingModuleSource] +import win32con # pyright: ignore[reportMissingModuleSource] -from typing import Optional, Any +from mbed_tools.devices._internal.exceptions import OperatingSystemError + +if TYPE_CHECKING: + from types import TracebackType logger = logging.getLogger(__name__) -class RegKey(object): +class RegKey: """Context manager in charge of opening and closing registry keys.""" def __init__(self, sub_registry_key: str) -> None: """Initialiser.""" access = win32con.KEY_READ | win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE try: - self._hkey = 
win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, sub_registry_key, 0, access) + self._hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, sub_registry_key, False, access) except win32api.error as e: - raise SystemException(f"Could not read key [{sub_registry_key}] in the registry: {e}") + msg = f"Could not read key [{sub_registry_key}] in the registry: {e}" + raise OperatingSystemError(msg) from e def __enter__(self) -> Any: """Actions on entry.""" return self._hkey - def __exit__(self, type: Any, value: Any, traceback: Any) -> None: + def __exit__( + self, exc_type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> None: """Actions on exit.""" win32api.RegCloseKey(self._hkey) - self._hkey.close() + self._hkey.Close() def get_children_instance_id(pnpid: str) -> Optional[str]: - """Gets the USB children instance ID from the plug and play ID. + """ + Gets the USB children instance ID from the plug and play ID. See https://docs.microsoft.com/en-us/windows-hardware/drivers/install/instance-ids. """ # Although the registry should not be accessed directly - # (See https://docs.microsoft.com/en-us/windows-hardware/drivers/install/hklm-system-currentcontrolset-enum-registry-tree), # noqa E501 + # (See https://docs.microsoft.com/en-us/windows-hardware/drivers/install/hklm-system-currentcontrolset-enum-registry-tree), # and SetupDi functions/APIs should be used instead in a similar fashion to Miro utility # (See https://github.com/cool-RR/Miro/blob/7b9ecd9bc0878e463f5a5e26e8b00b675e3f98ac/tv/windows/plat/usbutils.py) # Most libraries seems to be reading the registry: # - Pyserial: https://github.com/pyserial/pyserial/blob/master/serial/tools/list_ports_windows.py - # - Node serialport: https://github.com/serialport/node-serialport/blob/cd112ca5a3a3fe186e1ac6fa78eeeb5ea7396185/packages/bindings/src/serialport_win.cpp # noqa E501 + # - Node serialport: https://github.com/serialport/node-serialport/blob/cd112ca5a3a3fe186e1ac6fa78eeeb5ea7396185/packages/bindings/src/serialport_win.cpp # - USB device forensics: https://github.com/woanware/usbdeviceforensics/blob/master/pyTskusbdeviceforensics.py # For more details about the registry key actually looked at, See: # - https://stackoverflow.com/questions/3331043/get-list-of-connected-usb-devices diff --git a/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py b/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py index 95c52b275f6..c33344a8e91 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py +++ b/tools/python/mbed_tools/devices/_internal/windows/disk_aggregation.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Data aggregation about a disk on Windows. +""" +Data aggregation about a disk on Windows. On Windows, information about disk drive is scattered around Physical disks, Partitions and Logical Drives. @@ -10,20 +11,21 @@ as a single object: AggregatedDiskData. 
""" -from typing import List, Optional, Callable -from typing import NamedTuple, cast +from typing import Callable, List, NamedTuple, Optional, cast + +from typing_extensions import override from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor from mbed_tools.devices._internal.windows.component_descriptor_utils import retain_value_or_default -from mbed_tools.devices._internal.windows.windows_identifier import WindowsUID from mbed_tools.devices._internal.windows.disk_drive import DiskDrive from mbed_tools.devices._internal.windows.disk_partition import DiskPartition from mbed_tools.devices._internal.windows.disk_partition_logical_disk_relationships import ( DiskPartitionLogicalDiskRelationship, ) from mbed_tools.devices._internal.windows.logical_disk import LogicalDisk +from mbed_tools.devices._internal.windows.system_data_loader import ComponentsLoader, SystemDataLoader from mbed_tools.devices._internal.windows.volume_set import VolumeInformation, get_volume_information -from mbed_tools.devices._internal.windows.system_data_loader import SystemDataLoader, ComponentsLoader +from mbed_tools.devices._internal.windows.windows_identifier import WindowsUID class AggregatedDiskDataDefinition(NamedTuple): @@ -56,6 +58,7 @@ def __init__(self) -> None: super().__init__(AggregatedDiskDataDefinition, win32_class_name="DiskDataAggregation") @property + @override def component_id(self) -> str: """Returns the ID field.""" return cast(str, self.get("label")) @@ -105,29 +108,29 @@ def aggregate(self, logical_disk: LogicalDisk) -> AggregatedDiskData: # Determines which physical disk the partition is on # See https://superuser.com/questions/1147218/on-which-physical-drive-is-this-logical-drive corresponding_physical = self._physical_disks.get(corresponding_partition.get("DiskIndex"), DiskDrive()) - aggregatedData = AggregatedDiskData() - aggregatedData.set_data_values( - dict( - uid=corresponding_physical.uid, - label=logical_disk.component_id, - description=logical_disk.get("Description"), - free_space=logical_disk.get("FreeSpace"), - size=logical_disk.get("Size"), - partition_name=corresponding_partition.component_id, - partition_type=corresponding_partition.get("Type"), - volume_information=corresponding_volume_information, - caption=corresponding_physical.get("Caption"), - physical_disk_name=corresponding_physical.get("DeviceID"), - model=corresponding_physical.get("Model"), - interface_type=corresponding_physical.get("InterfaceType"), - media_type=corresponding_physical.get("MediaType"), - manufacturer=corresponding_physical.get("Manufacturer"), - serial_number=retain_value_or_default(corresponding_physical.get("SerialNumber")), - status=corresponding_physical.get("Status"), - pnp_device_id=corresponding_physical.get("PNPDeviceID"), - ) + aggregated_data = AggregatedDiskData() + aggregated_data.set_data_values( + { + "uid": corresponding_physical.uid, + "label": logical_disk.component_id, + "description": logical_disk.get("Description"), + "free_space": logical_disk.get("FreeSpace"), + "size": logical_disk.get("Size"), + "partition_name": corresponding_partition.component_id, + "partition_type": corresponding_partition.get("Type"), + "volume_information": corresponding_volume_information, + "caption": corresponding_physical.get("Caption"), + "physical_disk_name": corresponding_physical.get("DeviceID"), + "model": corresponding_physical.get("Model"), + "interface_type": corresponding_physical.get("InterfaceType"), + "media_type": 
corresponding_physical.get("MediaType"), + "manufacturer": corresponding_physical.get("Manufacturer"), + "serial_number": retain_value_or_default(corresponding_physical.get("SerialNumber")), + "status": corresponding_physical.get("Status"), + "pnp_device_id": corresponding_physical.get("PNPDeviceID"), + } ) - return aggregatedData + return aggregated_data class WindowsDiskDataAggregator(DiskDataAggregator): @@ -136,15 +139,12 @@ class WindowsDiskDataAggregator(DiskDataAggregator): def __init__(self, data_loader: SystemDataLoader) -> None: """Initialiser.""" super().__init__( - physical_disks={ - d.Index: d # type: ignore - for d in ComponentsLoader(data_loader, DiskDrive).element_generator() - }, + physical_disks={d.index: d for d in ComponentsLoader(data_loader, DiskDrive).element_generator()}, partition_disks={ p.component_id: p for p in ComponentsLoader(data_loader, DiskPartition).element_generator() }, logical_partition_relationships={ - r.logical_disk_id: r.disk_partition_id # type: ignore + r.logical_disk_id: r.disk_partition_id for r in ComponentsLoader(data_loader, DiskPartitionLogicalDiskRelationship).element_generator() }, lookup_volume_information=lambda logical_disk: get_volume_information(logical_disk.component_id), @@ -162,12 +162,12 @@ def __init__(self, data_loader: SystemDataLoader) -> None: def _load_data(self) -> None: aggregator = WindowsDiskDataAggregator(self._data_loader) - disk_data_by_serialnumber: dict = dict() # The type is enforced so that mypy is happy. - disk_data_by_label = dict() + disk_data_by_serialnumber: dict = {} # The type is enforced so that mypy is happy. + disk_data_by_label = {} for ld in ComponentsLoader(self._data_loader, LogicalDisk).element_generator(): - aggregation = aggregator.aggregate(cast(LogicalDisk, ld)) + aggregation = aggregator.aggregate(ld) key = aggregation.get("uid").presumed_serial_number - disk_data_list = disk_data_by_serialnumber.get(key, list()) + disk_data_list = disk_data_by_serialnumber.get(key, []) disk_data_list.append(aggregation) disk_data_by_serialnumber[key] = disk_data_list disk_data_by_label[aggregation.get("label")] = aggregation @@ -179,18 +179,18 @@ def disk_data_by_serial_number(self) -> dict: """Gets system's disk data by serial number.""" if not self._disk_data_by_serial_number: self._load_data() - return self._disk_data_by_serial_number if self._disk_data_by_serial_number else dict() + return self._disk_data_by_serial_number if self._disk_data_by_serial_number else {} @property def disk_data_by_label(self) -> dict: """Gets system's disk data by label.""" if not self._disk_data_by_label: self._load_data() - return self._disk_data_by_label if self._disk_data_by_label else dict() + return self._disk_data_by_label if self._disk_data_by_label else {} def get_disk_information(self, uid: WindowsUID) -> List[AggregatedDiskData]: """Gets all disk information for a given UID.""" - return self.disk_data_by_serial_number.get(uid.presumed_serial_number, list()) + return self.disk_data_by_serial_number.get(uid.presumed_serial_number, []) def get_disk_information_by_label(self, label: str) -> AggregatedDiskData: """Gets all disk information for a given label.""" diff --git a/tools/python/mbed_tools/devices/_internal/windows/disk_drive.py b/tools/python/mbed_tools/devices/_internal/windows/disk_drive.py index 8f603959471..8a94742b1c9 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/disk_drive.py +++ b/tools/python/mbed_tools/devices/_internal/windows/disk_drive.py @@ -4,18 +4,21 @@ # """Defines a Disk 
drive.""" -from typing import NamedTuple, cast, Optional, Tuple import re +from typing import NamedTuple, Optional, Tuple, cast + +from typing_extensions import override from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor -from mbed_tools.devices._internal.windows.component_descriptor_utils import is_undefined_value, UNKNOWN_VALUE +from mbed_tools.devices._internal.windows.component_descriptor_utils import UNKNOWN_VALUE, is_undefined_value from mbed_tools.devices._internal.windows.windows_identifier import WindowsUID PATTERN_UID = re.compile(r"[&#]?([0-9A-Za-z]{10,48})[&#]?") class DiskDriveMsdnDefinition(NamedTuple): - """Msdn definition of a disk drive. + """ + Msdn definition of a disk drive. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-diskdrive """ @@ -74,7 +77,8 @@ class DiskDriveMsdnDefinition(NamedTuple): class DiskDrive(ComponentDescriptor): - """Disk Drive as defined in Windows API. + """ + Disk Drive as defined in Windows API. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-diskdrive """ @@ -84,6 +88,7 @@ def __init__(self) -> None: super().__init__(DiskDriveMsdnDefinition, win32_class_name="Win32_DiskDrive") @property + @override def component_id(self) -> str: """Returns the device id field.""" return cast(str, self.get("DeviceID")) @@ -93,6 +98,15 @@ def uid(self) -> WindowsUID: """Returns the disk UID.""" return Win32DiskIdParser().parse(cast(str, self.get("PNPDeviceID")), self.get("SerialNumber")) + @property + def index(self) -> int: + """ + Per MSDN: Physical drive number of the given drive. + + A value of 0xffffffff indicates that the given drive does not map to a physical drive. + """ + return self.get("Index") + class Win32DiskIdParser: """Parser of a standard Win32 Disk.""" @@ -111,7 +125,8 @@ def _parse_pnpid(self, pnpid: str) -> Tuple[str, str]: return (UNKNOWN_VALUE, UNKNOWN_VALUE) def parse(self, pnpid: str, serial_number: Optional[str]) -> WindowsUID: - """Parses the UID value based on multiple fields. + """ + Parses the UID value based on multiple fields. For different boards, the ID is stored in different fields. e.g. JLink serial number is irrelevant whereas it is the correct field for Daplink boards. diff --git a/tools/python/mbed_tools/devices/_internal/windows/disk_partition.py b/tools/python/mbed_tools/devices/_internal/windows/disk_partition.py index d71b6ed7c4f..27af925cf04 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/disk_partition.py +++ b/tools/python/mbed_tools/devices/_internal/windows/disk_partition.py @@ -6,11 +6,14 @@ from typing import NamedTuple, cast +from typing_extensions import override + from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor class DiskPartitionMsdnDefinition(NamedTuple): - """Msdn definition of a disk partition. + """ + Msdn definition of a disk partition. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-diskpartition """ @@ -58,7 +61,8 @@ class DiskPartitionMsdnDefinition(NamedTuple): class DiskPartition(ComponentDescriptor): - """Disk partition as defined in Windows API. + """ + Disk partition as defined in Windows API. 
See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-diskpartition """ @@ -68,6 +72,7 @@ def __init__(self) -> None: super().__init__(DiskPartitionMsdnDefinition, win32_class_name="Win32_DiskPartition") @property + @override def component_id(self) -> str: """Returns the device id field.""" return cast(str, self.get("DeviceID")) diff --git a/tools/python/mbed_tools/devices/_internal/windows/disk_partition_logical_disk_relationships.py b/tools/python/mbed_tools/devices/_internal/windows/disk_partition_logical_disk_relationships.py index d1efc8c3280..0c1be2cc259 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/disk_partition_logical_disk_relationships.py +++ b/tools/python/mbed_tools/devices/_internal/windows/disk_partition_logical_disk_relationships.py @@ -6,11 +6,14 @@ from typing import NamedTuple +from typing_extensions import override + from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor class DiskToPartitionMsdnDefinition(NamedTuple): - """Msdn definition of a disk partition - logical disk relationship. + """ + Msdn definition of a disk partition - logical disk relationship. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-logicaldisktopartition """ @@ -22,7 +25,8 @@ class DiskToPartitionMsdnDefinition(NamedTuple): class DiskPartitionLogicalDiskRelationship(ComponentDescriptor): - """Disk partition as defined in Windows API. + """ + Disk partition as defined in Windows API. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-logicaldisktopartition """ @@ -32,6 +36,7 @@ def __init__(self) -> None: super().__init__(DiskToPartitionMsdnDefinition, win32_class_name="Win32_LogicalDiskToPartition") @property + @override def component_id(self) -> str: """Returns the device id field.""" return f"{self.get('Antecedent')}->{self.get('Dependent')}" diff --git a/tools/python/mbed_tools/devices/_internal/windows/logical_disk.py b/tools/python/mbed_tools/devices/_internal/windows/logical_disk.py index 7f9ba138479..73d23da9d6f 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/logical_disk.py +++ b/tools/python/mbed_tools/devices/_internal/windows/logical_disk.py @@ -6,11 +6,14 @@ from typing import NamedTuple, cast +from typing_extensions import override + from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor class LogicalDiskMsdnDefinition(NamedTuple): - """Msdn definition of a logical disk. + """ + Msdn definition of a logical disk. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/cim-logicaldisk """ @@ -44,7 +47,8 @@ class LogicalDiskMsdnDefinition(NamedTuple): class LogicalDisk(ComponentDescriptor): - """Logical disk as defined in Windows API. + """ + Logical disk as defined in Windows API. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/cim-logicaldisk """ @@ -54,6 +58,7 @@ def __init__(self) -> None: super().__init__(LogicalDiskMsdnDefinition, win32_class_name="CIM_LogicalDisk") @property + @override def component_id(self) -> str: """Returns the device id field.""" return cast(str, self.get("DeviceID")) diff --git a/tools/python/mbed_tools/devices/_internal/windows/serial_port.py b/tools/python/mbed_tools/devices/_internal/windows/serial_port.py index c52aecf9d1f..c8080b34311 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/serial_port.py +++ b/tools/python/mbed_tools/devices/_internal/windows/serial_port.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. 
All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Defines a Serial Port. +""" +Defines a Serial Port. On Windows, Win32_SerialPort only represents physical serial Ports and hence, USB connections are not listed. https://superuser.com/questions/835848/how-to-view-serial-com-ports-but-not-through-device-manager @@ -14,7 +15,10 @@ import re from typing import NamedTuple, cast -from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor, UNKNOWN_VALUE +from typing_extensions import override + +from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor +from mbed_tools.devices._internal.windows.component_descriptor_utils import UNKNOWN_VALUE CAPTION_PATTERN = re.compile(r"^.* [(](.*)[)]$") @@ -26,7 +30,8 @@ def parse_caption(caption: str) -> str: class PnPEntityMsdnDefinition(NamedTuple): - """Msdn definition of a PnPEntity. + """ + Msdn definition of a PnPEntity. See https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/win32-pnpentity """ @@ -60,7 +65,8 @@ class PnPEntityMsdnDefinition(NamedTuple): class SerialPort(ComponentDescriptor): - """Serial Port as defined in Windows API. + """ + Serial Port as defined in Windows API. As can be seen in Windows documentation, https://docs.microsoft.com/en-us/windows-hardware/drivers/install/system-defined-device-setup-classes-available-to-vendors#ports--com---lpt-ports--, @@ -76,6 +82,7 @@ def __init__(self) -> None: ) @property + @override def component_id(self) -> str: """Returns the device id field.""" return cast(str, self.get("DeviceID")) diff --git a/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py b/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py index b517425b05d..5fdac99f1a9 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py +++ b/tools/python/mbed_tools/devices/_internal/windows/serial_port_data_loader.py @@ -4,11 +4,11 @@ # """Loads serial port data.""" -from typing import Optional, Generator, cast, List +from typing import List, Optional -from mbed_tools.devices._internal.windows.system_data_loader import SystemDataLoader, ComponentsLoader -from mbed_tools.devices._internal.windows.usb_device_identifier import UsbIdentifier, parse_device_id from mbed_tools.devices._internal.windows.serial_port import SerialPort +from mbed_tools.devices._internal.windows.system_data_loader import ComponentsLoader, SystemDataLoader +from mbed_tools.devices._internal.windows.usb_device_identifier import UsbIdentifier, parse_device_id class SystemSerialPortInformation: @@ -21,10 +21,7 @@ def __init__(self, data_loader: SystemDataLoader) -> None: def _load_data(self) -> None: self._serial_port_by_usb_id = { - parse_device_id(p.pnp_id): p - for p in cast( - Generator[SerialPort, None, None], ComponentsLoader(self._data_loader, SerialPort).element_generator() - ) + parse_device_id(p.pnp_id): p for p in ComponentsLoader(self._data_loader, SerialPort).element_generator() } @property @@ -32,9 +29,9 @@ def serial_port_data_by_id(self) -> dict: """Gets system's serial ports by usb id.""" if not self._serial_port_by_usb_id: self._load_data() - return self._serial_port_by_usb_id if self._serial_port_by_usb_id else dict() + return self._serial_port_by_usb_id if self._serial_port_by_usb_id else {} def get_serial_port_information(self, usb_id: UsbIdentifier) -> List[SerialPort]: """Gets all disk information for a given serial number.""" port = self.serial_port_data_by_id.get(usb_id) - return [port] 
if port else list() + return [port] if port else [] diff --git a/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py b/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py index 9cda64745b1..b2a3e783a43 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py +++ b/tools/python/mbed_tools/devices/_internal/windows/system_data_loader.py @@ -4,10 +4,14 @@ # """Loads system data in parallel and all at once in order to improve performance.""" +from __future__ import annotations + from concurrent.futures import ThreadPoolExecutor -from typing import List, Tuple, Dict, Generator, Optional, cast +from typing import Dict, Generator, Generic, List, Optional, Tuple, cast + +from typing_extensions import TypeVar -from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptorWrapper, ComponentDescriptor +from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor, ComponentDescriptorWrapper from mbed_tools.devices._internal.windows.disk_drive import DiskDrive from mbed_tools.devices._internal.windows.disk_partition import DiskPartition from mbed_tools.devices._internal.windows.disk_partition_logical_disk_relationships import ( @@ -32,11 +36,15 @@ def load_all(cls: type) -> Tuple[type, List[ComponentDescriptor]]: """Loads all elements present in the system referring to a specific type.""" - return (cls, [element for element in ComponentDescriptorWrapper(cls).element_generator()]) + return (cls, list(ComponentDescriptorWrapper(cls).element_generator())) + + +ComponentT = TypeVar("ComponentT", bound=ComponentDescriptor) class SystemDataLoader: - """Object in charge of loading all system data with regards to Usb, Disk or serial port. + """ + Object in charge of loading all system data with regards to Usb, Disk or serial port. It loads all the data in parallel and all at once in order to improve performance. 
""" @@ -49,7 +57,7 @@ def _load(self) -> None: """Loads all system data in parallel.""" with ThreadPoolExecutor() as executor: results = executor.map(load_all, SYSTEM_DATA_TYPES) - self._system_data = {k: v for (k, v) in results} + self._system_data = dict(results) @property def system_data(self) -> Dict[type, List[ComponentDescriptor]]: @@ -58,20 +66,20 @@ def system_data(self) -> Dict[type, List[ComponentDescriptor]]: self._load() return cast(Dict[type, List[ComponentDescriptor]], self._system_data) - def get_system_data(self, cls: type) -> List[ComponentDescriptor]: + def get_system_data(self, cls: type[ComponentT]) -> List[ComponentT]: """Gets the system data for a particular type.""" - return self.system_data.get(cls, list()) + return cast(List[ComponentT], self.system_data.get(cls, [])) -class ComponentsLoader: +class ComponentsLoader(Generic[ComponentT]): """Loads system components.""" - def __init__(self, data_loader: SystemDataLoader, cls: type) -> None: + def __init__(self, data_loader: SystemDataLoader, cls: type[ComponentT]) -> None: """initialiser.""" self._cls = cls self._data_loader = data_loader - def element_generator(self) -> Generator["ComponentDescriptor", None, None]: + def element_generator(self) -> Generator[ComponentT, None, None]: """Gets a generator over all elements currently registered in the system.""" - for component in self._data_loader.get_system_data(self._cls): - yield component + elements = self._data_loader.get_system_data(self._cls) + yield from elements diff --git a/tools/python/mbed_tools/devices/_internal/windows/usb_controller.py b/tools/python/mbed_tools/devices/_internal/windows/usb_controller.py index 3b2b5beca58..b150fba26ae 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/usb_controller.py +++ b/tools/python/mbed_tools/devices/_internal/windows/usb_controller.py @@ -6,11 +6,14 @@ from typing import NamedTuple, cast +from typing_extensions import override + from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor class UsbControllerMsdnDefinition(NamedTuple): - """Msdn definition of a USB controller. + """ + Msdn definition of a USB controller. See https://docs.microsoft.com/en-gb/windows/win32/cimwin32prov/win32-usbcontroller?redirectedfrom=MSDN Similar to https://docs.microsoft.com/en-gb/windows/win32/cimwin32prov/win32-usbcontrollerdevice @@ -42,7 +45,8 @@ class UsbControllerMsdnDefinition(NamedTuple): class UsbController(ComponentDescriptor): - """USB Controller as defined in Windows API. + """ + USB Controller as defined in Windows API. 
See https://docs.microsoft.com/en-gb/windows/win32/cimwin32prov/win32-usbcontroller?redirectedfrom=MSDN Similar to https://docs.microsoft.com/en-gb/windows/win32/cimwin32prov/win32-usbcontrollerdevice @@ -53,6 +57,7 @@ def __init__(self) -> None: super().__init__(UsbControllerMsdnDefinition, win32_class_name="Win32_USBController") @property + @override def component_id(self) -> str: """Returns the device id field.""" return cast(str, self.get("DeviceID")) diff --git a/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py b/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py index b26a470f64d..dc9d8498fd6 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py +++ b/tools/python/mbed_tools/devices/_internal/windows/usb_data_aggregation.py @@ -4,16 +4,18 @@ # """Aggregation of all USB data given by Windows in various locations.""" -from typing import NamedTuple, List, cast +from typing import List, NamedTuple, cast + +from typing_extensions import override from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor -from mbed_tools.devices._internal.windows.disk_aggregation import SystemDiskInformation, AggregatedDiskData +from mbed_tools.devices._internal.windows.disk_aggregation import AggregatedDiskData, SystemDiskInformation from mbed_tools.devices._internal.windows.serial_port import SerialPort from mbed_tools.devices._internal.windows.serial_port_data_loader import SystemSerialPortInformation +from mbed_tools.devices._internal.windows.system_data_loader import SystemDataLoader from mbed_tools.devices._internal.windows.usb_device_identifier import UsbIdentifier from mbed_tools.devices._internal.windows.usb_hub import UsbHub from mbed_tools.devices._internal.windows.usb_hub_data_loader import SystemUsbDeviceInformation -from mbed_tools.devices._internal.windows.system_data_loader import SystemDataLoader class AggregatedUsbDataDefinition(NamedTuple): @@ -35,6 +37,7 @@ def __init__(self) -> None: super().__init__(AggregatedUsbDataDefinition, win32_class_name="AggregatedUsbData") @property + @override def component_id(self) -> str: """Returns an id.""" return str(self.uid) @@ -76,7 +79,12 @@ def aggregate(self, usb_id: UsbIdentifier) -> AggregatedUsbData: usb_data = self._usb_devices.get_usb_devices(usb_id) aggregated_data = AggregatedUsbData() aggregated_data.set_data_values( - dict(usb_identifier=usb_id, disks=disk_data, serial_port=serial_data, related_usb_interfaces=usb_data) + { + "usb_identifier": usb_id, + "disks": disk_data, + "serial_port": serial_data, + "related_usb_interfaces": usb_data, + } ) return aggregated_data diff --git a/tools/python/mbed_tools/devices/_internal/windows/usb_device_identifier.py b/tools/python/mbed_tools/devices/_internal/windows/usb_device_identifier.py index 85670c3227e..98632cd0d79 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/usb_device_identifier.py +++ b/tools/python/mbed_tools/devices/_internal/windows/usb_device_identifier.py @@ -5,7 +5,9 @@ """Defines a USB Identifier.""" import re -from typing import Dict, List, NamedTuple, Optional, Pattern, Any, cast +from typing import Dict, List, NamedTuple, Optional, Pattern, cast + +from typing_extensions import override from mbed_tools.devices._internal.windows.component_descriptor_utils import is_undefined_data_object from mbed_tools.devices._internal.windows.windows_identifier import WindowsUID @@ -14,7 +16,8 @@ class UsbIdentifier(NamedTuple): - """Object describing the different 
elements present in the device ID. + """ + Object describing the different elements present in the device ID. Attributes: UID: Universal ID, either the serial number or device instance ID. @@ -24,7 +27,7 @@ class UsbIdentifier(NamedTuple): MI: Multiple Interface, a 2 digit interface number. """ - UID: Optional[str] = None + UID: Optional[WindowsUID] = None VID: Optional[str] = None PID: Optional[str] = None REV: Optional[str] = None @@ -54,7 +57,8 @@ def vendor_id(self) -> str: """Returns the product id field.""" return self.VID or "" - def __eq__(self, other: Any) -> bool: + @override + def __eq__(self, other: object) -> bool: """States whether the other id equals to self.""" if not other or not isinstance(other, UsbIdentifier): return False @@ -63,6 +67,7 @@ def __eq__(self, other: Any) -> bool: return all([self.uid == other.uid, self.product_id == other.product_id, self.vendor_id == other.vendor_id]) + @override def __hash__(self) -> int: """Generates a hash.""" return hash(self.uid) + hash(self.product_id) + hash(self.vendor_id) @@ -74,13 +79,15 @@ def is_undefined(self) -> bool: class Win32DeviceIdParser: - """Parser of a standard Win32 device ID. + """ + Parser of a standard Win32 device ID. See https://docs.microsoft.com/en-us/windows-hardware/drivers/install/standard-usb-identifiers """ def parse_uid(self, raw_id: str, serial_number: Optional[str] = None) -> WindowsUID: - """Parses the UID value. + """ + Parses the UID value. As described here: https://docs.microsoft.com/it-it/windows-hardware/drivers/install/device-instance-ids @@ -107,9 +114,9 @@ def record_id_element(self, element: str, valuable_information: dict, patterns_d if match: valuable_information[k] = match.group(1) - def split_id_elements(self, parts: List[str], serial_number: str = None) -> dict: + def split_id_elements(self, parts: List[str], serial_number: Optional[str] = None) -> dict: """Splits the different elements of an Device ID.""" - information = dict() + information = {} information[KEY_UID] = self.parse_uid(parts[-1], serial_number) other_elements = parts[-2].split("&") patterns_dict = UsbIdentifier.get_patterns_dict() @@ -118,7 +125,8 @@ def split_id_elements(self, parts: List[str], serial_number: str = None) -> dict return information def parse(self, id_string: Optional[str], serial_number: Optional[str] = None) -> "UsbIdentifier": - r"""Parses the device id string and retrieves the different elements of interest. + r""" + Parses the device id string and retrieves the different elements of interest. See https://docs.microsoft.com/en-us/windows-hardware/drivers/install/standard-usb-identifiers Format: \ @@ -146,7 +154,8 @@ def parse(self, id_string: Optional[str], serial_number: Optional[str] = None) - def parse_device_id(id_string: Optional[str], serial_number: Optional[str] = None) -> UsbIdentifier: - """Parses the device id string and retrieves the different elements of interest. + """ + Parses the device id string and retrieves the different elements of interest. 
See https://docs.microsoft.com/en-us/windows-hardware/drivers/install/standard-usb-identifiers """ diff --git a/tools/python/mbed_tools/devices/_internal/windows/usb_hub.py b/tools/python/mbed_tools/devices/_internal/windows/usb_hub.py index 7999508c7cf..07a042d1926 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/usb_hub.py +++ b/tools/python/mbed_tools/devices/_internal/windows/usb_hub.py @@ -6,11 +6,14 @@ from typing import NamedTuple, cast +from typing_extensions import override + from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor class UsbHubMsdnDefinition(NamedTuple): - """Msdn definition of a Usb hub. + """ + Msdn definition of a Usb hub. See https://docs.microsoft.com/en-gb/previous-versions/windows/desktop/cimwin32a/win32-usbhub?redirectedfrom=MSDN See also https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/cim-usbdevice @@ -47,7 +50,8 @@ class UsbHubMsdnDefinition(NamedTuple): class UsbHub(ComponentDescriptor): - """USB Hub as defined in Windows API. + """ + USB Hub as defined in Windows API. See https://docs.microsoft.com/en-gb/previous-versions/windows/desktop/cimwin32a/win32-usbhub?redirectedfrom=MSDN Seems similar to https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/cim-usbhub @@ -58,6 +62,7 @@ def __init__(self) -> None: super().__init__(UsbHubMsdnDefinition, win32_class_name="Win32_USBHub") @property + @override def component_id(self) -> str: """Returns the device id field.""" return cast(str, self.get("DeviceID")) diff --git a/tools/python/mbed_tools/devices/_internal/windows/usb_hub_data_loader.py b/tools/python/mbed_tools/devices/_internal/windows/usb_hub_data_loader.py index 2e05df56bff..06ba4fb13b0 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/usb_hub_data_loader.py +++ b/tools/python/mbed_tools/devices/_internal/windows/usb_hub_data_loader.py @@ -4,18 +4,19 @@ # """Loads System's USB hub.""" -from typing import Dict, List, cast, Optional, Set, Generator +from typing import Dict, Generator, List, Optional, Set, cast from mbed_tools.devices._internal.windows.component_descriptor import ComponentDescriptor from mbed_tools.devices._internal.windows.device_instance_id import get_children_instance_id -from mbed_tools.devices._internal.windows.system_data_loader import SystemDataLoader, ComponentsLoader +from mbed_tools.devices._internal.windows.system_data_loader import ComponentsLoader, SystemDataLoader from mbed_tools.devices._internal.windows.usb_controller import UsbController -from mbed_tools.devices._internal.windows.usb_device_identifier import parse_device_id, UsbIdentifier +from mbed_tools.devices._internal.windows.usb_device_identifier import UsbIdentifier, parse_device_id from mbed_tools.devices._internal.windows.usb_hub import UsbHub class SystemUsbDeviceInformation: - """Usb Hub cache for this system. + """ + Usb Hub cache for this system. On Windows, each interface e.g. Composite, Mass storage, Port is defined as a separate independent UsbHub although they are related to the same device. 
@@ -29,13 +30,10 @@ def __init__(self, data_loader: SystemDataLoader) -> None: self._data_loader = data_loader def _list_usb_controller_ids(self) -> List[UsbIdentifier]: - return cast( - List[UsbIdentifier], - [ - parse_device_id(cast(UsbController, usbc).component_id) - for usbc in ComponentsLoader(self._data_loader, UsbController).element_generator() - ], - ) + return [ + parse_device_id(usbc.component_id) + for usbc in ComponentsLoader(self._data_loader, UsbController).element_generator() + ] def _iterate_over_hubs(self) -> Generator[ComponentDescriptor, None, None]: return ComponentsLoader(self._data_loader, UsbHub).element_generator() @@ -52,7 +50,7 @@ def _determine_potential_serial_number(self, usb_device: UsbHub) -> Optional[str def _load(self) -> None: """Populates the cache.""" - self._cache = cast(Dict[UsbIdentifier, List[UsbHub]], dict()) + self._cache = cast(Dict[UsbIdentifier, List[UsbHub]], {}) self._ids_cache = cast(Set[UsbIdentifier], set()) controllers = self._list_usb_controller_ids() for usb_device in self._iterate_over_hubs(): @@ -61,7 +59,7 @@ def _load(self) -> None: ) if usb_id in controllers: continue - entry = self._cache.get(usb_id, list()) + entry = self._cache.get(usb_id, []) entry.append(cast(UsbHub, usb_device)) self._cache[usb_id] = entry @@ -78,10 +76,10 @@ def usb_devices(self) -> Dict[UsbIdentifier, List[UsbHub]]: def get_usb_devices(self, uid: UsbIdentifier) -> List[UsbHub]: """Gets all USB devices related to an identifier.""" - return self.usb_devices.get(uid, list()) + return self.usb_devices.get(uid, []) - def usb_device_ids(self) -> List[UsbIdentifier]: + def usb_device_ids(self) -> Set[UsbIdentifier]: """Gets system usb device IDs.""" if not self._ids_cache: self._load() - return cast(List[UsbIdentifier], self._ids_cache) + return cast(Set[UsbIdentifier], self._ids_cache) diff --git a/tools/python/mbed_tools/devices/_internal/windows/volume_set.py b/tools/python/mbed_tools/devices/_internal/windows/volume_set.py index a48f857b38f..3b73a1ea5d8 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/volume_set.py +++ b/tools/python/mbed_tools/devices/_internal/windows/volume_set.py @@ -2,26 +2,31 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Defines a Volume Set. +""" +Defines a Volume Set. CIM_VolumeSet should be the data model to use but does not seem to actually return the data that we are looking for: https://docs.microsoft.com/en-us/windows/win32/cimwin32prov/cim-volumeset Therefore, a specific data model needs to be constructed using other Windows methods. """ -from enum import Enum -from typing import NamedTuple, List -from mbed_tools.devices._internal.windows.component_descriptor import UNKNOWN_VALUE +from __future__ import annotations import logging -import win32.win32api -import win32.win32file +from enum import Enum +from typing import List, NamedTuple + +import win32.win32api # pyright: ignore[reportMissingModuleSource] +import win32.win32file # pyright: ignore[reportMissingModuleSource] + +from mbed_tools.devices._internal.windows.component_descriptor_utils import UNKNOWN_VALUE logger = logging.getLogger(__name__) class DriveType(Enum): - """Drive type as defined in Win32 API. + """ + Drive type as defined in Win32 API. See https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-getdrivetypea. """ @@ -39,7 +44,8 @@ class DriveType(Enum): class VolumeInformation(NamedTuple): - """Volume information. + """ + Volume information. 
See http://timgolden.me.uk/pywin32-docs/win32api__GetVolumeInformation_meth.html See also http://timgolden.me.uk/python/win32_how_do_i/find-drive-types.html @@ -54,7 +60,7 @@ class VolumeInformation(NamedTuple): DriveType: DriveType # As defined by GetDriveType -def _get_windows_volume_information(volume: str) -> List[str]: +def _get_windows_volume_information(volume: str) -> List[str | int]: try: return list(win32.win32api.GetVolumeInformation(volume)) except Exception as e: @@ -82,8 +88,9 @@ def get_volume_information(volume: str) -> VolumeInformation: """Gets the volume information.""" if not volume.endswith("\\"): volume = f"{volume}\\" - values: list = _get_windows_volume_information(volume) + [ + values: list = [ + *_get_windows_volume_information(volume), _get_volume_name_for_mount_point(volume), - _get_drive_type(volume), # type: ignore + _get_drive_type(volume), ] return VolumeInformation(*values) diff --git a/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py b/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py index cfb1517e85a..eb32fd6f792 100644 --- a/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py +++ b/tools/python/mbed_tools/devices/_internal/windows/windows_identifier.py @@ -5,7 +5,9 @@ """Defines a Windows identifier.""" from functools import total_ordering -from typing import Any, Optional +from typing import Optional + +from typing_extensions import override from mbed_tools.devices._internal.windows.component_descriptor_utils import is_undefined_value @@ -13,7 +15,8 @@ def is_device_instance_id(value: Optional[str]) -> bool: - """Determines whether a value is a device instance ID generated by Windows or not. + """ + Determines whether a value is a device instance ID generated by Windows or not. See https://docs.microsoft.com/it-it/windows-hardware/drivers/install/instance-ids Typical IDs generated by Windows for a same device (but different interfaces) are, as follows: @@ -26,7 +29,8 @@ def is_device_instance_id(value: Optional[str]) -> bool: @total_ordering class WindowsUID: - """Definition of a Windows Universal ID. + """ + Definition of a Windows Universal ID. UID in Windows can be either a serial number or an device instance ID depending on how underlying interfaces were defined. @@ -61,8 +65,10 @@ def __init__(self, uid: str, raw_uid: str, serial_number: Optional[str]) -> None self.raw_uid = raw_uid self.serial_number = serial_number - def __eq__(self, other: Any) -> bool: - """Defines the equality checker. + @override + def __eq__(self, other: object) -> bool: + """ + Defines the equality checker. The `equal` method does not follow a typical data model equal logic because of the complexity of this class which tries to find out which of its @@ -75,11 +81,9 @@ def __eq__(self, other: Any) -> bool: if not other or not isinstance(other, WindowsUID): return False if ( - self.uid == other.uid + self.uid in (other.uid, other.serial_number) or self.raw_uid == other.raw_uid - or self.serial_number == other.serial_number - or self.serial_number == other.uid - or self.uid == other.serial_number + or self.serial_number in (other.serial_number, other.uid) ): return True # Due to the complexity of determining the UID on Windows, @@ -88,7 +92,8 @@ def __eq__(self, other: Any) -> bool: @property def presumed_serial_number(self) -> str: - """Determines what may be the most likely value for the serial number of the component. + """ + Determines what may be the most likely value for the serial number of the component. 
From the different components at its disposal, the system tries to find what may be the serial number. """ @@ -109,8 +114,10 @@ def contains_genuine_serial_number(self) -> bool: serial_number = self.presumed_serial_number return not (is_undefined_value(serial_number) or is_device_instance_id(serial_number)) + @override def __hash__(self) -> int: - """Calculates the hash of the UID. + """ + Calculates the hash of the UID. Due to the complexity of the `equal` method, this implementation of the `hash` calculation breaks the hashing/equality rules of typical data objects. @@ -121,10 +128,12 @@ def __hash__(self) -> int: """ return hash(self.instance_id) + @override def __repr__(self) -> str: """String representation of the UID.""" return f"WindowsUID({self.uid})" + @override def __str__(self) -> str: """String representation of the UID.""" elements = [f"{k}={v!r}" for (k, v) in self.__dict__.items()] diff --git a/tools/python/mbed_tools/devices/device.py b/tools/python/mbed_tools/devices/device.py index 7bcf955d3a4..b7c0dd095f2 100644 --- a/tools/python/mbed_tools/devices/device.py +++ b/tools/python/mbed_tools/devices/device.py @@ -6,17 +6,20 @@ from dataclasses import dataclass, field from pathlib import Path -from typing import Tuple, Optional, List -from mbed_tools.targets import Board -from mbed_tools.devices._internal.detect_candidate_devices import CandidateDevice -from mbed_tools.devices._internal.resolve_board import resolve_board, NoBoardForCandidate, ResolveBoardError +from typing import List, Optional, Sequence + +from mbed_tools.devices._internal.candidate_device import CandidateDevice +from mbed_tools.devices._internal.exceptions import NoBoardForCandidateError, ResolveBoardError from mbed_tools.devices._internal.file_parser import read_device_files -from mbed_tools.devices.exceptions import DeviceLookupFailed +from mbed_tools.devices._internal.resolve_board import resolve_board +from mbed_tools.devices.exceptions import DeviceLookupFailedError +from mbed_tools.targets import Board @dataclass(frozen=True, order=True) class Device: - """Definition of an Mbed Enabled Device. + """ + Definition of an Mbed Enabled Device. An Mbed Device is always a USB mass storage device, which sometimes also presents a USB serial port. A valid Mbed Device must have a Board associated with it. @@ -31,13 +34,14 @@ class Device: mbed_board: Board serial_number: str serial_port: Optional[str] - mount_points: Tuple[Path, ...] + mount_points: Sequence[Path] mbed_enabled: bool = False interface_version: Optional[str] = None @classmethod def from_candidate(cls, candidate: CandidateDevice) -> "Device": - """Contruct a Device from a CandidateDevice. + """ + Contruct a Device from a CandidateDevice. We try to resolve a board using data files that may be stored on the CandidateDevice. If this fails we set the board to `None` which means we couldn't verify this Device @@ -52,15 +56,16 @@ def from_candidate(cls, candidate: CandidateDevice) -> "Device": device_file_info.product_code, device_file_info.online_id, candidate.serial_number ) mbed_enabled = True - except NoBoardForCandidate: + except NoBoardForCandidateError: # Create an empty Board to ensure the device is fully populated and rendering is simple mbed_board = Board.from_offline_board_entry({}) mbed_enabled = False - except ResolveBoardError: - raise DeviceLookupFailed( + except ResolveBoardError as ex: + msg = ( f"Failed to resolve the board for candidate device {candidate!r}. There was a problem looking up the " "board data in the database." 
) + raise DeviceLookupFailedError(msg) from ex return Device( serial_port=candidate.serial_port, @@ -74,7 +79,8 @@ def from_candidate(cls, candidate: CandidateDevice) -> "Device": @dataclass(order=True) class ConnectedDevices: - """Definition of connected devices which may be Mbed Boards. + """ + Definition of connected devices which may be Mbed Boards. If a connected device is identified as an Mbed Board by using the HTM file on the USB mass storage device (or sometimes by using the serial number), it will be included in the `identified_devices` list. @@ -91,7 +97,8 @@ class ConnectedDevices: unidentified_devices: List[Device] = field(default_factory=list) def add_device(self, device: Device) -> None: - """Add a device to the connected devices. + """ + Add a device to the connected devices. Args: device: a Device object containing the device information. diff --git a/tools/python/mbed_tools/devices/devices.py b/tools/python/mbed_tools/devices/devices.py index 60b2596118f..ae0a2ea012b 100644 --- a/tools/python/mbed_tools/devices/devices.py +++ b/tools/python/mbed_tools/devices/devices.py @@ -8,13 +8,13 @@ from typing import List, Optional from mbed_tools.devices._internal.detect_candidate_devices import detect_candidate_devices - from mbed_tools.devices.device import ConnectedDevices, Device -from mbed_tools.devices.exceptions import DeviceLookupFailed, NoDevicesFound +from mbed_tools.devices.exceptions import DeviceLookupFailedError, NoDevicesFoundError def get_connected_devices() -> ConnectedDevices: - """Returns Mbed Devices connected to host computer. + """ + Returns Mbed Devices connected to host computer. Connected devices which have been identified as Mbed Boards and also connected devices which are potentially Mbed Boards (but not could not be identified in the database) are returned. @@ -29,7 +29,8 @@ def get_connected_devices() -> ConnectedDevices: def find_connected_device(target_name: str, identifier: Optional[int] = None) -> Device: - """Find a connected device matching the given target_name, if there is only one. + """ + Find a connected device matching the given target_name, if there is only one. Args: target_name: The Mbed target name of the device. @@ -61,11 +62,12 @@ def find_connected_device(target_name: str, identifier: Optional[int] = None) -> f"`{target_name}[{identifier}]` is not a valid connected target.\n" f"The following {target_name}s were detected:\n{detected_targets}" ) - raise DeviceLookupFailed(msg) + raise DeviceLookupFailedError(msg) def find_all_connected_devices(target_name: str) -> List[Device]: - """Find all connected devices matching the given target_name. + """ + Find all connected devices matching the given target_name. Args: target_name: The Mbed target name of the device. @@ -79,7 +81,8 @@ def find_all_connected_devices(target_name: str) -> List[Device]: """ connected = get_connected_devices() if not connected.identified_devices: - raise NoDevicesFound("No Mbed enabled devices found.") + msg = "No Mbed enabled devices found." 
+ raise NoDevicesFoundError(msg) matching_devices = sorted( [device for device in connected.identified_devices if device.mbed_board.board_type == target_name.upper()], @@ -97,4 +100,4 @@ def find_all_connected_devices(target_name: str) -> List[Device]: "Check the device is connected by USB, and that the name is entered correctly.\n" f"The following devices were detected:\n{detected_targets}" ) - raise DeviceLookupFailed(msg) + raise DeviceLookupFailedError(msg) diff --git a/tools/python/mbed_tools/devices/exceptions.py b/tools/python/mbed_tools/devices/exceptions.py index ea453e850e9..1816b6eefb0 100644 --- a/tools/python/mbed_tools/devices/exceptions.py +++ b/tools/python/mbed_tools/devices/exceptions.py @@ -11,11 +11,11 @@ class MbedDevicesError(ToolsError): """Base public exception for the mbed-devices package.""" -class DeviceLookupFailed(MbedDevicesError): +class DeviceLookupFailedError(MbedDevicesError): """Failed to look up data associated with the device.""" -class NoDevicesFound(MbedDevicesError): +class NoDevicesFoundError(MbedDevicesError): """No Mbed Enabled devices were found.""" diff --git a/tools/python/mbed_tools/lib/json_helpers.py b/tools/python/mbed_tools/lib/json_helpers.py index ef6f9f85489..4694586957e 100644 --- a/tools/python/mbed_tools/lib/json_helpers.py +++ b/tools/python/mbed_tools/lib/json_helpers.py @@ -5,12 +5,12 @@ """Helpers for json related functions.""" import json -import pyjson5 import logging - from pathlib import Path from typing import Any +import pyjson5 + logger = logging.getLogger(__name__) @@ -21,15 +21,16 @@ def decode_json_file(path: Path) -> Any: logger.debug(f"Loading JSON file {path}") return json.loads(path.read_text()) except json.JSONDecodeError: - logger.error(f"Failed to decode JSON data in the file located at '{path}'") + logger.exception(f"Failed to decode JSON data in the file located at '{path}'") raise elif path.suffix == ".json5": try: logger.debug(f"Loading JSON file {path}") with path.open() as json_file: - return pyjson5.decode_io(json_file) + return pyjson5.decode_io(json_file) # pyright: ignore[reportArgumentType] except ValueError: - logger.error(f"Failed to decode JSON data in the file located at '{path}'") + logger.exception(f"Failed to decode JSON data in the file located at '{path}'") raise else: - raise ValueError(f"Unknown JSON file extension {path.suffix}") + msg = f"Unknown JSON file extension {path.suffix}" + raise ValueError(msg) diff --git a/tools/python/mbed_tools/lib/logging.py b/tools/python/mbed_tools/lib/logging.py index 923c5b129a1..f97f87c256c 100644 --- a/tools/python/mbed_tools/lib/logging.py +++ b/tools/python/mbed_tools/lib/logging.py @@ -4,9 +4,10 @@ # """Helpers for logging errors according to severity of the exception.""" -from typing import Type, Optional, cast -from types import TracebackType import logging +from types import TracebackType +from typing import Optional, Type, cast + from mbed_tools.lib.exceptions import ToolsError LOGGING_FORMAT = "%(levelname)s: %(message)s" @@ -32,7 +33,7 @@ def _exception_message(err: BaseException, log_level: int, traceback: bool) -> s class MbedToolsHandler: """Context Manager to catch Mbed Tools exceptions and generate a helpful user facing message.""" - def __init__(self, logger: logging.Logger, traceback: bool = False): + def __init__(self, logger: logging.Logger, traceback: bool = False) -> None: """Keep track of the logger to use and whether or not a traceback should be generated.""" self._logger = logger self._traceback = traceback @@ -61,7 +62,8 @@ def 
__exit__( def log_exception(logger: logging.Logger, exception: Exception, show_traceback: bool = False) -> None: - """Logs an exception in both normal and verbose forms. + """ + Logs an exception in both normal and verbose forms. Args: logger: logger @@ -72,7 +74,8 @@ def log_exception(logger: logging.Logger, exception: Exception, show_traceback: def set_log_level(verbose_count: int) -> None: - """Sets the log level. + """ + Sets the log level. Args: verbose_count: number of `-v` flags used diff --git a/tools/python/mbed_tools/lib/python_helpers.py b/tools/python/mbed_tools/lib/python_helpers.py index 739b5fa95f7..7e37b8af6b2 100644 --- a/tools/python/mbed_tools/lib/python_helpers.py +++ b/tools/python/mbed_tools/lib/python_helpers.py @@ -8,7 +8,8 @@ def flatten_nested(input_iter: Iterable) -> List: - """Flatten a nested Iterable with arbitrary levels of nesting. + """ + Flatten a nested Iterable with arbitrary levels of nesting. If the input is an iterator then this function will exhaust it. diff --git a/tools/python/mbed_tools/project/__init__.py b/tools/python/mbed_tools/project/__init__.py index da693bdf87e..50394325346 100644 --- a/tools/python/mbed_tools/project/__init__.py +++ b/tools/python/mbed_tools/project/__init__.py @@ -2,12 +2,12 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Creation and management of Mbed OS projects. +""" +Creation and management of Mbed OS projects. * Creation of a new Mbed OS application. * Cloning of an existing Mbed OS program. * Deploy of a specific version of Mbed OS or library. """ -from mbed_tools.project.project import initialise_project, import_project, deploy_project, get_known_libs -from mbed_tools.project.mbed_program import MbedProgram +from mbed_tools.project.mbed_program import MbedProgram as MbedProgram diff --git a/tools/python/mbed_tools/project/_internal/git_utils.py b/tools/python/mbed_tools/project/_internal/git_utils.py deleted file mode 100644 index 5adb2d3eaf0..00000000000 --- a/tools/python/mbed_tools/project/_internal/git_utils.py +++ /dev/null @@ -1,148 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -"""Wrappers for git operations.""" - -from dataclasses import dataclass -from pathlib import Path - -import git -import logging - -from mbed_tools.project.exceptions import VersionControlError -from mbed_tools.project._internal.progress import ProgressReporter -from typing import Optional - -logger = logging.getLogger(__name__) - - -@dataclass -class GitReference: - """Git reference for a remote repository. - - Attributes: - repo_url: URL of the git repository. - ref: The reference commit sha, tag or branch. - """ - - repo_url: str - ref: str - - -def clone(url: str, dst_dir: Path, ref: Optional[str] = None, depth: int = 1) -> git.Repo: - """Clone a library repository. - - Args: - url: URL of the remote to clone. - dst_dir: Destination directory for the cloned repo. - ref: An optional git commit hash, branch or tag reference to checkout - depth: Truncate history to the specified number of commits. Defaults to - 1, to make a shallow clone. - - Raises: - VersionControlError: Cloning the repository failed. - """ - # Gitpython doesn't propagate the git error message when a repo is already - # cloned, so we cannot depend on git to handle the "already cloned" error. - # We must handle this ourselves instead. 
- if dst_dir.exists() and list(dst_dir.glob("*")): - raise VersionControlError(f"{dst_dir} exists and is not an empty directory.") - - clone_from_kwargs = {"url": url, "to_path": str(dst_dir), "progress": ProgressReporter(name=url), "depth": depth} - if ref: - clone_from_kwargs["branch"] = ref - - try: - return git.Repo.clone_from(**clone_from_kwargs) - except git.exc.GitCommandError as err: - raise VersionControlError(f"Cloning git repository from url '{url}' failed. Error from VCS: {err}") - - -def checkout(repo: git.Repo, ref: str, force: bool = False) -> None: - """Check out a specific reference in the given repository. - - Args: - repo: git.Repo object where the checkout will be performed. - ref: Git commit hash, branch or tag reference, must be a valid ref defined in the repo. - - Raises: - VersionControlError: Check out failed. - """ - try: - git_args = [ref] + ["--force"] if force else [ref] - repo.git.checkout(*git_args) - except git.exc.GitCommandError as err: - raise VersionControlError(f"Failed to check out revision '{ref}'. Error from VCS: {err}") - - -def fetch(repo: git.Repo, ref: str) -> None: - """Fetch from the repo's origin. - - Args: - repo: git.Repo object where the checkout will be performed. - ref: Git commit hash, branch or tag reference, must be a valid ref defined in the repo. - - Raises: - VersionControlError: Fetch failed. - """ - try: - repo.git.fetch("origin", ref) - except git.exc.GitCommandError as err: - raise VersionControlError(f"Failed to fetch. Error from VCS: {err}") - - -def init(path: Path) -> git.Repo: - """Initialise a git repository at the given path. - - Args: - path: Path where the repo will be initialised. - - Returns: - Initialised git.Repo object. - - Raises: - VersionControlError: initalising the repository failed. - """ - try: - return git.Repo.init(str(path)) - except git.exc.GitCommandError as err: - raise VersionControlError(f"Failed to initialise git repository at path '{path}'. Error from VCS: {err}") - - -def get_repo(path: Path) -> git.Repo: - """Get a git.Repo object from an existing repository path. - - Args: - path: Path to the git repository. - - Returns: - git.Repo object. - - Raises: - VersionControlError: No valid git repository at this path. - """ - try: - return git.Repo(str(path)) - except git.exc.InvalidGitRepositoryError: - raise VersionControlError( - "Could not find a valid git repository at this path. Please perform a `git init` command." - ) - - -def get_default_branch(repo: git.Repo) -> str: - """Get a default branch from an existing git.Repo. - - Args: - repo: git.Repo object - - Returns: - The default branch name as a string. - - Raises: - VersionControlError: Could not find the default branch name. - """ - try: - return str(repo.git.symbolic_ref("refs/remotes/origin/HEAD").rsplit("/", maxsplit=1)[-1]) - except git.exc.GitCommandError as err: - raise VersionControlError(f"Could not resolve default repository branch name. Error from VCS: {err}") diff --git a/tools/python/mbed_tools/project/_internal/libraries.py b/tools/python/mbed_tools/project/_internal/libraries.py deleted file mode 100644 index 84bbd101677..00000000000 --- a/tools/python/mbed_tools/project/_internal/libraries.py +++ /dev/null @@ -1,134 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. 
-# SPDX-License-Identifier: Apache-2.0 -# -"""Objects for library reference handling.""" - -import logging - -from dataclasses import dataclass -from pathlib import Path -from typing import Generator, List - -from mbed_tools.project._internal import git_utils -from mbed_tools.project.exceptions import VersionControlError - -logger = logging.getLogger(__name__) - - -@dataclass(frozen=True, order=True) -class MbedLibReference: - """Metadata associated with an Mbed library. - - An Mbed library is an external dependency of an MbedProgram. The MbedProgram is made aware of the library - dependency by the presence of a .lib file in the project tree, which we refer to as a library reference file. The - library reference file contains a URI where the dependency's source code can be fetched. - - Attributes: - reference_file: Path to the .lib reference file for this library. - source_code_path: Path to the source code if it exists in the local project. - """ - - reference_file: Path - source_code_path: Path - - def is_resolved(self) -> bool: - """Determines if the source code for this library is present in the source tree.""" - return self.source_code_path.exists() and self.source_code_path.is_dir() - - def get_git_reference(self) -> git_utils.GitReference: - """Get the source code location from the library reference file. - - Returns: - Data structure containing the contents of the library reference file. - """ - raw_ref = self.reference_file.read_text().strip() - url, sep, ref = raw_ref.partition("#") - - if url.endswith("/"): - url = url[:-1] - - return git_utils.GitReference(repo_url=url, ref=ref) - - -@dataclass -class LibraryReferences: - """Manages library references in an MbedProgram.""" - - root: Path - ignore_paths: List[str] - - def fetch(self) -> None: - """Recursively clone all dependencies defined in .lib files.""" - for lib in self.iter_unresolved(): - git_ref = lib.get_git_reference() - logger.info(f"Resolving library reference {git_ref.repo_url}.") - _clone_at_ref(git_ref.repo_url, lib.source_code_path, git_ref.ref) - - # Check if we find any new references after cloning dependencies. - if list(self.iter_unresolved()): - self.fetch() - - def checkout(self, force: bool) -> None: - """Check out all resolved libs to revision specified in .lib files.""" - for lib in self.iter_resolved(): - repo = git_utils.get_repo(lib.source_code_path) - git_ref = lib.get_git_reference() - - if not git_ref.ref: - git_ref.ref = git_utils.get_default_branch(repo) - - git_utils.fetch(repo, git_ref.ref) - git_utils.checkout(repo, "FETCH_HEAD", force=force) - - def iter_all(self) -> Generator[MbedLibReference, None, None]: - """Iterate all library references in the tree. - - Yields: - Iterator to library reference. - """ - for lib in self.root.rglob("*.lib"): - if not self._in_ignore_path(lib): - yield MbedLibReference(lib, lib.with_suffix("")) - - def iter_unresolved(self) -> Generator[MbedLibReference, None, None]: - """Iterate all unresolved library references in the tree. - - Yields: - Iterator to library reference. - """ - for lib in self.iter_all(): - if not lib.is_resolved(): - yield lib - - def iter_resolved(self) -> Generator[MbedLibReference, None, None]: - """Iterate all resolved library references in the tree. - - Yields: - Iterator to library reference. 
- """ - for lib in self.iter_all(): - if lib.is_resolved(): - yield lib - - def _in_ignore_path(self, lib_reference_path: Path) -> bool: - """Check if a library reference is in a path we want to ignore.""" - return any(p in lib_reference_path.parts for p in self.ignore_paths) - - -def _clone_at_ref(url: str, path: Path, ref: str) -> None: - if ref: - logger.info(f"Checking out revision {ref} for library {url}.") - try: - git_utils.clone(url, path, ref) - except VersionControlError: - # We weren't able to clone. Try again without the ref. - repo = git_utils.clone(url, path) - # We couldn't clone the ref and had to fall back to cloning - # just the default branch. Fetch the ref before checkout, so - # that we have it available locally. - logger.warning(f"No tag or branch with name {ref}. Fetching full repository.") - git_utils.fetch(repo, ref) - git_utils.checkout(repo, "FETCH_HEAD") - else: - git_utils.clone(url, path) diff --git a/tools/python/mbed_tools/project/_internal/progress.py b/tools/python/mbed_tools/project/_internal/progress.py deleted file mode 100644 index d9b81b1ee8a..00000000000 --- a/tools/python/mbed_tools/project/_internal/progress.py +++ /dev/null @@ -1,61 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -"""Progress bar for git operations.""" - -import sys - -from typing import Optional, Any - -from git import RemoteProgress -from tqdm import tqdm - - -class ProgressBar(tqdm): - """tqdm progress bar that can be used in a callback context.""" - - total: Any - - def update_progress(self, block_num: float = 1, block_size: float = 1, total_size: float = None) -> None: - """Update the progress bar. - - Args: - block_num: Number of the current block. - block_size: Size of the current block. - total_size: Total size of all expected blocks. - """ - if total_size is not None and self.total != total_size: - self.total = total_size - self.update(block_num * block_size - self.n) - - -class ProgressReporter(RemoteProgress): - """GitPython RemoteProgress subclass that displays a progress bar for git fetch and push operations.""" - - def __init__(self, *args: Any, name: str = "", **kwargs: Any) -> None: - """Initialiser. - - Args: - name: The name of the git repository to report progress on. - """ - self.name = name - super().__init__(*args, **kwargs) - - def update(self, op_code: int, cur_count: float, max_count: Optional[float] = None, message: str = "") -> None: - """Called whenever the progress changes. - - Args: - op_code: Integer describing the stage of the current operation. - cur_count: Current item count. - max_count: Maximum number of items expected. - message: Message string describing the number of bytes transferred in the WRITING operation. 
- """ - if self.BEGIN & op_code: - self.bar = ProgressBar(total=max_count, file=sys.stderr, leave=False) - - self.bar.desc = f"{self.name} {self._cur_line}" - self.bar.update_progress(block_num=cur_count, total_size=max_count) - - if self.END & op_code: - self.bar.close() diff --git a/tools/python/mbed_tools/project/_internal/project_data.py b/tools/python/mbed_tools/project/_internal/project_data.py index 3be4b102f03..e9d9e61436b 100644 --- a/tools/python/mbed_tools/project/_internal/project_data.py +++ b/tools/python/mbed_tools/project/_internal/project_data.py @@ -4,19 +4,11 @@ # """Objects representing Mbed program and library data.""" -import json import logging - from dataclasses import dataclass from pathlib import Path from typing import Optional -from mbed_tools.project._internal.render_templates import ( - render_cmakelists_template, - render_main_cpp_template, - render_gitignore_template, -) - logger = logging.getLogger(__name__) # Mbed program file names and constants. @@ -43,7 +35,8 @@ @dataclass class MbedProgramFiles: - """Files defining an MbedProgram. + """ + Files defining an MbedProgram. This object holds paths to the various files which define an MbedProgram. @@ -63,45 +56,10 @@ class MbedProgramFiles: cmake_build_dir: Path custom_targets_json: Path - @classmethod - def from_new(cls, root_path: Path) -> "MbedProgramFiles": - """Create MbedProgramFiles from a new directory. - - A "new directory" in this context means it doesn't already contain an Mbed program. - - Args: - root_path: The directory in which to create the program data files. - - Raises: - ValueError: A program .mbed or mbed-os.lib file already exists at this path. - """ - app_config = root_path / APP_CONFIG_FILE_NAME_JSON5 - mbed_os_ref = root_path / MBED_OS_REFERENCE_FILE_NAME - cmakelists_file = root_path / CMAKELISTS_FILE_NAME - main_cpp = root_path / MAIN_CPP_FILE_NAME - gitignore = root_path / ".gitignore" - cmake_build_dir = root_path / BUILD_DIR - custom_targets_json = root_path / CUSTOM_TARGETS_JSON_FILE_NAME - - if mbed_os_ref.exists(): - raise ValueError(f"Program already exists at path {root_path}.") - - app_config.write_text(json.dumps(DEFAULT_APP_CONFIG, indent=4)) - mbed_os_ref.write_text(f"{MBED_OS_REFERENCE_URL}#{MBED_OS_REFERENCE_ID}") - render_cmakelists_template(cmakelists_file, root_path.stem) - render_main_cpp_template(main_cpp) - render_gitignore_template(gitignore) - return cls( - app_config_file=app_config, - mbed_os_ref=mbed_os_ref, - cmakelists_file=cmakelists_file, - cmake_build_dir=cmake_build_dir, - custom_targets_json=custom_targets_json, - ) - @classmethod def from_existing(cls, root_path: Path, build_dir: Path) -> "MbedProgramFiles": - """Create MbedProgramFiles from a directory containing an existing program. + """ + Create MbedProgramFiles from a directory containing an existing program. Args: root_path: The path containing the MbedProgramFiles. @@ -112,8 +70,6 @@ def from_existing(cls, root_path: Path, build_dir: Path) -> "MbedProgramFiles": app_config = root_path / APP_CONFIG_FILE_NAME_JSON5 elif (root_path / APP_CONFIG_FILE_NAME_JSON).exists(): app_config = root_path / APP_CONFIG_FILE_NAME_JSON - else: - logger.info("This program does not contain an mbed_app.json config file.") # If there's already a custom_targets.json5, use that. # Otherwise, assume json. @@ -139,7 +95,8 @@ def from_existing(cls, root_path: Path, build_dir: Path) -> "MbedProgramFiles": @dataclass class MbedOS: - """Metadata associated with a copy of MbedOS. 
+ """ + Metadata associated with a copy of MbedOS. This object holds information about MbedOS used by MbedProgram. @@ -159,13 +116,16 @@ def from_existing(cls, root_path: Path, check_root_path_exists: bool = True) -> cmsis_mcu_descriptions_json_file = root_path / CMSIS_MCU_DESCRIPTIONS_JSON_FILE_PATH if check_root_path_exists and not root_path.exists(): - raise ValueError("The mbed-os directory does not exist.") + msg = "The mbed-os directory does not exist." + raise ValueError(msg) if root_path.exists() and not targets_json_file.exists(): - raise ValueError(f"This MbedOS copy does not contain a {TARGETS_JSON_FILE_PATH} file.") + msg = f"This MbedOS copy does not contain a {TARGETS_JSON_FILE_PATH} file." + raise ValueError(msg) if root_path.exists() and not cmsis_mcu_descriptions_json_file.exists(): - raise ValueError(f"This MbedOS copy does not contain a {CMSIS_MCU_DESCRIPTIONS_JSON_FILE_PATH.name} file.") + msg = f"This MbedOS copy does not contain a {CMSIS_MCU_DESCRIPTIONS_JSON_FILE_PATH.name} file." + raise ValueError(msg) return cls( root=root_path, diff --git a/tools/python/mbed_tools/project/_internal/render_templates.py b/tools/python/mbed_tools/project/_internal/render_templates.py index 824b174cde6..e1f38dd0ce2 100644 --- a/tools/python/mbed_tools/project/_internal/render_templates.py +++ b/tools/python/mbed_tools/project/_internal/render_templates.py @@ -4,8 +4,6 @@ # """Render jinja templates required by the project package.""" -import datetime - from pathlib import Path import jinja2 @@ -13,45 +11,17 @@ TEMPLATES_DIRECTORY = Path("_internal", "templates") -def render_cmakelists_template(cmakelists_file: Path, program_name: str) -> None: - """Render CMakeLists.tmpl with the copyright year and program name as the app target name. - - Args: - cmakelists_file: The path where CMakeLists.txt will be written. - program_name: The name of the program, will be used as the app target name. - """ - cmakelists_file.write_text( - render_jinja_template( - "CMakeLists.tmpl", {"program_name": program_name, "year": str(datetime.datetime.now().year)} - ) - ) - - -def render_main_cpp_template(main_cpp: Path) -> None: - """Render a basic main.cpp which prints a hello message and returns. - - Args: - main_cpp: Path where the main.cpp file will be written. - """ - main_cpp.write_text(render_jinja_template("main.tmpl", {"year": str(datetime.datetime.now().year)})) - - -def render_gitignore_template(gitignore: Path) -> None: - """Write out a basic gitignore file ignoring the build and config directory. - - Args: - gitignore: The path where the gitignore file will be written. - """ - gitignore.write_text(render_jinja_template("gitignore.tmpl", {})) - - def render_jinja_template(template_name: str, context: dict) -> str: - """Render a jinja template. + """ + Render a jinja template. Args: template_name: The name of the template being rendered. context: Data to render into the jinja template. 
""" - env = jinja2.Environment(loader=jinja2.PackageLoader("mbed_tools.project", str(TEMPLATES_DIRECTORY))) + env = jinja2.Environment( + loader=jinja2.PackageLoader("mbed_tools.project", str(TEMPLATES_DIRECTORY)), + autoescape=False, # autoescape not needed because we are not rendering HTML + ) template = env.get_template(template_name) return template.render(context) diff --git a/tools/python/mbed_tools/project/_internal/templates/CMakeLists.tmpl b/tools/python/mbed_tools/project/_internal/templates/CMakeLists.tmpl deleted file mode 100644 index 6e1fb1e5b60..00000000000 --- a/tools/python/mbed_tools/project/_internal/templates/CMakeLists.tmpl +++ /dev/null @@ -1,27 +0,0 @@ -# Copyright (c) {{year}} ARM Limited. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 - -cmake_minimum_required(VERSION 3.19.0) - -set(MBED_PATH ${CMAKE_CURRENT_SOURCE_DIR}/mbed-os CACHE INTERNAL "") -set(MBED_CONFIG_PATH ${CMAKE_CURRENT_BINARY_DIR} CACHE INTERNAL "") -set(APP_TARGET {{program_name}}) - -include(${MBED_PATH}/tools/cmake/app.cmake) - -project(${APP_TARGET}) - -add_subdirectory(${MBED_PATH}) - -add_executable(${APP_TARGET} - main.cpp -) - -target_link_libraries(${APP_TARGET} mbed-os) - -mbed_set_post_build(${APP_TARGET}) - -option(VERBOSE_BUILD "Have a verbose build process") -if(VERBOSE_BUILD) - set(CMAKE_VERBOSE_MAKEFILE ON) -endif() diff --git a/tools/python/mbed_tools/project/_internal/templates/gitignore.tmpl b/tools/python/mbed_tools/project/_internal/templates/gitignore.tmpl deleted file mode 100644 index f8fdeb6c7fa..00000000000 --- a/tools/python/mbed_tools/project/_internal/templates/gitignore.tmpl +++ /dev/null @@ -1,2 +0,0 @@ -.mbedbuild -cmake_build/ diff --git a/tools/python/mbed_tools/project/_internal/templates/main.tmpl b/tools/python/mbed_tools/project/_internal/templates/main.tmpl deleted file mode 100644 index baa7dcc90b1..00000000000 --- a/tools/python/mbed_tools/project/_internal/templates/main.tmpl +++ /dev/null @@ -1,13 +0,0 @@ -/* mbed Microcontroller Library - * Copyright (c) {{year}} ARM Limited - * SPDX-License-Identifier: Apache-2.0 - */ - -#include "mbed.h" - - -int main() -{ - printf("Hello, Mbed!\n"); - return 0; -} diff --git a/tools/python/mbed_tools/project/exceptions.py b/tools/python/mbed_tools/project/exceptions.py index 4c4db0b8e02..2dfc98a001f 100644 --- a/tools/python/mbed_tools/project/exceptions.py +++ b/tools/python/mbed_tools/project/exceptions.py @@ -15,13 +15,13 @@ class VersionControlError(MbedProjectError): """Raised when a source control management operation failed.""" -class ExistingProgram(MbedProjectError): +class ExistingProgramError(MbedProjectError): """Raised when a program already exists at a given path.""" -class ProgramNotFound(MbedProjectError): +class ProgramNotFoundError(MbedProjectError): """Raised when an expected program is not found.""" -class MbedOSNotFound(MbedProjectError): +class MbedOSNotFoundError(MbedProjectError): """A valid copy of MbedOS was not found.""" diff --git a/tools/python/mbed_tools/project/mbed_program.py b/tools/python/mbed_tools/project/mbed_program.py index 084d5338f49..b7ffccdfe24 100644 --- a/tools/python/mbed_tools/project/mbed_program.py +++ b/tools/python/mbed_tools/project/mbed_program.py @@ -6,24 +6,24 @@ import logging import pathlib - from pathlib import Path -from typing import Dict +from typing import Dict, Optional from urllib.parse import urlparse -from mbed_tools.project.exceptions import ProgramNotFound, ExistingProgram, MbedOSNotFound from mbed_tools.project._internal.project_data import 
( - MbedProgramFiles, - MbedOS, - MBED_OS_REFERENCE_FILE_NAME, MBED_OS_DIR_NAME, + MBED_OS_REFERENCE_FILE_NAME, + MbedOS, + MbedProgramFiles, ) +from mbed_tools.project.exceptions import MbedOSNotFoundError, ProgramNotFoundError logger = logging.getLogger(__name__) class MbedProgram: - """Represents an Mbed program. + """ + Represents an Mbed program. An `MbedProgram` consists of: * A copy of, or reference to, `MbedOS` @@ -32,7 +32,8 @@ class MbedProgram: """ def __init__(self, program_files: MbedProgramFiles, mbed_os: MbedOS, root_path: pathlib.Path) -> None: - """Initialise the program attributes. + """ + Initialise the program attributes. Args: program_files: Object holding paths to a set of files that define an Mbed program. @@ -42,36 +43,12 @@ def __init__(self, program_files: MbedProgramFiles, mbed_os: MbedOS, root_path: self.root = root_path self.mbed_os = mbed_os - @classmethod - def from_new(cls, dir_path: Path) -> "MbedProgram": - """Create an MbedProgram from an empty directory. - - Creates the directory if it doesn't exist. - - Args: - dir_path: Directory in which to create the program. - - Raises: - ExistingProgram: An existing program was found in the path. - """ - if _tree_contains_program(dir_path): - raise ExistingProgram( - f"An existing Mbed program was found in the directory tree {dir_path}. It is not possible to nest Mbed " - "programs. Please ensure there is no mbed-os.lib file in the cwd hierarchy." - ) - - logger.info(f"Creating Mbed program at path '{dir_path.resolve()}'") - dir_path.mkdir(exist_ok=True) - program_files = MbedProgramFiles.from_new(dir_path) - logger.info(f"Creating git repository for the Mbed program '{dir_path}'") - mbed_os = MbedOS.from_new(dir_path / MBED_OS_DIR_NAME) - return cls(program_files, mbed_os, dir_path) - @classmethod def from_existing( - cls, dir_path: Path, build_dir: Path, mbed_os_path: Path = None, check_mbed_os: bool = True + cls, dir_path: Path, build_dir: Path, mbed_os_path: Optional[Path] = None, check_mbed_os: bool = True ) -> "MbedProgram": - """Create an MbedProgram from an existing program directory. + """ + Create an MbedProgram from an existing program directory. Args: dir_path: Directory containing an Mbed program. @@ -95,16 +72,18 @@ def from_existing( try: mbed_os = MbedOS.from_existing(mbed_os_path, check_mbed_os) except ValueError as mbed_os_err: - raise MbedOSNotFound( + msg = ( f"Mbed OS was not found due to the following error: {mbed_os_err}" "\nYou may need to resolve the mbed-os.lib reference. You can do this by performing a `deploy`." ) + raise MbedOSNotFoundError(msg) from mbed_os_err return cls(program, mbed_os, program_root) def parse_url(name_or_url: str) -> Dict[str, str]: - """Create a valid github/armmbed url from a program name. + """ + Create a valid github/armmbed url from a program name. Args: url: The URL, or a program name to turn into an URL. @@ -124,25 +103,9 @@ def parse_url(name_or_url: str) -> Dict[str, str]: return {"url": url, "dst_path": url_obj.path.rsplit("/", maxsplit=1)[-1].replace("/", "")} -def _tree_contains_program(path: Path) -> bool: - """Check if the current path or its ancestors contain an mbed-os.lib file. - - Args: - path: The starting path for the search. The search walks up the tree from this path. - - Returns: - `True` if an mbed-os.lib file is located between `path` and filesystem root. - `False` if no mbed-os.lib file was found. 
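A small illustration of how parse_url above derives the destination directory name from a clone URL (the URL here is only an example):

from urllib.parse import urlparse

url_obj = urlparse("https://github.com/ARMmbed/mbed-os-example-blinky")
dst_path = url_obj.path.rsplit("/", maxsplit=1)[-1].replace("/", "")
assert dst_path == "mbed-os-example-blinky"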
- """ - try: - _find_program_root(path) - return True - except ProgramNotFound: - return False - - def _find_program_root(cwd: Path) -> Path: - """Walk up the directory tree, looking for an mbed-os.lib file. + """ + Walk up the directory tree, looking for an mbed-os.lib file. Programs contain an mbed-os.lib file at the root of the source tree. @@ -166,8 +129,9 @@ def _find_program_root(cwd: Path) -> Path: potential_root = potential_root.parent logger.debug("No mbed-os.lib file found.") - raise ProgramNotFound( + msg = ( f"No program found from {cwd.resolve()} to {cwd.resolve().anchor}. Please set the directory to a program " "directory containing an mbed-os.lib file. You can also set the directory to a program subdirectory if there " "is an mbed-os.lib file at the root of your program's directory tree." ) + raise ProgramNotFoundError(msg) diff --git a/tools/python/mbed_tools/project/project.py b/tools/python/mbed_tools/project/project.py deleted file mode 100644 index 8469dd2e2d7..00000000000 --- a/tools/python/mbed_tools/project/project.py +++ /dev/null @@ -1,86 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -"""Defines the public API of the package.""" - -import pathlib -import logging - -from typing import List, Any - -from mbed_tools.project.mbed_program import MbedProgram, parse_url -from mbed_tools.project._internal.libraries import LibraryReferences -from mbed_tools.project._internal import git_utils - -logger = logging.getLogger(__name__) - - -def import_project(url: str, dst_path: Any = None, recursive: bool = False) -> pathlib.Path: - """Clones an Mbed project from a remote repository. - - Args: - url: URL of the repository to clone. - dst_path: Destination path for the repository. - recursive: Recursively clone all project dependencies. - - Returns: - The path the project was cloned to. - """ - git_data = parse_url(url) - url = git_data["url"] - if not dst_path: - dst_path = pathlib.Path(git_data["dst_path"]) - - git_utils.clone(url, dst_path) - if recursive: - libs = LibraryReferences(root=dst_path, ignore_paths=["mbed-os"]) - libs.fetch() - - return dst_path - - -def initialise_project(path: pathlib.Path, create_only: bool) -> None: - """Create a new Mbed project, optionally fetching and adding mbed-os. - - Args: - path: Path to the project folder. Created if it doesn't exist. - create_only: Flag which suppreses fetching mbed-os. If the value is `False`, fetch mbed-os from the remote. - """ - program = MbedProgram.from_new(path) - if not create_only: - libs = LibraryReferences(root=program.root, ignore_paths=["mbed-os"]) - libs.fetch() - - -def deploy_project(path: pathlib.Path, force: bool = False) -> None: - """Deploy a specific revision of the current Mbed project. - - This function also resolves and syncs all library dependencies to the revision specified in the library reference - files. - - Args: - path: Path to the Mbed project. - force: Force overwrite uncommitted changes. If False, the deploy will fail if there are uncommitted local - changes. - """ - libs = LibraryReferences(path, ignore_paths=["mbed-os"]) - libs.checkout(force=force) - if list(libs.iter_unresolved()): - logger.info("Unresolved libraries detected, downloading library source code.") - libs.fetch() - - -def get_known_libs(path: pathlib.Path) -> List: - """List all resolved library dependencies. - - This function will not resolve dependencies. This will only generate a list of resolved dependencies. 
- - Args: - path: Path to the Mbed project. - - Returns: - A list of known dependencies. - """ - libs = LibraryReferences(path, ignore_paths=["mbed-os"]) - return list(sorted(libs.iter_resolved())) diff --git a/tools/python/mbed_tools/py.typed b/tools/python/mbed_tools/py.typed new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tools/python/mbed_tools/sterm/terminal.py b/tools/python/mbed_tools/sterm/terminal.py index 85149d48e88..ec91c829e0c 100644 --- a/tools/python/mbed_tools/sterm/terminal.py +++ b/tools/python/mbed_tools/sterm/terminal.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Serial terminal implementation based on pyserial.tools.miniterm. +""" +Serial terminal implementation based on pyserial.tools.miniterm. The Mbed serial terminal makes the following modifications to the default Miniterm. * Custom help menu text @@ -13,14 +14,18 @@ To start the terminal clients should call the "run" function, this is the entry point to the module. """ +import codecs +import typing from typing import Any from serial import Serial from serial.tools.miniterm import Miniterm +from typing_extensions import override def run(port: str, baud: int, echo: bool = True) -> None: - """Run the serial terminal. + """ + Run the serial terminal. This function is blocking as it waits for the terminal thread to finish executing before returning. @@ -29,7 +34,7 @@ def run(port: str, baud: int, echo: bool = True) -> None: baud: Serial baud rate. echo: Echo user input back to the console. """ - term = SerialTerminal(Serial(port=port, baudrate=str(baud)), echo=echo) + term = SerialTerminal(Serial(port=port, baudrate=baud), echo=echo) term.start() try: @@ -42,7 +47,8 @@ def run(port: str, baud: int, echo: bool = True) -> None: class SerialTerminal(Miniterm): - """An implementation of Miniterm that implements the additional Mbed terminal functionality. + """ + An implementation of Miniterm that implements the additional Mbed terminal functionality. Overrides the `writer` method to implement modified menu key handling behaviour. Overrides the Miniterm::get_help_text method to return the Mbed custom help text. 
@@ -52,21 +58,23 @@ class SerialTerminal(Miniterm): def __init__(self, *args: Any, **kwargs: Any) -> None: """Set the rx/tx encoding and special characters.""" super().__init__(*args, **kwargs) - self.exit_character = CTRL_C - self.menu_character = CTRL_T - self.reset_character = CTRL_B - self.help_character = CTRL_H + self.exit_character: str = CTRL_C + self.menu_character: str = CTRL_T + self.reset_character: str = CTRL_B + self.help_character: str = CTRL_H self.set_rx_encoding("UTF-8") self.set_tx_encoding("UTF-8") def reset(self) -> None: """Send a reset signal.""" - self.serial.sendBreak() + self.serial.send_break() + @override def get_help_text(self) -> str: """Return the text displayed when the user requests help.""" return HELP_TEXT + @override def writer(self) -> None: """Implements terminal behaviour.""" menu_active = False @@ -76,6 +84,9 @@ def writer(self) -> None: except KeyboardInterrupt: input_key = self.exit_character + if input_key is None: + continue + if (menu_active and input_key in VALID_MENU_KEYS) or (input_key == self.help_character): self.handle_menu_key(input_key) menu_active = False @@ -100,7 +111,8 @@ def _write_transformed_char(self, text: str) -> None: for transformation in self.tx_transformations: text = transformation.tx(text) - self.serial.write(self.tx_encoder.encode(text)) + encoder = typing.cast(codecs.IncrementalEncoder, self.tx_encoder) + _ = self.serial.write(encoder.encode(text)) def _echo_transformed_char(self, text: str) -> None: for transformation in self.tx_transformations: diff --git a/tools/python/mbed_tools/targets/__init__.py b/tools/python/mbed_tools/targets/__init__.py index 44ec67ecc98..c036d3d17f0 100644 --- a/tools/python/mbed_tools/targets/__init__.py +++ b/tools/python/mbed_tools/targets/__init__.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""An abstraction layer describing hardware supported by Mbed OS. +""" +An abstraction layer describing hardware supported by Mbed OS. Querying board database ----------------------- @@ -21,7 +22,10 @@ For details about configuration of this module, look at `mbed_tools.targets.config`. 
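The new @override decorators above come from typing_extensions and let a static type checker verify that a method really overrides a base-class method (PEP 698). A minimal sketch with hypothetical class names:

from typing_extensions import override


class Terminal:
    def get_help_text(self) -> str:
        return "generic help"


class MbedTerminal(Terminal):
    @override  # a type checker flags this if the base method is renamed or removed
    def get_help_text(self) -> str:
        return "Mbed-specific help"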
""" -from mbed_tools.targets import exceptions -from mbed_tools.targets.get_target import get_target_by_name, get_target_by_board_type -from mbed_tools.targets.get_board import get_board_by_product_code, get_board_by_online_id, get_board_by_jlink_slug -from mbed_tools.targets.board import Board +from mbed_tools.targets import exceptions as exceptions +from mbed_tools.targets.board import Board as Board +from mbed_tools.targets.get_board import get_board_by_jlink_slug as get_board_by_jlink_slug +from mbed_tools.targets.get_board import get_board_by_online_id as get_board_by_online_id +from mbed_tools.targets.get_board import get_board_by_product_code as get_board_by_product_code +from mbed_tools.targets.get_target import get_target_by_board_type as get_target_by_board_type +from mbed_tools.targets.get_target import get_target_by_name as get_target_by_name diff --git a/tools/python/mbed_tools/targets/_internal/board_database.py b/tools/python/mbed_tools/targets/_internal/board_database.py index d909a0a53c2..3be79f0e76b 100644 --- a/tools/python/mbed_tools/targets/_internal/board_database.py +++ b/tools/python/mbed_tools/targets/_internal/board_database.py @@ -4,19 +4,13 @@ # """Internal helper to retrieve target information from the online database.""" -import pathlib -from http import HTTPStatus import json -from json.decoder import JSONDecodeError import logging -from typing import List, Optional, Dict, Any - -import requests - -from mbed_tools.targets._internal.exceptions import ResponseJSONError, BoardAPIError - -from mbed_tools.targets.env import env +import pathlib +from json.decoder import JSONDecodeError +from typing import Any +from mbed_tools.targets._internal.exceptions import ResponseJSONError INTERNAL_PACKAGE_DIR = pathlib.Path(__file__).parent SNAPSHOT_FILENAME = "board_database_snapshot.json" @@ -33,7 +27,8 @@ def get_board_database_path() -> pathlib.Path: def get_offline_board_data() -> Any: - """Loads board data from JSON stored in offline snapshot. + """ + Loads board data from JSON stored in offline snapshot. Returns: The board database as retrieved from the local database snapshot. @@ -45,71 +40,5 @@ def get_offline_board_data() -> Any: try: return json.loads(boards_snapshot_path.read_text()) except JSONDecodeError as json_err: - raise ResponseJSONError(f"Invalid JSON received from '{boards_snapshot_path}'.") from json_err - - -def get_online_board_data() -> List[dict]: - """Retrieves board data from the online API. - - Returns: - The board database as retrieved from the boards API - - Raises: - ResponseJSONError: error decoding the response JSON. - BoardAPIError: error retrieving data from the board API. - """ - board_data: List[dict] = [{}] - response = _get_request() - if response.status_code != HTTPStatus.OK: - warning_msg = _response_error_code_to_str(response) - logger.warning(warning_msg) - logger.debug(f"Response received from API:\n{response.text}") - raise BoardAPIError(warning_msg) - - try: - json_data = response.json() - except JSONDecodeError as json_err: - warning_msg = f"Invalid JSON received from '{_BOARD_API}'." - logger.warning(warning_msg) - logger.debug(f"Response received from API:\n{response.text}") - raise ResponseJSONError(warning_msg) from json_err - - try: - board_data = json_data["data"] - except KeyError as key_err: - warning_msg = f"JSON received from '{_BOARD_API}' is missing the 'data' field." 
- logger.warning(warning_msg) - keys_found = ", ".join(json_data.keys()) - logger.debug(f"Fields found in JSON Response: {keys_found}") - raise ResponseJSONError(warning_msg) from key_err - - return board_data - - -def _response_error_code_to_str(response: requests.Response) -> str: - if response.status_code == HTTPStatus.UNAUTHORIZED: - return ( - f"Authentication failed for '{_BOARD_API}'. Please check that the environment variable " - f"'MBED_API_AUTH_TOKEN' is correctly configured with a private access token." - ) - else: - return f"An HTTP {response.status_code} was received from '{_BOARD_API}'." - - -def _get_request() -> requests.Response: - """Make a GET request to the API, ensuring the correct headers are set.""" - header: Optional[Dict[str, str]] = None - mbed_api_auth_token = env.MBED_API_AUTH_TOKEN - if mbed_api_auth_token: - header = {"Authorization": f"Bearer {mbed_api_auth_token}"} - - try: - return requests.get(_BOARD_API, headers=header) - except requests.exceptions.ConnectionError as connection_error: - if isinstance(connection_error, requests.exceptions.SSLError): - logger.warning("Unable to verify an SSL certificate with requests.") - elif isinstance(connection_error, requests.exceptions.ProxyError): - logger.warning("Failed to connect to proxy. Please check your proxy configuration.") - - logger.warning("Unable to connect to the online database. Please check your internet connection.") - raise BoardAPIError("Failed to connect to the online database.") from connection_error + msg = f"Invalid JSON received from '{boards_snapshot_path}'." + raise ResponseJSONError(msg) from json_err diff --git a/tools/python/mbed_tools/targets/_internal/data/board_database_snapshot.json b/tools/python/mbed_tools/targets/_internal/data/board_database_snapshot.json index 05d4b409d4e..539482e3df2 100644 --- a/tools/python/mbed_tools/targets/_internal/data/board_database_snapshot.json +++ b/tools/python/mbed_tools/targets/_internal/data/board_database_snapshot.json @@ -1035,6 +1035,15 @@ ], "mbed_enabled": [] }, + { + "board_type": "CY8CPROTO_062S3_4343W", + "board_name": "PSoC 62S3 Wi-Fi BT Prototyping Kit", + "product_code": "190E", + "target_type": "platform", + "slug": "CY8CPROTO-062S3-4343W", + "build_variant": [], + "mbed_enabled": [] + }, { "board_type": "RHOMBIO_L476DMW1K", "board_name": "RHOMBIO L476DMW1K", diff --git a/tools/python/mbed_tools/targets/_internal/target_attributes.py b/tools/python/mbed_tools/targets/_internal/target_attributes.py index 6cefdc5a067..7139f82affb 100644 --- a/tools/python/mbed_tools/targets/_internal/target_attributes.py +++ b/tools/python/mbed_tools/targets/_internal/target_attributes.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Internal helper to retrieve target attribute information. +""" +Internal helper to retrieve target attribute information. This information is parsed from the targets.json configuration file found in the mbed-os repo. 
@@ -10,17 +11,16 @@ import logging import pathlib -from typing import Dict, Any, Set, Optional +from typing import Any, Dict, Optional, Set from mbed_tools.lib.exceptions import ToolsError from mbed_tools.lib.json_helpers import decode_json_file - from mbed_tools.targets._internal.targets_json_parsers.accumulating_attribute_parser import ( get_accumulating_attributes_for_target, ) from mbed_tools.targets._internal.targets_json_parsers.overriding_attribute_parser import ( - get_overriding_attributes_for_target, get_labels_for_target, + get_overriding_attributes_for_target, ) INTERNAL_PACKAGE_DIR = pathlib.Path(__file__).parent @@ -42,7 +42,8 @@ class TargetNotFoundError(TargetAttributesError): def get_target_attributes(targets_json_data: dict, target_name: str, allow_non_public_targets: bool = False) -> dict: - """Retrieves attribute data taken from targets.json for a single target. + """ + Retrieves attribute data taken from targets.json for a single target. Args: targets_json_data: target definitions from targets.json @@ -73,7 +74,8 @@ def get_target_attributes(targets_json_data: dict, target_name: str, allow_non_p def _extract_target_attributes( all_targets_data: Dict[str, Any], target_name: str, allow_non_public_targets: bool ) -> dict: - """Extracts the definition for a particular target from all the targets in targets.json. + """ + Extracts the definition for a particular target from all the targets in targets.json. Args: all_targets_data: a dictionary representation of the raw targets.json data. @@ -87,13 +89,13 @@ def _extract_target_attributes( TargetNotFoundError: no target definition found in targets.json. """ if target_name not in all_targets_data: - raise TargetNotFoundError(f"Target attributes for {target_name} not found.") + msg = f"Target attributes for {target_name} not found." + raise TargetNotFoundError(msg) # All target definitions are assumed to be public unless specifically set as public=false if not all_targets_data[target_name].get("public", True) and not allow_non_public_targets: - raise TargetNotFoundError( - f"Cannot get attributes for {target_name} because it is marked non-public in targets JSON. This likely means you set MBED_TARGET to the name of the MCU rather than the name of the board." - ) + msg = f"Cannot get attributes for {target_name} because it is marked non-public in targets JSON. This likely means you set MBED_TARGET to the name of the MCU rather than the name of the board." + raise TargetNotFoundError(msg) target_attributes = get_overriding_attributes_for_target(all_targets_data, target_name) accumulated_attributes = get_accumulating_attributes_for_target(all_targets_data, target_name) @@ -102,7 +104,8 @@ def _extract_target_attributes( def _extract_core_labels(target_core: Optional[str]) -> Set[str]: - """Find the labels associated with the target's core. + """ + Find the labels associated with the target's core. Args: target_core: the target core, set as a build attribute @@ -118,7 +121,8 @@ def _extract_core_labels(target_core: Optional[str]) -> Set[str]: def _apply_config_overrides(config: Dict[str, Any], overrides: Dict[str, Any]) -> Dict[str, Any]: - """Returns the config attribute with any overrides applied. + """ + Returns the config attribute with any overrides applied. 
Args: config: the cumulative config settings defined for a target @@ -131,11 +135,11 @@ def _apply_config_overrides(config: Dict[str, Any], overrides: Dict[str, Any]) - TargetsJsonConfigurationError: overrides can't be applied to config settings that aren't already defined """ config = config.copy() - for key in overrides: + for key, override in overrides.items(): try: - config[key]["value"] = overrides[key] + config[key]["value"] = override except KeyError: logger.warning( - f"Cannot apply override {key}={overrides[key]}, there is no config setting defined matching that name." + f"Cannot apply override {key}={override}, there is no config setting defined matching that name." ) return config diff --git a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/__init__.py b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/__init__.py index b563e97ac37..5a9a2e7f013 100644 --- a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/__init__.py +++ b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/__init__.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Package for parsing Mbed OS library's targets.json. +""" +Package for parsing Mbed OS library's targets.json. targets.json contains the attribute data for all supported targets. diff --git a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py index eadaa66a135..f134d8546b5 100644 --- a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py +++ b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/accumulating_attribute_parser.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Functions for parsing the inheritance for accumulating attributes. +""" +Functions for parsing the inheritance for accumulating attributes. Accumulating attributes are both defined and can be added to and removed from further down the hierarchy. The hierarchy is also slightly different to the other fields as it is determined as 'breadth-first' in @@ -11,7 +12,7 @@ import itertools from collections import deque -from typing import Dict, List, Any, Deque +from typing import Any, Deque, Dict, List ACCUMULATING_ATTRIBUTES = ("extra_labels", "macros", "device_has", "features", "components") MODIFIERS = ("add", "remove") @@ -21,7 +22,8 @@ def get_accumulating_attributes_for_target(all_targets_data: Dict[str, Any], target_name: str) -> Dict[str, Any]: - """Parses the data for all targets and returns the accumulating attributes for the specified target. + """ + Parses the data for all targets and returns the accumulating attributes for the specified target. Args: all_targets_data: a dictionary representation of the contents of targets.json @@ -35,7 +37,8 @@ def get_accumulating_attributes_for_target(all_targets_data: Dict[str, Any], tar def _targets_accumulate_hierarchy(all_targets_data: Dict[str, Any], target_name: str) -> List[dict]: - """List all ancestors of a target in order of accumulation inheritance (breadth-first). + """ + List all ancestors of a target in order of accumulation inheritance (breadth-first). 
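A standalone, runnable version of the override-application loop shown above, with toy config data (the setting name is made up):

import logging
from typing import Any, Dict

logger = logging.getLogger(__name__)


def apply_config_overrides(config: Dict[str, Any], overrides: Dict[str, Any]) -> Dict[str, Any]:
    config = config.copy()
    for key, override in overrides.items():
        try:
            config[key]["value"] = override
        except KeyError:
            logger.warning(f"Cannot apply override {key}={override}, no matching config setting is defined.")
    return config


demo_config = {"stack-size": {"help": "Thread stack size", "value": 4096}}
assert apply_config_overrides(demo_config, {"stack-size": 8192})["stack-size"]["value"] == 8192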
Using a breadth-first traverse of the inheritance tree, return a list of targets in the order of inheritance, starting with the target itself and finishing with its highest ancestor. @@ -72,26 +75,22 @@ def _targets_accumulate_hierarchy(all_targets_data: Dict[str, Any], target_name: return targets_in_order -def _add_attribute_element( - accumulator: Dict[str, Any], attribute_name: str, elements_to_add: List[Any] -) -> Dict[str, Any]: - """Adds an attribute element to an attribute. +def _add_attribute_element(accumulator: Dict[str, Any], attribute_name: str, elements_to_add: List[Any]) -> None: + """ + Adds an attribute element to an attribute. Args: accumulator: a store of attributes to be updated attribute_name: name of the attribute to update elements_to_add: element to add to the attribute list - - Returns: - The accumulator object with the new elements added """ for element in elements_to_add: accumulator[attribute_name].append(element) - return accumulator def _element_matches(element_to_remove: str, element_to_check: str) -> bool: - """Checks if an element meets the criteria to be removed from list. + """ + Checks if an element meets the criteria to be removed from list. Some attribute elements (eg. macros) can be defined with a number value eg. MACRO_SOMETHING=5. If we are then instructed to remove @@ -108,18 +107,14 @@ def _element_matches(element_to_remove: str, element_to_check: str) -> bool: return element_to_check == element_to_remove or element_to_check.startswith(f"{element_to_remove}=") -def _remove_attribute_element( - accumulator: Dict[str, Any], attribute_name: str, elements_to_remove: List[Any] -) -> Dict[str, Any]: - """Removes an attribute element from an attribute. +def _remove_attribute_element(accumulator: Dict[str, Any], attribute_name: str, elements_to_remove: List[Any]) -> None: + """ + Removes an attribute element from an attribute. Args: accumulator: a store of attributes to be updated attribute_name: name of the attribute to update elements_to_remove: element to remove from the attribute list - - Returns: - The accumulator object with the desired elements removed """ existing_elements = accumulator[attribute_name] combinations_to_check = itertools.product(existing_elements, elements_to_remove) @@ -131,13 +126,13 @@ def _remove_attribute_element( for element in checked_elements_to_remove: accumulator[attribute_name].remove(element) - return accumulator def _calculate_attribute_elements( attribute_name: str, starting_state: Dict[str, Any], applicable_accumulation_order: List[dict] ) -> Dict[str, Any]: - """Adds and removes elements for an attribute based on the definitions encountered in the hierarchy. + """ + Adds and removes elements for an attribute based on the definitions encountered in the hierarchy. This is done via modifying attributes eg. "extra_labels" can be modified by "extra_labels_add" and "extra_labels_remove". @@ -168,7 +163,8 @@ def _calculate_attribute_elements( def _calculate_attribute_for_target( attribute_name: str, target: Dict[str, Any], targets_in_order: List[dict] ) -> Dict[str, Any]: - """Finds a single accumulated attribute for a target from its list of ancestors. + """ + Finds a single accumulated attribute for a target from its list of ancestors. 
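The removal rule documented in _element_matches above is easy to demonstrate standalone: removing a macro name also removes a valued form of the same macro, but not a different macro that merely shares the prefix.

def element_matches(element_to_remove: str, element_to_check: str) -> bool:
    return element_to_check == element_to_remove or element_to_check.startswith(f"{element_to_remove}=")


assert element_matches("MACRO_SOMETHING", "MACRO_SOMETHING")
assert element_matches("MACRO_SOMETHING", "MACRO_SOMETHING=5")
assert not element_matches("MACRO_SOMETHING", "MACRO_SOMETHING_ELSE")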
Args: attribute_name: the name of the accumulating attribute @@ -185,7 +181,8 @@ def _calculate_attribute_for_target( def _find_nearest_defined_attribute(targets_in_order: List[dict], attribute_name: str) -> Dict[str, Any]: - """Returns the definition of a particular attribute first encountered in the accumulation order. + """ + Returns the definition of a particular attribute first encountered in the accumulation order. Args: targets_in_order: the inheritance order for the target, from the target itself to its highest ancestor @@ -201,7 +198,8 @@ def _find_nearest_defined_attribute(targets_in_order: List[dict], attribute_name def _determine_accumulated_attributes(targets_in_order: List[dict]) -> Dict[str, Any]: - """Finds all the accumulated attributes for a target from its list of ancestors. + """ + Finds all the accumulated attributes for a target from its list of ancestors. Iterates through the order of inheritance (accumulation order) to find the nearest definition of an attribute, then retraces backwards calculating additions and deletions that modify it. diff --git a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py index 214ba3e01bb..22dafe78583 100644 --- a/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py +++ b/tools/python/mbed_tools/targets/_internal/targets_json_parsers/overriding_attribute_parser.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Functions for parsing the inheritance for overriding attributes. +""" +Functions for parsing the inheritance for overriding attributes. Overriding attributes are defined and can be overridden further down the hierarchy. @@ -21,16 +22,17 @@ from collections import deque from functools import reduce -from typing import Dict, List, Any, Deque, Set +from typing import Any, Deque, Dict, List, Set from mbed_tools.targets._internal.targets_json_parsers.accumulating_attribute_parser import ALL_ACCUMULATING_ATTRIBUTES MERGING_ATTRIBUTES = ("config", "overrides", "memory_banks", "memory_overrides") -NON_OVERRIDING_ATTRIBUTES = ALL_ACCUMULATING_ATTRIBUTES + ("public", "inherits") +NON_OVERRIDING_ATTRIBUTES = (*ALL_ACCUMULATING_ATTRIBUTES, "public", "inherits") def get_overriding_attributes_for_target(all_targets_data: Dict[str, Any], target_name: str) -> Dict[str, Any]: - """Parses the data for all targets and returns the overriding attributes for the specified target. + """ + Parses the data for all targets and returns the overriding attributes for the specified target. Args: all_targets_data: a dictionary representation of the contents of targets.json @@ -44,7 +46,8 @@ def get_overriding_attributes_for_target(all_targets_data: Dict[str, Any], targe def get_labels_for_target(all_targets_data: Dict[str, Any], target_name: str) -> Set[str]: - """The labels for a target are the names of all the boards (public and private) that the board inherits from. + """ + The labels for a target are the names of all the boards (public and private) that the board inherits from. The order of these labels are not reflective of inheritance order. 
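A toy sketch of the breadth-first "accumulation order" described by the accumulating attribute parser above, using made-up target data; the real parser operates on the full targets.json structure:

from collections import deque
from typing import Any, Deque, Dict, List


def accumulation_order(all_targets: Dict[str, Any], target_name: str) -> List[str]:
    order: List[str] = []
    queue: Deque[str] = deque([target_name])
    while queue:
        current = queue.popleft()
        order.append(current)
        # Visit parents level by level (breadth-first), nearest parents first.
        queue.extend(all_targets[current].get("inherits", []))
    return order


demo_targets = {
    "BOARD": {"inherits": ["FAMILY", "CONNECTIVITY"]},
    "FAMILY": {"inherits": ["MCU"]},
    "CONNECTIVITY": {},
    "MCU": {},
}
assert accumulation_order(demo_targets, "BOARD") == ["BOARD", "FAMILY", "CONNECTIVITY", "MCU"]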
@@ -60,7 +63,8 @@ def get_labels_for_target(all_targets_data: Dict[str, Any], target_name: str) -> def _targets_override_hierarchy(all_targets_data: Dict[str, Any], target_name: str) -> List[dict]: - """List all ancestors of a target in order of overriding inheritance (depth-first). + """ + List all ancestors of a target in order of overriding inheritance (depth-first). Using a depth-first traverse of the inheritance tree, return a list of targets in the order of inheritance, starting with the target itself and finishing with its highest ancestor. @@ -98,7 +102,8 @@ def _targets_override_hierarchy(all_targets_data: Dict[str, Any], target_name: s def _determine_overridden_attributes(targets_in_order: List[dict]) -> Dict[str, Any]: - """Finds all the overrideable attributes for a target from its list of ancestors. + """ + Finds all the overrideable attributes for a target from its list of ancestors. Combines the attributes from all the targets in the hierarchy. Starts from the highest ancestor reduces down to the target itself, overriding if they define the same attribute. @@ -120,8 +125,7 @@ def _determine_overridden_attributes(targets_in_order: List[dict]) -> Dict[str, merged_attribute_elements = _reduce_right_list_of_dictionaries(list(override_order_for_single_attribute)) if merged_attribute_elements: target_attributes[merging_attribute] = merged_attribute_elements - overridden_attributes = _remove_unwanted_attributes(target_attributes) - return overridden_attributes + return _remove_unwanted_attributes(target_attributes) def _reduce_right_list_of_dictionaries(list_of_dicts: List[dict]) -> Dict[str, Any]: @@ -130,7 +134,8 @@ def _reduce_right_list_of_dictionaries(list_of_dicts: List[dict]) -> Dict[str, A def _remove_unwanted_attributes(target_attributes: Dict[str, Any]) -> Dict[str, Any]: - """Removes all non-overriding attributes. + """ + Removes all non-overriding attributes. Defined in NON_OVERRIDING_ATTRIBUTES. Accumulating arguments are inherited in a different way that is handled by its own parser. @@ -150,7 +155,8 @@ def _remove_unwanted_attributes(target_attributes: Dict[str, Any]) -> Dict[str, def _extract_target_labels(targets_in_order: List[dict], target_name: str) -> Set[str]: - """Collect a set of all the board names from the inherits field in each target in the hierarchy. + """ + Collect a set of all the board names from the inherits field in each target in the hierarchy. Args: targets_in_order: list of targets in order of inheritance, starting with the target up to its highest ancestor diff --git a/tools/python/mbed_tools/targets/board.py b/tools/python/mbed_tools/targets/board.py index a4b8539e772..39b325d8ede 100644 --- a/tools/python/mbed_tools/targets/board.py +++ b/tools/python/mbed_tools/targets/board.py @@ -10,7 +10,8 @@ @dataclass(frozen=True, order=True) class Board: - """Representation of an Mbed-Enabled Development Board. + """ + Representation of an Mbed-Enabled Development Board. Attributes: board_type: Type of board in format that allows cross-referencing with target definitions. @@ -37,7 +38,8 @@ class Board: @classmethod def from_online_board_entry(cls, board_entry: dict) -> "Board": - """Create a new instance of Board from an online database entry. + """ + Create a new instance of Board from an online database entry. Args: board_entry: A single entity retrieved from the board database API. @@ -60,7 +62,7 @@ def from_online_board_entry(cls, board_entry: dict) -> "Board": # This is simply so we can demo the tools to PE. 
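A simplified sketch of the "later definitions win" merge that _determine_overridden_attributes describes, folding attribute dictionaries from the highest ancestor down to the target (toy data; the real code also handles the MERGING_ATTRIBUTES separately):

from functools import reduce
from typing import Any, Dict, List


def merge_overriding(attrs_ancestor_first: List[Dict[str, Any]]) -> Dict[str, Any]:
    # Each later dictionary overrides keys defined by earlier ones.
    return reduce(lambda acc, nxt: {**acc, **nxt}, attrs_ancestor_first, {})


assert merge_overriding([{"core": "Cortex-M4", "public": False}, {"public": True}]) == {
    "core": "Cortex-M4",
    "public": True,
}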
This must be removed and replaced with a proper mechanism # for determining the build variant for all platforms. We probably need to add this information to the # boards database. - build_variant=tuple(), + build_variant=(), ) @classmethod diff --git a/tools/python/mbed_tools/targets/boards.py b/tools/python/mbed_tools/targets/boards.py index 3440af00258..2b5e75b95c9 100644 --- a/tools/python/mbed_tools/targets/boards.py +++ b/tools/python/mbed_tools/targets/boards.py @@ -4,62 +4,61 @@ # """Interface to the Board Database.""" -import json +from __future__ import annotations +import json +from collections.abc import Set as AbstractSet from dataclasses import asdict -from collections.abc import Set -from typing import Iterator, Iterable, Any, Callable +from typing import Callable, Iterable, Iterator, Sequence -from mbed_tools.targets._internal import board_database +from typing_extensions import override -from mbed_tools.targets.exceptions import UnknownBoard +from mbed_tools.targets._internal import board_database from mbed_tools.targets.board import Board +from mbed_tools.targets.exceptions import UnknownBoardError -class Boards(Set): - """Interface to the Board Database. +class Boards(AbstractSet): + """ + Interface to the Board Database. Boards is initialised with an Iterable[Board]. The classmethods can be used to construct Boards with data from either the online or offline database. """ @classmethod - def from_offline_database(cls) -> "Boards": - """Initialise with the offline board database. + def from_offline_database(cls) -> Boards: + """ + Initialise with the offline board database. Raises: BoardDatabaseError: Could not retrieve data from the board database. """ return cls(Board.from_offline_board_entry(b) for b in board_database.get_offline_board_data()) - @classmethod - def from_online_database(cls) -> "Boards": - """Initialise with the online board database. - - Raises: - BoardDatabaseError: Could not retrieve data from the board database. + def __init__(self, boards_data: Iterable[Board]) -> None: """ - return cls(Board.from_online_board_entry(b) for b in board_database.get_online_board_data()) - - def __init__(self, boards_data: Iterable["Board"]) -> None: - """Initialise with a list of boards. + Initialise with a list of boards. Args: boards_data: iterable of board data from a board database source. """ - self._boards_data = tuple(boards_data) + self._boards_data: Sequence[Board] = tuple(boards_data) - def __iter__(self) -> Iterator["Board"]: + @override + def __iter__(self) -> Iterator[Board]: """Yield an Board on each iteration.""" - for board in self._boards_data: - yield board + yield from self._boards_data + @override def __len__(self) -> int: """Return the number of boards.""" return len(self._boards_data) - def __contains__(self, board: object) -> Any: - """Check if a board is in the collection of boards. + @override + def __contains__(self, board: object) -> bool: + """ + Check if a board is in the collection of boards. Args: board: An instance of Board. @@ -70,7 +69,8 @@ def __contains__(self, board: object) -> Any: return any(x == board for x in self) def get_board(self, matching: Callable) -> Board: - """Returns first Board for which `matching` returns True. + """ + Returns first Board for which `matching` returns True. 
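Boards above now subclasses collections.abc.Set (imported as AbstractSet), which only requires __iter__, __len__ and __contains__. A minimal standalone sketch with made-up data:

from collections.abc import Set as AbstractSet
from typing import Iterator, Tuple


class BoardNames(AbstractSet):
    def __init__(self, names: Tuple[str, ...]) -> None:
        self._names = names

    def __iter__(self) -> Iterator[str]:
        yield from self._names

    def __len__(self) -> int:
        return len(self._names)

    def __contains__(self, name: object) -> bool:
        return any(x == name for x in self)


assert "K64F" in BoardNames(("K64F", "DISCO_L475VG_IOT01A"))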
Args: matching: A function which will be called for each board in database @@ -81,7 +81,7 @@ def get_board(self, matching: Callable) -> Board: try: return next(board for board in self if matching(board)) except StopIteration: - raise UnknownBoard() + raise UnknownBoardError from None def json_dump(self) -> str: """Return the contents of the board database as a json string.""" diff --git a/tools/python/mbed_tools/targets/env.py b/tools/python/mbed_tools/targets/env.py index 205b443f6f2..47dd6e5c6b9 100644 --- a/tools/python/mbed_tools/targets/env.py +++ b/tools/python/mbed_tools/targets/env.py @@ -2,7 +2,8 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Environment options for `mbed-targets`. +""" +Environment options for `mbed-targets`. All the env configuration options can be set either via environment variables or using a `.env` file containing the variable definitions as follows: @@ -23,18 +24,20 @@ import dotenv -dotenv.load_dotenv(dotenv.find_dotenv(usecwd=True)) +_ = dotenv.load_dotenv(dotenv.find_dotenv(usecwd=True)) class Env: - """Provides access to environment variables. + """ + Provides access to environment variables. Ensures variables are reloaded when environment changes during runtime. """ @property - def MBED_API_AUTH_TOKEN(self) -> str: - """Token to use when accessing online API. + def MBED_API_AUTH_TOKEN(self) -> str: # noqa: N802 + """ + Token to use when accessing online API. Mbed Targets uses the online mbed board database at os.mbed.com as its data source. A snapshot of the board database is shipped with the package, for faster lookup of known @@ -48,25 +51,6 @@ def MBED_API_AUTH_TOKEN(self) -> str: """ return os.getenv("MBED_API_AUTH_TOKEN", "") - @property - def MBED_DATABASE_MODE(self) -> str: - """Database mode to use when retrieving board data. - - Mbed Targets supports an online and offline mode, which controls where to look up the board database. - - The board lookup can be from either the online or offline database, depending - on the value of an environment variable called `MBED_DATABASE_MODE`. - - The mode can be set to one of the following: - - - `AUTO`: the offline database is searched first, if the board isn't found the online database is searched. - - `ONLINE`: the online database is always used. - - `OFFLINE`: the offline database is always used. - - If `MBED_DATABASE_MODE` is not set, it defaults to `AUTO`. - """ - return os.getenv("MBED_DATABASE_MODE", "AUTO") - env = Env() """Instance of `Env` class.""" diff --git a/tools/python/mbed_tools/targets/exceptions.py b/tools/python/mbed_tools/targets/exceptions.py index 5e0e0569862..e6688b063d7 100644 --- a/tools/python/mbed_tools/targets/exceptions.py +++ b/tools/python/mbed_tools/targets/exceptions.py @@ -15,11 +15,11 @@ class TargetError(ToolsError): """Target definition cannot be retrieved.""" -class UnknownBoard(MbedTargetsError): +class UnknownBoardError(MbedTargetsError): """Requested board was not found.""" -class UnsupportedMode(MbedTargetsError): +class UnsupportedModeError(MbedTargetsError): """The Database Mode is unsupported.""" diff --git a/tools/python/mbed_tools/targets/get_board.py b/tools/python/mbed_tools/targets/get_board.py index 53df7719f94..51299b5a1d5 100644 --- a/tools/python/mbed_tools/targets/get_board.py +++ b/tools/python/mbed_tools/targets/get_board.py @@ -2,26 +2,24 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. 
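The raise UnknownBoardError from None in get_board above suppresses the internal StopIteration so callers only see the domain error. A minimal sketch of the same idiom with hypothetical names:

from typing import Callable, Iterable


class UnknownBoardError(Exception):
    """Requested board was not found."""


def first_matching(boards: Iterable[str], matching: Callable[[str], bool]) -> str:
    try:
        return next(board for board in boards if matching(board))
    except StopIteration:
        # "from None" hides the StopIteration context in the traceback.
        raise UnknownBoardError from None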
# SPDX-License-Identifier: Apache-2.0 # -"""Interface for accessing Mbed-Enabled Development Board data. +""" +Interface for accessing Mbed-Enabled Development Board data. An instance of `mbed_tools.targets.board.Board` can be retrieved by calling one of the public functions. """ import logging -from enum import Enum from typing import Callable -from mbed_tools.targets.env import env -from mbed_tools.targets.exceptions import UnknownBoard, UnsupportedMode, BoardDatabaseError from mbed_tools.targets.board import Board from mbed_tools.targets.boards import Boards - logger = logging.getLogger(__name__) def get_board_by_product_code(product_code: str) -> Board: - """Returns first `mbed_tools.targets.board.Board` matching given product code. + """ + Returns first `mbed_tools.targets.board.Board` matching given product code. Args: product_code: the product code to look up in the database. @@ -33,7 +31,8 @@ def get_board_by_product_code(product_code: str) -> Board: def get_board_by_online_id(slug: str, target_type: str) -> Board: - """Returns first `mbed_tools.targets.board.Board` matching given online id. + """ + Returns first `mbed_tools.targets.board.Board` matching given online id. Args: slug: The slug to look up in the database. @@ -47,7 +46,8 @@ def get_board_by_online_id(slug: str, target_type: str) -> Board: def get_board_by_jlink_slug(slug: str) -> Board: - """Returns first `mbed-tools.targets.board.Board` matching given slug. + """ + Returns first `mbed-tools.targets.board.Board` matching given slug. With J-Link, the slug is extracted from a board manufacturer URL, and may not match the Mbed slug. The J-Link slug is compared against the slug, board_name and @@ -66,7 +66,8 @@ def get_board_by_jlink_slug(slug: str) -> Board: def get_board(matching: Callable) -> Board: - """Returns first `mbed_tools.targets.board.Board` for which `matching` is True. + """ + Returns first `mbed_tools.targets.board.Board` for which `matching` is True. Uses database mode configured in the environment. @@ -76,38 +77,4 @@ def get_board(matching: Callable) -> Board: Raises: UnknownBoard: a board matching the criteria could not be found in the board database. 
""" - database_mode = _get_database_mode() - - if database_mode == _DatabaseMode.OFFLINE: - logger.info("Using the offline database (only) to identify boards.") - return Boards.from_offline_database().get_board(matching) - - if database_mode == _DatabaseMode.ONLINE: - logger.info("Using the online database (only) to identify boards.") - return Boards.from_online_database().get_board(matching) - try: - logger.info("Using the offline database to identify boards.") - return Boards.from_offline_database().get_board(matching) - except UnknownBoard: - logger.info("Unable to identify a board using the offline database, trying the online database.") - try: - return Boards.from_online_database().get_board(matching) - except BoardDatabaseError: - logger.error("Unable to access the online database to identify a board.") - raise UnknownBoard() - - -class _DatabaseMode(Enum): - """Selected database mode.""" - - OFFLINE = 0 - ONLINE = 1 - AUTO = 2 - - -def _get_database_mode() -> _DatabaseMode: - database_mode = env.MBED_DATABASE_MODE - try: - return _DatabaseMode[database_mode] - except KeyError: - raise UnsupportedMode(f"{database_mode} is not a supported database mode.") + return Boards.from_offline_database().get_board(matching) diff --git a/tools/python/mbed_tools/targets/get_target.py b/tools/python/mbed_tools/targets/get_target.py index 9df9182b3ea..14aa6472482 100644 --- a/tools/python/mbed_tools/targets/get_target.py +++ b/tools/python/mbed_tools/targets/get_target.py @@ -2,18 +2,20 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # -"""Interface for accessing Targets from Mbed OS's targets.json. +""" +Interface for accessing Targets from Mbed OS's targets.json. An instance of `mbed_tools.targets.target.Target` can be retrieved by calling one of the public functions. """ -from mbed_tools.targets.exceptions import TargetError from mbed_tools.targets._internal import target_attributes +from mbed_tools.targets.exceptions import TargetError def get_target_by_name(name: str, targets_json_data: dict) -> dict: - """Returns a dictionary of attributes for the target whose name matches the name given. + """ + Returns a dictionary of attributes for the target whose name matches the name given. The target is as defined in the targets.json file found in the Mbed OS library. @@ -31,7 +33,8 @@ def get_target_by_name(name: str, targets_json_data: dict) -> dict: def get_target_by_board_type(board_type: str, targets_json_data: dict) -> dict: - """Returns the target whose name matches a board's build_type. + """ + Returns the target whose name matches a board's build_type. The target is as defined in the targets.json file found in the Mbed OS library. 
diff --git a/tools/python_tests/mbed_tools/cli/test_devices_command_integration.py b/tools/python_tests/mbed_tools/cli/test_devices_command_integration.py index b6d1351afa6..11215850d3e 100644 --- a/tools/python_tests/mbed_tools/cli/test_devices_command_integration.py +++ b/tools/python_tests/mbed_tools/cli/test_devices_command_integration.py @@ -31,11 +31,3 @@ def callback(): self.assertEqual(1, result.exit_code) logger_error.assert_called_once() - - -class TestVersionCommand(TestCase): - def test_version_command(self): - runner = CliRunner() - result = runner.invoke(main.cli, ["--version"]) - self.assertTrue(result.output) - self.assertEqual(0, result.exit_code) diff --git a/tools/python_tests/mbed_tools/cli/test_project_management.py b/tools/python_tests/mbed_tools/cli/test_project_management.py deleted file mode 100644 index 48708e97e2b..00000000000 --- a/tools/python_tests/mbed_tools/cli/test_project_management.py +++ /dev/null @@ -1,113 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -import pathlib - -from textwrap import dedent -from unittest import mock - -import pytest - -from click.testing import CliRunner - -from mbed_tools.cli.project_management import new, import_, deploy - - -@pytest.fixture -def mock_initialise_project(): - with mock.patch("mbed_tools.cli.project_management.initialise_project") as init_proj: - yield init_proj - - -@pytest.fixture -def mock_import_project(): - with mock.patch("mbed_tools.cli.project_management.import_project") as import_proj: - yield import_proj - - -@pytest.fixture -def mock_deploy_project(): - with mock.patch("mbed_tools.cli.project_management.deploy_project") as deploy_proj: - yield deploy_proj - - -@pytest.fixture -def mock_get_libs(): - with mock.patch("mbed_tools.cli.project_management.get_known_libs") as get_libs: - yield get_libs - - -class TestNewCommand: - def test_calls_new_function_with_correct_args(self, mock_initialise_project): - CliRunner().invoke(new, ["path", "--create-only"]) - mock_initialise_project.assert_called_once_with(pathlib.Path("path").resolve(), True) - - def test_echos_mbed_os_message_when_required(self, mock_initialise_project): - expected = ( - "Creating a new Mbed program at path " - + "'" - + str(pathlib.Path("path").resolve()) - + "'" - + ".\nDownloading mbed-os and adding it to the project.\n" - ) - - result = CliRunner().invoke(new, ["path"]) - - assert result.output == expected - - -class TestImportCommand: - def test_calls_clone_function_with_correct_args(self, mock_import_project): - CliRunner().invoke(import_, ["url", "dst"]) - mock_import_project.assert_called_once_with("url", pathlib.Path("dst"), True) - - def test_prints_fetched_libs(self, mock_import_project, mock_get_libs): - mock_methods = {"get_git_reference.return_value": mock.Mock(ref="abcdef", repo_url="https://repo/url")} - mock_get_libs.return_value = [ - mock.Mock(reference_file=pathlib.Path("test"), source_code_path=pathlib.Path("source"), **mock_methods) - ] - expected = """ - Library Name Repository URL Path Git Reference - -------------- ---------------- ------ --------------- - test https://repo/url source abcdef - """ - - runner = CliRunner() - ret = runner.invoke(import_, ["url", "dst"]) - - assert dedent(expected) in ret.output - - def test_does_not_print_libs_table_when_skip_resolve_specified(self, mock_import_project, mock_get_libs): - expected = """ - Library Name Repository URL Path Git Reference - -------------- ---------------- ------ 
--------------- - """ - - runner = CliRunner() - ret = runner.invoke(import_, ["url", "dst", "-s"]) - - assert dedent(expected) not in ret.output - mock_get_libs.assert_not_called() - - -class TestDeployCommand: - def test_calls_deploy_function_with_correct_args(self, mock_deploy_project): - CliRunner().invoke(deploy, ["path", "--force"]) - mock_deploy_project.assert_called_once_with(pathlib.Path("path"), True) - - def test_prints_fetched_libs(self, mock_deploy_project, mock_get_libs): - mock_methods = {"get_git_reference.return_value": mock.Mock(ref="abcdef", repo_url="https://repo/url")} - mock_get_libs.return_value = [ - mock.Mock(reference_file=pathlib.Path("test"), source_code_path=pathlib.Path("source"), **mock_methods) - ] - expected = """ - Library Name Repository URL Path Git Reference - -------------- ---------------- ------ --------------- - test https://repo/url source abcdef - """ - - runner = CliRunner() - ret = runner.invoke(deploy, ["path", "--force"]) - - assert dedent(expected) in ret.output diff --git a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_ioreg.py b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_ioreg.py index 77277195df2..626e714ca2f 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_ioreg.py +++ b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_ioreg.py @@ -18,7 +18,7 @@ def test_returns_data_from_ioreg_call(self, check_output): """ self.assertEqual(get_data("some device"), ["foo"]) - check_output.assert_called_once_with(["ioreg", "-a", "-r", "-n", "some device", "-l"]) + check_output.assert_called_once_with(["/usr/sbin/ioreg", "-a", "-r", "-n", "some device", "-l"]) def test_handles_corrupt_data_gracefully(self, check_output): check_output.return_value = b""" diff --git a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py index e42592875a9..8eba4af6292 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py +++ b/tools/python_tests/mbed_tools/devices/_internal/darwin/test_system_profiler.py @@ -2,6 +2,7 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. 
# SPDX-License-Identifier: Apache-2.0 # + import plistlib from unittest import TestCase, mock diff --git a/tools/python_tests/mbed_tools/devices/_internal/test_resolve_board.py b/tools/python_tests/mbed_tools/devices/_internal/test_resolve_board.py index 05265c884ea..e60ed2be030 100644 --- a/tools/python_tests/mbed_tools/devices/_internal/test_resolve_board.py +++ b/tools/python_tests/mbed_tools/devices/_internal/test_resolve_board.py @@ -6,10 +6,10 @@ import pytest -from mbed_tools.targets.exceptions import UnknownBoard, MbedTargetsError +from mbed_tools.targets.exceptions import UnknownBoardError, MbedTargetsError from mbed_tools.devices._internal.file_parser import OnlineId, DeviceFileInfo -from mbed_tools.devices._internal.resolve_board import NoBoardForCandidate, resolve_board, ResolveBoardError +from mbed_tools.devices._internal.resolve_board import NoBoardForCandidateError, resolve_board, ResolveBoardError @pytest.fixture @@ -40,9 +40,9 @@ def test_returns_resolved_target(self, get_board_by_product_code_mock): get_board_by_product_code_mock.assert_called_once_with(dev_info.product_code) def test_raises_when_board_not_found(self, get_board_by_product_code_mock): - get_board_by_product_code_mock.side_effect = UnknownBoard + get_board_by_product_code_mock.side_effect = UnknownBoardError - with pytest.raises(NoBoardForCandidate): + with pytest.raises(NoBoardForCandidateError): resolve_board(product_code="0123") def test_raises_when_database_lookup_fails(self, get_board_by_product_code_mock, caplog): @@ -66,9 +66,9 @@ def test_returns_resolved_board(self, get_board_by_online_id_mock): get_board_by_online_id_mock.assert_called_once_with(target_type=online_id.target_type, slug=online_id.slug) def test_raises_when_board_not_found(self, get_board_by_online_id_mock): - get_board_by_online_id_mock.side_effect = UnknownBoard + get_board_by_online_id_mock.side_effect = UnknownBoardError - with pytest.raises(NoBoardForCandidate): + with pytest.raises(NoBoardForCandidateError): resolve_board(online_id=OnlineId(target_type="hat", slug="boat")) def test_raises_when_database_lookup_fails(self, get_board_by_online_id_mock, caplog): @@ -92,10 +92,10 @@ def test_returns_resolved_board(self, get_board_by_jlink_slug_mock): get_board_by_jlink_slug_mock.assert_called_once_with(online_id.slug) def test_raises_when_board_not_found(self, get_board_by_jlink_slug_mock): - get_board_by_jlink_slug_mock.side_effect = UnknownBoard + get_board_by_jlink_slug_mock.side_effect = UnknownBoardError online_id = OnlineId("jlink", "test-board") - with pytest.raises(NoBoardForCandidate): + with pytest.raises(NoBoardForCandidateError): resolve_board(online_id=online_id) def test_raises_when_database_lookup_fails(self, get_board_by_jlink_slug_mock, caplog): @@ -118,9 +118,9 @@ def test_resolves_board_using_product_code_when_available(self, get_board_by_pro get_board_by_product_code_mock.assert_called_once_with(serial_number[:4]) def test_raises_when_board_not_found(self, get_board_by_product_code_mock): - get_board_by_product_code_mock.side_effect = UnknownBoard + get_board_by_product_code_mock.side_effect = UnknownBoardError - with pytest.raises(NoBoardForCandidate): + with pytest.raises(NoBoardForCandidateError): resolve_board(serial_number="0") def test_raises_when_database_lookup_fails(self, get_board_by_product_code_mock, caplog): diff --git a/tools/python_tests/mbed_tools/devices/test_mbed_devices.py b/tools/python_tests/mbed_tools/devices/test_mbed_devices.py index 2d25fa9c351..4118f2a5620 100644 --- 
a/tools/python_tests/mbed_tools/devices/test_mbed_devices.py +++ b/tools/python_tests/mbed_tools/devices/test_mbed_devices.py @@ -13,10 +13,10 @@ from python_tests.mbed_tools.devices.factories import CandidateDeviceFactory from mbed_tools.devices.device import Device -from mbed_tools.devices._internal.exceptions import NoBoardForCandidate, ResolveBoardError +from mbed_tools.devices._internal.exceptions import NoBoardForCandidateError, ResolveBoardError from mbed_tools.devices.devices import get_connected_devices, find_connected_device, find_all_connected_devices -from mbed_tools.devices.exceptions import DeviceLookupFailed, NoDevicesFound +from mbed_tools.devices.exceptions import DeviceLookupFailedError, NoDevicesFoundError @mock.patch("mbed_tools.devices.devices.detect_candidate_devices") @@ -45,7 +45,7 @@ def test_builds_devices_from_candidates(self, read_device_files, resolve_board, @mock.patch.object(Board, "from_offline_board_entry") def test_skips_candidates_without_a_board(self, board, resolve_board, detect_candidate_devices): candidate = CandidateDeviceFactory() - resolve_board.side_effect = NoBoardForCandidate + resolve_board.side_effect = NoBoardForCandidateError detect_candidate_devices.return_value = [candidate] board.return_value = None @@ -66,7 +66,7 @@ def test_raises_when_resolve_board_fails(self, read_device_files, resolve_board, resolve_board.side_effect = ResolveBoardError detect_candidate_devices.return_value = [candidate] - with pytest.raises(DeviceLookupFailed, match="candidate"): + with pytest.raises(DeviceLookupFailedError, match="candidate"): get_connected_devices() @@ -113,7 +113,7 @@ def test_raises_when_multiple_matching_name_no_identifier(self, mock_find_connec ), ] - with pytest.raises(DeviceLookupFailed, match="Multiple"): + with pytest.raises(DeviceLookupFailedError, match="Multiple"): find_connected_device("K64F", None) def test_raises_when_identifier_out_of_bounds(self, mock_find_connected_devices): @@ -135,7 +135,7 @@ def test_raises_when_identifier_out_of_bounds(self, mock_find_connected_devices) ), ] - with pytest.raises(DeviceLookupFailed, match="valid"): + with pytest.raises(DeviceLookupFailedError, match="valid"): find_connected_device("K64F", 2) @@ -161,7 +161,7 @@ def test_finds_all_devices_with_matching_name(self, mock_get_connected_devices): def test_raises_when_no_mbed_enabled_devices_found(self, mock_get_connected_devices): mock_get_connected_devices.return_value = mock.Mock(identified_devices=[], spec=True) - with pytest.raises(NoDevicesFound): + with pytest.raises(NoDevicesFoundError): find_all_connected_devices("K64F") def test_raises_when_device_matching_target_name_not_found(self, mock_get_connected_devices): @@ -182,7 +182,7 @@ def test_raises_when_device_matching_target_name_not_found(self, mock_get_connec ) with pytest.raises( - DeviceLookupFailed, + DeviceLookupFailedError, match=( f".*(target: {re.escape(connected_target_name)}).*(port: {re.escape(connected_target_serial_port)}).*" f"(mount point.*: {re.escape(str(connected_target_mount_point))})" diff --git a/tools/python_tests/mbed_tools/project/_internal/test_git_utils.py b/tools/python_tests/mbed_tools/project/_internal/test_git_utils.py deleted file mode 100644 index c188d47aa55..00000000000 --- a/tools/python_tests/mbed_tools/project/_internal/test_git_utils.py +++ /dev/null @@ -1,151 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. 
-# SPDX-License-Identifier: Apache-2.0 -# -from pathlib import Path -from unittest import mock - -import pytest - -from mbed_tools.project.exceptions import VersionControlError -from mbed_tools.project._internal import git_utils - - -@pytest.fixture -def mock_repo(): - with mock.patch("mbed_tools.project._internal.git_utils.git.Repo") as repo: - yield repo - - -@pytest.fixture -def mock_progress(): - with mock.patch("mbed_tools.project._internal.git_utils.ProgressReporter") as progress: - yield progress - - -class TestClone: - def test_returns_repo(self, mock_progress, mock_repo, tmp_path): - url = "https://blah" - path = Path(tmp_path, "dst") - repo = git_utils.clone(url, path) - - assert repo is not None - mock_repo.clone_from.assert_called_once_with(url=url, to_path=str(path), progress=mock_progress(), depth=1) - - def test_returns_repo_for_ref(self, mock_progress, mock_repo, tmp_path): - url = "https://example.com/org/repo.git" - ref = "development" - path = Path(tmp_path, "repo") - repo = git_utils.clone(url, path, ref) - - assert repo is not None - mock_repo.clone_from.assert_called_once_with( - url=url, to_path=str(path), progress=mock_progress(), depth=1, branch=ref - ) - - def test_raises_when_fails_due_to_bad_url(self, tmp_path): - with pytest.raises(VersionControlError, match="from url 'bad' failed"): - git_utils.clone("bad", Path(tmp_path, "dst")) - - def test_raises_when_fails_due_to_bad_url_with_ref(self, mock_progress, mock_repo, tmp_path): - url = "https://example.com/org/repo.git" - ref = "development" - path = Path(tmp_path, "repo") - - mock_repo.clone_from.side_effect = git_utils.git.exc.GitCommandError("git clone", 255) - - with pytest.raises(VersionControlError, match=f"Cloning git repository from url '{url}' failed."): - git_utils.clone(url, path, ref) - - def test_raises_when_fails_due_to_existing_nonempty_dst_dir(self, mock_repo, tmp_path): - dst_dir = Path(tmp_path, "dst") - dst_dir.mkdir() - (dst_dir / "some_file.txt").touch() - - with pytest.raises(VersionControlError, match="exists and is not an empty directory"): - git_utils.clone("https://blah", dst_dir) - - def test_can_clone_to_empty_existing_dst_dir(self, mock_repo, tmp_path, mock_progress): - dst_dir = Path(tmp_path, "dst") - dst_dir.mkdir() - url = "https://repo" - - repo = git_utils.clone(url, dst_dir) - - assert repo is not None - mock_repo.clone_from.assert_called_once_with(url=url, to_path=str(dst_dir), progress=mock_progress(), depth=1) - - -class TestInit: - def test_returns_initialised_repo(self, mock_repo): - repo = git_utils.init(Path()) - - assert repo is not None - mock_repo.init.assert_called_once_with(str(Path())) - - def test_raises_when_init_fails(self, mock_repo): - mock_repo.init.side_effect = git_utils.git.exc.GitCommandError("git init", 255) - - with pytest.raises(VersionControlError): - git_utils.init(Path()) - - -class TestGetRepo: - def test_returns_repo_object(self, mock_repo): - repo = git_utils.get_repo(Path()) - - assert isinstance(repo, mock_repo().__class__) - - def test_raises_version_control_error_when_no_git_repo_found(self, mock_repo): - mock_repo.side_effect = git_utils.git.exc.InvalidGitRepositoryError - - with pytest.raises(VersionControlError): - git_utils.get_repo(Path()) - - -class TestCheckout: - def test_git_lib_called_with_correct_command(self, mock_repo): - git_utils.checkout(mock_repo, "master") - - mock_repo.git.checkout.assert_called_once_with("master") - - def test_git_lib_called_with_correct_command_with_force(self, mock_repo): - git_utils.checkout(mock_repo, 
"master", force=True) - - mock_repo.git.checkout.assert_called_once_with("master", "--force") - - def test_raises_version_control_error_when_git_checkout_fails(self, mock_repo): - mock_repo.git.checkout.side_effect = git_utils.git.exc.GitCommandError("git checkout", 255) - - with pytest.raises(VersionControlError): - git_utils.checkout(mock_repo, "bad") - - -class TestFetch: - def test_does_a_fetch(self, mock_repo): - ref = "b23a8eb1c3f80292c8eb40689106759fae83a4c6" - git_utils.fetch(mock_repo, ref) - - mock_repo.git.fetch.assert_called_once_with("origin", ref) - - def test_raises_when_fetch_fails(self, mock_repo): - ref = "v2.7.9" - mock_repo.git.fetch.side_effect = git_utils.git.exc.GitCommandError("git fetch", 255) - - with pytest.raises(VersionControlError): - git_utils.fetch(mock_repo, ref) - - -class TestGetDefaultBranch: - def test_returns_default_branch_name(self, mock_repo): - mock_repo().git.symbolic_ref.return_value = "refs/remotes/origin/main" - - branch_name = git_utils.get_default_branch(mock_repo()) - - assert branch_name == "main" - - def test_raises_version_control_error_when_git_command_fails(self, mock_repo): - mock_repo().git.symbolic_ref.side_effect = git_utils.git.exc.GitCommandError("git symbolic_ref", 255) - - with pytest.raises(VersionControlError): - git_utils.get_default_branch(mock_repo()) diff --git a/tools/python_tests/mbed_tools/project/_internal/test_libraries.py b/tools/python_tests/mbed_tools/project/_internal/test_libraries.py deleted file mode 100644 index eade42b9cc3..00000000000 --- a/tools/python_tests/mbed_tools/project/_internal/test_libraries.py +++ /dev/null @@ -1,201 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -import pathlib - -import pytest - -from unittest import mock - -from mbed_tools.project._internal.libraries import MbedLibReference, LibraryReferences -from mbed_tools.project.exceptions import VersionControlError -from python_tests.mbed_tools.project.factories import make_mbed_lib_reference - - -@pytest.fixture -def mock_clone(): - with mock.patch("mbed_tools.project._internal.git_utils.clone") as clone: - yield clone - - -@pytest.fixture -def mock_checkout(): - with mock.patch("mbed_tools.project._internal.git_utils.checkout") as checkout: - yield checkout - - -@pytest.fixture -def mock_fetch(): - with mock.patch("mbed_tools.project._internal.git_utils.fetch") as fetch: - yield fetch - - -@pytest.fixture -def mock_get_repo(): - with mock.patch("mbed_tools.project._internal.git_utils.get_repo") as get_repo: - yield get_repo - - -@pytest.fixture -def mock_get_default_branch(): - with mock.patch("mbed_tools.project._internal.git_utils.get_default_branch") as get_default_branch: - yield get_default_branch - - -@pytest.fixture -def mock_repo(): - with mock.patch("mbed_tools.project._internal.git_utils.git.Repo") as repo: - yield repo - - -class TestLibraryReferences: - def test_hydrates_top_level_library_references(self, mock_clone, tmp_path): - fs_root = pathlib.Path(tmp_path, "foo") - lib = make_mbed_lib_reference(fs_root, ref_url="https://git") - mock_clone.side_effect = lambda url, dst_dir, *args: dst_dir.mkdir() - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.fetch() - - mock_clone.assert_called_once_with(lib.get_git_reference().repo_url, lib.source_code_path) - assert lib.is_resolved() - - def test_hydrates_recursive_dependencies(self, mock_clone, tmp_path): - fs_root = pathlib.Path(tmp_path, "foo") - lib = 
make_mbed_lib_reference(fs_root, ref_url="https://git") - # Create a lib reference without touching the fs at this point, we want to mock the effects of a recursive - # reference lookup and we need to assert the reference was resolved. - lib2 = MbedLibReference( - reference_file=(lib.source_code_path / "lib2.lib"), source_code_path=(lib.source_code_path / "lib2") - ) - # Here we mock the effects of a recursive reference lookup. We create a new lib reference as a side effect of - # the first call to the mock. Then we create the src dir, thus resolving the lib, on the second call. - mock_clone.side_effect = lambda url, dst_dir, *args: ( - make_mbed_lib_reference(pathlib.Path(dst_dir), name=lib2.reference_file.name, ref_url="https://valid2"), - lib2.source_code_path.mkdir(), - ) - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.fetch() - - assert lib.is_resolved() - assert lib2.is_resolved() - - def test_does_perform_checkout_of_default_repo_branch_if_no_git_ref_exists( - self, mock_get_repo, mock_checkout, mock_fetch, mock_get_default_branch, mock_clone, tmp_path - ): - fs_root = pathlib.Path(tmp_path, "foo") - make_mbed_lib_reference(fs_root, ref_url="https://git", resolved=True) - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.checkout(force=False) - - mock_fetch.assert_called_once_with(mock_get_repo(), mock_get_default_branch()) - mock_checkout.assert_called_once_with(mock_get_repo(), "FETCH_HEAD", force=False) - - def test_performs_checkout_if_git_ref_exists(self, mock_get_repo, mock_checkout, mock_fetch, mock_clone, tmp_path): - fs_root = pathlib.Path(tmp_path, "foo") - lib = make_mbed_lib_reference(fs_root, ref_url="https://git#lajdhalk234", resolved=True) - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.checkout(force=False) - - mock_fetch.assert_called_once_with(mock_get_repo(), lib.get_git_reference().ref) - mock_checkout.assert_called_once_with(mock_get_repo.return_value, "FETCH_HEAD", force=False) - - def test_fetch_does_not_perform_checkout_if_no_git_ref_exists( - self, mock_get_repo, mock_checkout, mock_fetch, mock_clone, tmp_path - ): - fs_root = pathlib.Path(tmp_path, "foo") - make_mbed_lib_reference(fs_root, ref_url="https://git") - mock_clone.side_effect = lambda url, dst_dir, *args: dst_dir.mkdir() - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.fetch() - - mock_fetch.assert_not_called() - mock_checkout.assert_not_called() - - def test_fetch_performs_checkout_if_ref_is_hash( - self, mock_get_repo, mock_clone, mock_fetch, mock_checkout, tmp_path - ): - num_times_called = 0 - - def clone_side_effect(url, dst_dir, *args): - nonlocal num_times_called - if num_times_called == 0: - num_times_called += 1 - raise VersionControlError("Failed to clone") - elif num_times_called == 1: - num_times_called += 1 - dst_dir.mkdir() - else: - assert False - - fs_root = pathlib.Path(tmp_path, "foo") - lib = make_mbed_lib_reference(fs_root, ref_url="https://git#398bc1a63370") - mock_clone.side_effect = clone_side_effect - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.fetch() - - mock_clone.assert_called_with(lib.get_git_reference().repo_url, lib.source_code_path) - mock_fetch.assert_called_once_with(None, lib.get_git_reference().ref) - mock_checkout.assert_called_once_with(None, "FETCH_HEAD") - - def test_raises_when_no_such_ref(self, mock_repo, mock_clone, mock_fetch, mock_checkout, tmp_path): - num_times_called = 0 - - def 
clone_side_effect(url, dst_dir, *args): - nonlocal num_times_called - if num_times_called == 0: - num_times_called += 1 - raise VersionControlError("Failed to clone") - elif num_times_called == 1: - num_times_called += 1 - dst_dir.mkdir() - else: - assert False - - fs_root = pathlib.Path(tmp_path, "foo") - make_mbed_lib_reference(fs_root, ref_url="https://git#lajdhalk234") - - mock_clone.side_effect = clone_side_effect - mock_fetch.side_effect = None - mock_checkout.side_effect = VersionControlError("Failed to checkout") - - with pytest.raises(VersionControlError, match="Failed to checkout"): - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.fetch() - - def test_doesnt_fetch_for_branch_or_tag(self, mock_clone, mock_fetch, mock_checkout, tmp_path): - fs_root = pathlib.Path(tmp_path, "foo") - make_mbed_lib_reference(fs_root, ref_url="https://git#lajdhalk234") - - mock_clone.side_effect = lambda url, dst_dir, *args: dst_dir.mkdir() - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.fetch() - - mock_fetch.assert_not_called() - mock_checkout.assert_not_called() - - def test_does_not_resolve_references_in_ignore_paths(self, mock_get_repo, mock_checkout, mock_clone, tmp_path): - fs_root = pathlib.Path(tmp_path, "mbed-os") - make_mbed_lib_reference(fs_root, ref_url="https://git#lajdhalk234") - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.fetch() - - mock_clone.assert_not_called() - - def test_fetches_only_requested_ref(self, mock_repo, tmp_path): - fs_root = pathlib.Path(tmp_path, "foo") - fake_ref = "28eeee2b4c169739192600b92e7970dbbcabd8d0" - make_mbed_lib_reference(fs_root, ref_url=f"https://git#{fake_ref}", resolved=True) - - lib_refs = LibraryReferences(fs_root, ignore_paths=["mbed-os"]) - lib_refs.checkout(force=False) - - mock_repo().git.fetch.assert_called_once_with("origin", fake_ref) diff --git a/tools/python_tests/mbed_tools/project/_internal/test_progress.py b/tools/python_tests/mbed_tools/project/_internal/test_progress.py deleted file mode 100644 index d1bf32ba97b..00000000000 --- a/tools/python_tests/mbed_tools/project/_internal/test_progress.py +++ /dev/null @@ -1,43 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. 
-# SPDX-License-Identifier: Apache-2.0 -# -from unittest import TestCase, mock - -from mbed_tools.project._internal.progress import ProgressReporter, ProgressBar - - -class TestProgressBar(TestCase): - @mock.patch("mbed_tools.project._internal.progress.ProgressBar.update") - def test_updates_progress_bar_with_correct_block_size(self, mock_bar_update): - bar = ProgressBar(total=100) - bar.update_progress(1, 1) - - mock_bar_update.assert_called_once_with(1) - - def test_sets_total_attribute_to_value_of_total_size(self): - bar = ProgressBar() - - self.assertIsNone(bar.total) - - bar.update_progress(1, 2, total_size=33) - - self.assertEqual(bar.total, 33) - - -@mock.patch("mbed_tools.project._internal.progress.ProgressBar", autospec=True) -class TestProgressReporter(TestCase): - def test_creates_progress_bar_on_begin_opcode(self, mock_progress_bar): - reporter = ProgressReporter() - reporter._cur_line = "begin" - reporter.update(reporter.BEGIN, 1) - - mock_progress_bar.assert_called_once() - - def test_closes_progress_bar_on_end_opcode(self, mock_progress_bar): - reporter = ProgressReporter() - reporter.bar = mock_progress_bar() - reporter.update(reporter.END, 1) - - reporter.bar.close.assert_called_once() - reporter.bar.update.assert_not_called() diff --git a/tools/python_tests/mbed_tools/project/_internal/test_project_data.py b/tools/python_tests/mbed_tools/project/_internal/test_project_data.py index 6955dbb0219..0547298b113 100644 --- a/tools/python_tests/mbed_tools/project/_internal/test_project_data.py +++ b/tools/python_tests/mbed_tools/project/_internal/test_project_data.py @@ -16,45 +16,12 @@ APP_CONFIG_FILE_NAME_JSON, ) from python_tests.mbed_tools.project.factories import ( - make_mbed_lib_reference, make_mbed_program_files, make_mbed_os_files, ) class TestMbedProgramFiles: - def test_from_new_raises_if_program_files_already_exist(self, tmp_path): - root = pathlib.Path(tmp_path, "foo") - make_mbed_program_files(root) - - with pytest.raises(ValueError): - MbedProgramFiles.from_new(root) - - def test_from_new_returns_valid_program_file_set(self, tmp_path): - root = pathlib.Path(tmp_path, "foo") - root.mkdir() - - program = MbedProgramFiles.from_new(root) - - assert program.app_config_file.exists() - assert program.mbed_os_ref.exists() - assert program.cmakelists_file.exists() - - def test_from_new_calls_render_template_for_gitignore_and_main(self, tmp_path): - with mock.patch( - "mbed_tools.project._internal.project_data.render_cmakelists_template" - ) as render_cmakelists_template, mock.patch( - "mbed_tools.project._internal.project_data.render_main_cpp_template" - ) as render_main_cpp_template, mock.patch( - "mbed_tools.project._internal.project_data.render_gitignore_template" - ) as render_gitignore_template: - root = pathlib.Path(tmp_path, "foo") - root.mkdir() - program_files = MbedProgramFiles.from_new(root) - render_cmakelists_template.assert_called_once_with(program_files.cmakelists_file, "foo") - render_main_cpp_template.assert_called_once_with(root / MAIN_CPP_FILE_NAME) - render_gitignore_template.assert_called_once_with(root / ".gitignore") - def test_from_existing_finds_existing_program_data(self, tmp_path): root = pathlib.Path(tmp_path, "foo") make_mbed_program_files(root) @@ -81,34 +48,6 @@ def test_from_existing_finds_existing_program_data_app_json(self, tmp_path): assert program.cmakelists_file.exists() -class TestMbedLibReference: - def test_is_resolved_returns_true_if_source_code_dir_exists(self, tmp_path): - root = pathlib.Path(tmp_path, "foo") - lib = 
make_mbed_lib_reference(root, resolved=True) - - assert lib.is_resolved() - - def test_is_resolved_returns_false_if_source_code_dir_doesnt_exist(self, tmp_path): - root = pathlib.Path(tmp_path, "foo") - lib = make_mbed_lib_reference(root) - - assert not lib.is_resolved() - - def test_get_git_reference_returns_lib_file_contents(self, tmp_path): - root = pathlib.Path(tmp_path, "foo") - url = "https://github.com/mylibrepo" - ref = "latest" - references = [f"{url}#{ref}", f"{url}/#{ref}"] - - for full_ref in references: - lib = make_mbed_lib_reference(root, ref_url=full_ref) - - reference = lib.get_git_reference() - - assert reference.repo_url == url - assert reference.ref == ref - - class TestMbedOS: def test_from_existing_finds_existing_mbed_os_data(self, tmp_path): root_path = pathlib.Path(tmp_path, "my-version-of-mbed-os") diff --git a/tools/python_tests/mbed_tools/project/_internal/test_render_templates.py b/tools/python_tests/mbed_tools/project/_internal/test_render_templates.py deleted file mode 100644 index 3c4a74e4e85..00000000000 --- a/tools/python_tests/mbed_tools/project/_internal/test_render_templates.py +++ /dev/null @@ -1,44 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -from pathlib import Path -from unittest import mock - -from mbed_tools.project._internal.render_templates import ( - render_cmakelists_template, - render_main_cpp_template, - render_gitignore_template, -) - - -@mock.patch("mbed_tools.project._internal.render_templates.datetime") -class TestRenderTemplates: - def test_renders_cmakelists_template(self, mock_datetime, tmp_path): - the_year = 3999 - mock_datetime.datetime.now.return_value.year = the_year - program_name = "mytestprogram" - file_path = Path(tmp_path, "mytestpath") - - render_cmakelists_template(file_path, program_name) - output = file_path.read_text() - - assert str(the_year) in output - assert program_name in output - - def test_renders_main_cpp_template(self, mock_datetime, tmp_path): - the_year = 3999 - mock_datetime.datetime.now.return_value.year = the_year - file_path = Path(tmp_path, "mytestpath") - - render_main_cpp_template(file_path) - - assert str(the_year) in file_path.read_text() - - def test_renders_gitignore_template(self, _, tmp_path): - file_path = Path(tmp_path, "mytestpath") - - render_gitignore_template(file_path) - - assert "cmake_build" in file_path.read_text() - assert ".mbedbuild" in file_path.read_text() diff --git a/tools/python_tests/mbed_tools/project/factories.py b/tools/python_tests/mbed_tools/project/factories.py index 8a188b918d9..54b002d1b5c 100644 --- a/tools/python_tests/mbed_tools/project/factories.py +++ b/tools/python_tests/mbed_tools/project/factories.py @@ -2,7 +2,6 @@ # Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. 
# SPDX-License-Identifier: Apache-2.0 # -from mbed_tools.project._internal.libraries import MbedLibReference from mbed_tools.project._internal.project_data import ( CMAKELISTS_FILE_NAME, APP_CONFIG_FILE_NAME_JSON5, @@ -19,23 +18,6 @@ def make_mbed_program_files(root, config_file_name=APP_CONFIG_FILE_NAME_JSON5): (root / CMAKELISTS_FILE_NAME).touch() -def make_mbed_lib_reference(root, name="mylib.lib", resolved=False, ref_url=None): - ref_file = root / name - source_dir = ref_file.with_suffix("") - if not root.exists(): - root.mkdir() - - ref_file.touch() - - if resolved: - source_dir.mkdir() - - if ref_url is not None: - ref_file.write_text(ref_url) - - return MbedLibReference(reference_file=ref_file, source_code_path=source_dir) - - def make_mbed_os_files(root): if not root.exists(): root.mkdir() diff --git a/tools/python_tests/mbed_tools/project/test_mbed_program.py b/tools/python_tests/mbed_tools/project/test_mbed_program.py index d7d6239f253..77ec12328e3 100644 --- a/tools/python_tests/mbed_tools/project/test_mbed_program.py +++ b/tools/python_tests/mbed_tools/project/test_mbed_program.py @@ -8,7 +8,7 @@ from mbed_tools.project import MbedProgram -from mbed_tools.project.exceptions import ExistingProgram, ProgramNotFound, MbedOSNotFound +from mbed_tools.project.exceptions import ExistingProgramError, ProgramNotFoundError, MbedOSNotFoundError from mbed_tools.project.mbed_program import _find_program_root, parse_url from mbed_tools.project._internal.project_data import MbedProgramFiles, BUILD_DIR from python_tests.mbed_tools.project.factories import make_mbed_program_files, make_mbed_os_files @@ -25,66 +25,19 @@ def from_new_set_target_toolchain(program_root): class TestInitialiseProgram: - def test_from_new_local_dir_raises_if_path_is_existing_program(self, tmp_path): - program_root = pathlib.Path(tmp_path, "programfoo") - program_root.mkdir() - (program_root / "mbed-os.lib").touch() - - with pytest.raises(ExistingProgram): - MbedProgram.from_new(program_root) - - def test_from_new_local_dir_generates_valid_program_creating_directory(self, tmp_path): - fs_root = pathlib.Path(tmp_path, "foo") - fs_root.mkdir() - program_root = fs_root / "programfoo" - - program = from_new_set_target_toolchain(program_root) - - assert program.files == MbedProgramFiles.from_existing( - program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR - ) - - def test_from_new_local_dir_generates_valid_program_creating_directory_in_cwd(self, tmp_path): - old_cwd = os.getcwd() - try: - fs_root = pathlib.Path(tmp_path, "foo") - fs_root.mkdir() - os.chdir(fs_root) - program_root = pathlib.Path("programfoo") - - program = from_new_set_target_toolchain(program_root) - - assert program.files == MbedProgramFiles.from_existing( - program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR - ) - finally: - os.chdir(old_cwd) - - def test_from_new_local_dir_generates_valid_program_existing_directory(self, tmp_path): - fs_root = pathlib.Path(tmp_path, "foo") - fs_root.mkdir() - program_root = fs_root / "programfoo" - program_root.mkdir() - - program = from_new_set_target_toolchain(program_root) - - assert program.files == MbedProgramFiles.from_existing( - program_root, program_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR - ) - def test_from_existing_raises_if_path_is_not_a_program(self, tmp_path): fs_root = pathlib.Path(tmp_path, "foo") fs_root.mkdir() program_root = fs_root / "programfoo" - with pytest.raises(ProgramNotFound): + with pytest.raises(ProgramNotFoundError): MbedProgram.from_existing(program_root, program_root / 
BUILD_DIR / DEFAULT_BUILD_SUBDIR) def test_from_existing_raises_if_no_mbed_os_dir_found_and_check_mbed_os_is_true(self, tmp_path): fs_root = pathlib.Path(tmp_path, "foo") make_mbed_program_files(fs_root) - with pytest.raises(MbedOSNotFound): + with pytest.raises(MbedOSNotFoundError): MbedProgram.from_existing(fs_root, fs_root / BUILD_DIR / DEFAULT_BUILD_SUBDIR, check_mbed_os=True) def test_from_existing_returns_valid_program(self, tmp_path): @@ -151,5 +104,5 @@ def test_raises_if_no_program_found(self, tmp_path): program_root = pathlib.Path(tmp_path, "foo") program_root.mkdir() - with pytest.raises(ProgramNotFound): + with pytest.raises(ProgramNotFoundError): _find_program_root(program_root) diff --git a/tools/python_tests/mbed_tools/project/test_mbed_project.py b/tools/python_tests/mbed_tools/project/test_mbed_project.py deleted file mode 100644 index ae84e05c5a3..00000000000 --- a/tools/python_tests/mbed_tools/project/test_mbed_project.py +++ /dev/null @@ -1,85 +0,0 @@ -# -# Copyright (c) 2020-2021 Arm Limited and Contributors. All rights reserved. -# SPDX-License-Identifier: Apache-2.0 -# -import pathlib - -import pytest - -from unittest import mock - -from mbed_tools.project import initialise_project, import_project, deploy_project, get_known_libs - - -@pytest.fixture -def mock_libs(): - with mock.patch("mbed_tools.project.project.LibraryReferences") as libs: - yield libs - - -@pytest.fixture -def mock_program(): - with mock.patch("mbed_tools.project.project.MbedProgram") as prog: - yield prog - - -@pytest.fixture -def mock_git(): - with mock.patch("mbed_tools.project.project.git_utils") as gutils: - yield gutils - - -class TestInitialiseProject: - def test_fetches_mbed_os_when_create_only_is_false(self, mock_libs, mock_program): - path = pathlib.Path() - initialise_project(path, create_only=False) - - mock_program.from_new.assert_called_once_with(path) - mock_libs().fetch.assert_called_once() - - def test_skips_mbed_os_when_create_only_is_true(self, mock_libs, mock_program): - path = pathlib.Path() - initialise_project(path, create_only=True) - - mock_program.from_new.assert_called_once_with(path) - mock_libs().fetch.assert_not_called() - - -class TestImportProject: - def test_clones_from_remote(self, mock_git): - url = "https://git.com/gitorg/repo" - import_project(url, recursive=False) - - mock_git.clone.assert_called_once_with(url, pathlib.Path(url.rsplit("/", maxsplit=1)[-1])) - - def test_resolves_libs_when_recursive_is_true(self, mock_git, mock_libs): - url = "https://git.com/gitorg/repo" - import_project(url, recursive=True) - - mock_git.clone.assert_called_once_with(url, pathlib.Path(url.rsplit("/", maxsplit=1)[-1])) - mock_libs().fetch.assert_called_once() - - -class TestDeployProject: - def test_checks_out_libraries(self, mock_libs): - path = pathlib.Path("somewhere") - deploy_project(path, force=False) - - mock_libs().checkout.assert_called_once_with(force=False) - - def test_resolves_libs_if_unresolved_detected(self, mock_libs): - mock_libs().iter_unresolved.return_value = [1] - path = pathlib.Path("somewhere") - deploy_project(path) - - mock_libs().fetch.assert_called_once() - - -class TestPrintLibs: - def test_list_libraries_gets_known_lib_list(self, mock_libs): - path = pathlib.Path("somewhere") - mock_libs().iter_resolved.return_value = ["", ""] - - libs = get_known_libs(path) - - assert libs == ["", ""] diff --git a/tools/python_tests/mbed_tools/sterm/test_terminal.py b/tools/python_tests/mbed_tools/sterm/test_terminal.py index 1c7be667e49..41a7b47db26 100644 --- 
a/tools/python_tests/mbed_tools/sterm/test_terminal.py +++ b/tools/python_tests/mbed_tools/sterm/test_terminal.py @@ -35,7 +35,7 @@ def test_initialises_serial_port(mock_sterm, mock_serial): terminal.run(port, baud) - mock_serial.assert_called_once_with(port=port, baudrate=str(baud)) + mock_serial.assert_called_once_with(port=port, baudrate=baud) def test_initialises_sterm(mock_sterm, mock_serial): @@ -157,7 +157,7 @@ def test_reset_sends_serial_break(mock_serial, mock_console): term.reset() - mock_serial().sendBreak.assert_called_once() + mock_serial().send_break.assert_called_once() def test_ctrl_b_sends_reset_to_serial_port(mock_serial, mock_console): @@ -168,7 +168,7 @@ def test_ctrl_b_sends_reset_to_serial_port(mock_serial, mock_console): with pytest.raises(StopIteration): term.writer() - mock_serial().sendBreak.assert_called_once() + mock_serial().send_break.assert_called_once() def test_ctrl_h_prints_help_text(mock_serial, mock_console): diff --git a/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py b/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py index 21aaceb4cd3..e7b80872ac7 100644 --- a/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py +++ b/tools/python_tests/mbed_tools/targets/_internal/targets_json_parsers/test_accumulating_attribute_parser.py @@ -185,14 +185,14 @@ def test_remove_element_without_numbers(self): current_attribute_state = {"attribute_1": ["ONE", "TWO=2", "THREE"]} elements_to_remove = ["ONE", "THREE"] expected_result = {"attribute_1": ["TWO=2"]} - result = _remove_attribute_element(current_attribute_state, "attribute_1", elements_to_remove) + _remove_attribute_element(current_attribute_state, "attribute_1", elements_to_remove) - self.assertEqual(result, expected_result) + self.assertEqual(current_attribute_state, expected_result) def test_remove_element_with_numbers(self): current_attribute_state = {"attribute_1": ["ONE", "TWO=2", "THREE"]} elements_to_remove = ["TWO"] expected_result = {"attribute_1": ["ONE", "THREE"]} - result = _remove_attribute_element(current_attribute_state, "attribute_1", elements_to_remove) + _remove_attribute_element(current_attribute_state, "attribute_1", elements_to_remove) - self.assertEqual(result, expected_result) + self.assertEqual(current_attribute_state, expected_result) diff --git a/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py b/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py index c602241ac7f..ff9a1841065 100644 --- a/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py +++ b/tools/python_tests/mbed_tools/targets/_internal/test_board_database.py @@ -13,116 +13,6 @@ from mbed_tools.targets.env import env -class TestGetOnlineBoardData: - """Tests for the method `board_database.get_online_board_data`.""" - - def test_401(self, caplog, requests_mock): - """Given a 401 error code, BoardAPIError is raised.""" - caplog.set_level(logging.DEBUG) - requests_mock.get(board_database._BOARD_API, status_code=401, text="Who are you?") - with pytest.raises(board_database.BoardAPIError): - board_database.get_online_board_data() - assert any(x for x in caplog.records if x.levelno == logging.WARNING and "MBED_API_AUTH_TOKEN" in x.msg), ( - "Auth token should be mentioned" - ) - assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "Who are you?" 
in x.msg), ( - "Message content should be in the debug message" - ) - - def test_404(self, caplog, requests_mock): - """Given a 404 error code, TargetAPIError is raised.""" - caplog.set_level(logging.DEBUG) - requests_mock.get(board_database._BOARD_API, status_code=404, text="Not Found") - with pytest.raises(board_database.BoardAPIError): - board_database.get_online_board_data() - assert any(x for x in caplog.records if x.levelno == logging.WARNING and "404" in x.msg), ( - "HTTP status code should be mentioned" - ) - assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "Not Found" in x.msg), ( - "Message content should be in the debug message" - ) - - def test_200_invalid_json(self, caplog, requests_mock): - """Given a valid response but invalid json, JSONDecodeError is raised.""" - caplog.set_level(logging.DEBUG) - requests_mock.get(board_database._BOARD_API, text="some text") - with pytest.raises(board_database.ResponseJSONError): - board_database.get_online_board_data() - assert any(x for x in caplog.records if x.levelno == logging.WARNING and "Invalid JSON" in x.msg), ( - "Invalid JSON should be mentioned" - ) - assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "some text" in x.msg), ( - "Message content should be in the debug message" - ) - - def test_200_no_data_field(self, caplog, requests_mock): - """Given a valid response but no data field, ResponseJSONError is raised.""" - caplog.set_level(logging.DEBUG) - requests_mock.get(board_database._BOARD_API, json={"notdata": [], "stillnotdata": {}}) - with pytest.raises(board_database.ResponseJSONError): - board_database.get_online_board_data() - assert any(x for x in caplog.records if x.levelno == logging.WARNING and "missing the 'data' field" in x.msg), ( - "Data field should be mentioned" - ) - assert any(x for x in caplog.records if x.levelno == logging.DEBUG and "notdata, stillnotdata" in x.msg), ( - "JSON keys from message should be in the debug message" - ) - - def test_200_value_data(self, requests_mock): - """Given a valid response, target data is set from the returned json.""" - requests_mock.get(board_database._BOARD_API, json={"data": 42}) - board_data = board_database.get_online_board_data() - assert 42 == board_data, "Target data should match the contents of the target API data" - - @mock.patch("mbed_tools.targets._internal.board_database.requests") - @mock.patch("mbed_tools.targets._internal.board_database.env", spec_set=env) - def test_auth_header_set_with_token(self, env, requests): - """Given an authorization token env variable, get is called with authorization header.""" - env.MBED_API_AUTH_TOKEN = "token" - header = {"Authorization": "Bearer token"} - board_database._get_request() - requests.get.assert_called_once_with(board_database._BOARD_API, headers=header) - - @mock.patch("mbed_tools.targets._internal.board_database.requests") - def test_no_auth_header_set_with_empty_token_var(self, requests): - """Given no authorization token env variable, get is called with no header.""" - board_database._get_request() - requests.get.assert_called_once_with(board_database._BOARD_API, headers=None) - - @mock.patch("mbed_tools.targets._internal.board_database.requests.get") - def test_logs_no_warning_on_success(self, get, caplog): - board_database._get_request() - assert not caplog.records - - @mock.patch("mbed_tools.targets._internal.board_database.requests.get") - def test_raises_tools_error_on_connection_error(self, get, caplog): - get.side_effect = 
board_database.requests.exceptions.ConnectionError - with pytest.raises(board_database.BoardAPIError): - board_database._get_request() - assert "Unable to connect" in caplog.text - assert len(caplog.records) == 1 - - @mock.patch("mbed_tools.targets._internal.board_database.requests.get") - def test_logs_error_on_requests_ssl_error(self, get, caplog): - get.side_effect = board_database.requests.exceptions.SSLError - with pytest.raises(board_database.BoardAPIError): - board_database._get_request() - assert "verify an SSL" in caplog.text - - @mock.patch("mbed_tools.targets._internal.board_database.requests.get") - def test_logs_error_on_requests_proxy_error(self, get, caplog): - get.side_effect = board_database.requests.exceptions.ProxyError - with pytest.raises(board_database.BoardAPIError): - board_database._get_request() - assert "connect to proxy" in caplog.text - - @mock.patch.dict("os.environ", {"http_proxy": "http://proxy:8080", "https_proxy": "https://proxy:8080"}) - def test_requests_uses_proxy_variables(self, requests_mock): - requests_mock.get(board_database._BOARD_API) - board_database._get_request() - assert requests_mock.last_request.proxies == {"http": "http://proxy:8080", "https": "https://proxy:8080"} - - class TestGetOfflineTargetData: """Tests for the method get_offline_target_data.""" diff --git a/tools/python_tests/mbed_tools/targets/test_boards.py b/tools/python_tests/mbed_tools/targets/test_boards.py index 4983992babb..dd6aeed4756 100644 --- a/tools/python_tests/mbed_tools/targets/test_boards.py +++ b/tools/python_tests/mbed_tools/targets/test_boards.py @@ -10,11 +10,11 @@ from mbed_tools.targets import Board from mbed_tools.targets.boards import Boards -from mbed_tools.targets.exceptions import UnknownBoard +from mbed_tools.targets.exceptions import UnknownBoardError from python_tests.mbed_tools.targets.factories import make_dummy_internal_board_data -@mock.patch("mbed_tools.targets._internal.board_database.get_online_board_data") +@mock.patch("mbed_tools.targets._internal.board_database.get_offline_board_data") class TestBoards(TestCase): """Tests for the class `Boards`.""" @@ -23,7 +23,7 @@ def test_iteration_is_repeatable(self, mocked_get_board_data): fake_board_data = make_dummy_internal_board_data() mocked_get_board_data.return_value = fake_board_data - boards = Boards.from_online_database() + boards = Boards.from_offline_database() tgts_a = [b for b in boards] tgts_b = [b for b in boards] @@ -34,7 +34,7 @@ def test_board_found_in_boards_membership_test(self, mocked_get_board_data): board_data = make_dummy_internal_board_data() mocked_get_board_data.return_value = board_data - boards = Boards.from_online_database() + boards = Boards.from_offline_database() board, *_ = boards self.assertIn(board, boards) @@ -44,7 +44,7 @@ def test_membership_test_returns_false_for_non_board(self, mocked_get_board_data board_data = make_dummy_internal_board_data() mocked_get_board_data.return_value = board_data - boards = Boards.from_online_database() + boards = Boards.from_offline_database() self.assertFalse("a" in boards) @@ -53,7 +53,7 @@ def test_len_boards(self, mocked_get_board_data): board_data = make_dummy_internal_board_data() mocked_get_board_data.return_value = board_data - self.assertEqual(len(Boards.from_online_database()), len(board_data)) + self.assertEqual(len(Boards.from_offline_database()), len(board_data)) def test_get_board_success(self, mocked_get_board_data): """Check a Board can be looked up by arbitrary parameters.""" @@ -62,9 +62,14 @@ def 
test_get_board_success(self, mocked_get_board_data): {"attributes": {"product_code": "0200"}}, {"attributes": {"product_code": "0100"}}, ] - mocked_get_board_data.return_value = fake_board_data - boards = Boards.from_online_database() + # Boards.from_offline_database expects the data to have been "filtered" through the Boards interface + boards = [Board.from_online_board_entry(b) for b in fake_board_data] + filtered_board_data = [asdict(board) for board in boards] + + mocked_get_board_data.return_value = filtered_board_data + + boards = Boards.from_offline_database() board = boards.get_board(lambda b: b.product_code == "0100") self.assertEqual(board.product_code, "0100", "Board's product code should match the given product code.") @@ -73,37 +78,24 @@ def test_get_board_failure(self, mocked_get_board_data): """Check Boards handles queries without a match.""" mocked_get_board_data.return_value = [] - boards = Boards.from_online_database() + boards = Boards.from_offline_database() - with self.assertRaises(UnknownBoard): + with self.assertRaises(UnknownBoardError): boards.get_board(lambda b: b.product_code == "unknown") - @mock.patch("mbed_tools.targets._internal.board_database.get_offline_board_data") - def test_json_dump_from_raw_and_filtered_data(self, mocked_get_offline_board_data, mocked_get_online_board_data): + def test_json_dump_from_raw_and_filtered_data(self, mocked_get_board_data): raw_board_data = [ {"attributes": {"product_code": "0200", "board": "test"}}, {"attributes": {"product_code": "0100", "board": "test2"}}, ] - mocked_get_online_board_data.return_value = raw_board_data boards = [Board.from_online_board_entry(b) for b in raw_board_data] filtered_board_data = [asdict(board) for board in boards] - mocked_get_offline_board_data.return_value = filtered_board_data - - # Boards.from_online_database handles "raw" board entries from the online db - boards = Boards.from_online_database() - json_str_from_raw = boards.json_dump() - t1_raw, t2_raw = boards + mocked_get_board_data.return_value = filtered_board_data # Boards.from_offline_database expects the data to have been "filtered" through the Boards interface offline_boards = Boards.from_offline_database() json_str_from_filtered = offline_boards.json_dump() t1_filt, t2_filt = offline_boards - self.assertEqual( - json_str_from_raw, - json.dumps([asdict(t1_raw), asdict(t2_raw)], indent=4), - "JSON string should match serialised board __dict__.", - ) - self.assertEqual(json_str_from_filtered, json.dumps([t1_filt.__dict__, t2_filt.__dict__], indent=4)) diff --git a/tools/python_tests/mbed_tools/targets/test_config.py b/tools/python_tests/mbed_tools/targets/test_config.py index bad38a60b7c..3237d4f2f2d 100644 --- a/tools/python_tests/mbed_tools/targets/test_config.py +++ b/tools/python_tests/mbed_tools/targets/test_config.py @@ -12,12 +12,3 @@ class TestMbedApiAuthToken(TestCase): @mock.patch.dict(os.environ, {"MBED_API_AUTH_TOKEN": "sometoken"}) def test_returns_api_token_set_in_env(self): self.assertEqual(env.MBED_API_AUTH_TOKEN, "sometoken") - - -class TestDatabaseMode(TestCase): - @mock.patch.dict(os.environ, {"MBED_DATABASE_MODE": "ONLINE"}) - def test_returns_database_mode_set_in_env(self): - self.assertEqual(env.MBED_DATABASE_MODE, "ONLINE") - - def test_returns_default_database_mode_if_not_set_in_env(self): - self.assertEqual(env.MBED_DATABASE_MODE, "AUTO") diff --git a/tools/python_tests/mbed_tools/targets/test_get_board.py b/tools/python_tests/mbed_tools/targets/test_get_board.py index 93351651d9a..d7ba1da7505 100644 --- 
a/tools/python_tests/mbed_tools/targets/test_get_board.py +++ b/tools/python_tests/mbed_tools/targets/test_get_board.py @@ -12,9 +12,9 @@ # Import from top level as this is the expected interface for users from mbed_tools.targets import get_board_by_online_id, get_board_by_product_code, get_board_by_jlink_slug -from mbed_tools.targets.get_board import _DatabaseMode, _get_database_mode, get_board +from mbed_tools.targets.get_board import get_board from mbed_tools.targets.env import env -from mbed_tools.targets.exceptions import UnknownBoard, UnsupportedMode +from mbed_tools.targets.exceptions import UnknownBoardError, UnsupportedModeError from python_tests.mbed_tools.targets.factories import make_board @@ -24,12 +24,6 @@ def mock_get_board(): yield gbp -@pytest.fixture -def mock_env(): - with mock.patch("mbed_tools.targets.get_board.env", spec_set=env) as gbp: - yield gbp - - @pytest.fixture def mocked_boards(): with mock.patch("mbed_tools.targets.get_board.Boards", autospec=True) as gbp: @@ -37,17 +31,7 @@ def mocked_boards(): class TestGetBoard: - def test_online_mode(self, mock_env, mocked_boards): - mock_env.MBED_DATABASE_MODE = "ONLINE" - fn = mock.Mock() - - subject = get_board(fn) - - assert subject == mocked_boards.from_online_database().get_board.return_value - mocked_boards.from_online_database().get_board.assert_called_once_with(fn) - - def test_offline_mode(self, mock_env, mocked_boards): - mock_env.MBED_DATABASE_MODE = "OFFLINE" + def test_offline_mode(self, mocked_boards): fn = mock.Mock() subject = get_board(fn) @@ -55,38 +39,6 @@ def test_offline_mode(self, mock_env, mocked_boards): assert subject == mocked_boards.from_offline_database().get_board.return_value mocked_boards.from_offline_database().get_board.assert_called_once_with(fn) - def test_auto_mode_calls_offline_boards_first(self, mock_env, mocked_boards): - mock_env.MBED_DATABASE_MODE = "AUTO" - fn = mock.Mock() - - subject = get_board(fn) - - assert subject == mocked_boards.from_offline_database().get_board.return_value - mocked_boards.from_online_database().get_board.assert_not_called() - mocked_boards.from_offline_database().get_board.assert_called_once_with(fn) - - def test_auto_mode_falls_back_to_online_database_when_board_not_found(self, mock_env, mocked_boards): - mock_env.MBED_DATABASE_MODE = "AUTO" - mocked_boards.from_offline_database().get_board.side_effect = UnknownBoard - fn = mock.Mock() - - subject = get_board(fn) - - assert subject == mocked_boards.from_online_database().get_board.return_value - mocked_boards.from_offline_database().get_board.assert_called_once_with(fn) - mocked_boards.from_online_database().get_board.assert_called_once_with(fn) - - def test_auto_mode_raises_when_board_not_found_offline_with_no_network(self, mock_env, mocked_boards): - mock_env.MBED_DATABASE_MODE = "AUTO" - mocked_boards.from_offline_database().get_board.side_effect = UnknownBoard - mocked_boards.from_online_database().get_board.side_effect = BoardAPIError - fn = mock.Mock() - - with pytest.raises(UnknownBoard): - get_board(fn) - mocked_boards.from_offline_database().get_board.assert_called_once_with(fn) - mocked_boards.from_online_database().get_board.assert_called_once_with(fn) - class TestGetBoardByProductCode: def test_matches_boards_by_product_code(self, mock_get_board): @@ -138,14 +90,3 @@ def test_matches_boards_by_online_id(self, mock_get_board): assert fn(matching_board_2) assert fn(matching_board_3) assert not fn(not_matching_board) - - -class TestGetDatabaseMode: - def 
test_returns_configured_database_mode(self, mock_env): - mock_env.MBED_DATABASE_MODE = "OFFLINE" - assert _get_database_mode() == _DatabaseMode.OFFLINE - - def test_raises_when_configuration_is_not_supported(self, mock_env): - mock_env.MBED_DATABASE_MODE = "NOT_VALID" - with pytest.raises(UnsupportedMode): - _get_database_mode() diff --git a/tools/run_python_linters.sh b/tools/run_python_linters.sh index aa0cecfba79..3323bfd6be6 100644 --- a/tools/run_python_linters.sh +++ b/tools/run_python_linters.sh @@ -24,8 +24,14 @@ else fi if ! command -v ruff >/dev/null 2>&1; then - echo "Linters optional dependency of Mbed not installed. Please run 'mbed-os/venv/bin/pip install mbed-os/tools[linters]'." + echo "Linters optional dependency of Mbed not installed. Please run 'cd mbed-os && venv/bin/pip install -e ./tools[linters]'." fi -echo ">> Formatting with Ruff..." -ruff format \ No newline at end of file +echo ">> Formatting with ruff..." +ruff format + +echo ">> Linting with ruff..." +ruff check --fix python/mbed_tools python/mbed_platformio + +echo ">> Linting and type checking with basedpyright..." +basedpyright \ No newline at end of file
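Note on the exception renames threaded through the test updates above: lookup and project errors now carry an `Error` suffix (`UnknownBoardError`, `DeviceLookupFailedError`, `NoDevicesFoundError`, `ProgramNotFoundError`, and so on), in line with the usual pep8-naming convention for exception classes. A hedged sketch of a caller updated for the new names, assuming `find_connected_device(name, identifier)` keeps the two-argument form used in the tests and that both device exceptions can propagate from it:

```python
# Sketch only: names follow the imports shown in the hunks above; whether both
# exceptions can escape find_connected_device is an assumption of this example.
from mbed_tools.devices.devices import find_connected_device
from mbed_tools.devices.exceptions import DeviceLookupFailedError, NoDevicesFoundError


def pick_k64f():
    try:
        # "K64F" is an example target name; None means no index was given.
        return find_connected_device("K64F", None)
    except NoDevicesFoundError:
        print("No Mbed devices are connected.")
    except DeviceLookupFailedError as err:
        print(f"Device lookup failed: {err}")
    return None
```

The reworked `tools/run_python_linters.sh` runs the same three steps locally (ruff format, `ruff check --fix` on `python/mbed_tools` and `python/mbed_platformio`, then basedpyright), e.g. via `bash run_python_linters.sh`, presumably from inside `tools/` as the relative `python/...` paths suggest.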