diff --git a/.ci/scripts/unittest-linux.sh b/.ci/scripts/unittest-linux.sh index dcb15ba5050..d0f107ed338 100755 --- a/.ci/scripts/unittest-linux.sh +++ b/.ci/scripts/unittest-linux.sh @@ -20,8 +20,9 @@ if [[ "$BUILD_TOOL" == "cmake" ]]; then # Setup swiftshader and Vulkan SDK which are required to build the Vulkan delegate source .ci/scripts/setup-vulkan-linux-deps.sh + # We need the runner to test the built library. PYTHON_EXECUTABLE=python \ - CMAKE_ARGS="-DEXECUTORCH_BUILD_PYBIND=ON -DEXECUTORCH_BUILD_XNNPACK=ON -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON" \ + CMAKE_ARGS="-DEXECUTORCH_BUILD_EXECUTOR_RUNNER=ON -DEXECUTORCH_BUILD_TESTS=ON" \ .ci/scripts/setup-linux.sh "$@" .ci/scripts/unittest-linux-cmake.sh diff --git a/.ci/scripts/unittest-macos.sh b/.ci/scripts/unittest-macos.sh index 1c202c0c235..602685117d5 100755 --- a/.ci/scripts/unittest-macos.sh +++ b/.ci/scripts/unittest-macos.sh @@ -20,8 +20,9 @@ export PATH="${TMP_DIR}:$PATH" trap 'rm -rfv ${TMP_DIR}' EXIT # Setup MacOS dependencies as there is no Docker support on MacOS atm +# We need the runner to test the built library. PYTHON_EXECUTABLE=python \ -CMAKE_ARGS="-DEXECUTORCH_BUILD_PYBIND=ON -DEXECUTORCH_BUILD_COREML=ON -DEXECUTORCH_BUILD_MPS=ON -DEXECUTORCH_BUILD_XNNPACK=ON -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON" \ +CMAKE_ARGS="-DEXECUTORCH_BUILD_EXECUTOR_RUNNER=ON -DEXECUTORCH_BUILD_TESTS=ON" \ ${CONDA_RUN} --no-capture-output \ .ci/scripts/setup-macos.sh "$@" diff --git a/.ci/scripts/wheel/envvar_base.sh b/.ci/scripts/wheel/envvar_base.sh index 15f590f0f68..f57f6131f46 100755 --- a/.ci/scripts/wheel/envvar_base.sh +++ b/.ci/scripts/wheel/envvar_base.sh @@ -11,7 +11,3 @@ # Ensure that CMAKE_ARGS is defined before referencing it. Defaults to empty # if not defined. export CMAKE_ARGS="${CMAKE_ARGS:-}" - -# Link the XNNPACK backend into the pybindings runtime so that users can execute -# ExecuTorch programs that delegate to it. -CMAKE_ARGS="${CMAKE_ARGS} -DEXECUTORCH_BUILD_PYBIND=ON -DEXECUTORCH_BUILD_XNNPACK=ON" diff --git a/.ci/scripts/wheel/envvar_macos.sh b/.ci/scripts/wheel/envvar_macos.sh index ad30f86d20c..3b24b3f7188 100755 --- a/.ci/scripts/wheel/envvar_macos.sh +++ b/.ci/scripts/wheel/envvar_macos.sh @@ -9,7 +9,3 @@ # any variables so that subprocesses will see them. source "${GITHUB_WORKSPACE}/${REPOSITORY}/.ci/scripts/wheel/envvar_base.sh" - -# When building for macOS, link additional backends into the pybindings runtime. 
-CMAKE_ARGS="${CMAKE_ARGS} -DEXECUTORCH_BUILD_COREML=ON" -CMAKE_ARGS="${CMAKE_ARGS} -DEXECUTORCH_BUILD_MPS=ON" diff --git a/CMakePresets.json b/CMakePresets.json index ac19bbfed5f..eea54453383 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -1,11 +1,5 @@ { - "version": 10, - "cmakeMinimumRequired": { - "major": 3, - "minor": 31, - "patch": 0 - }, - "$comment": "On-device AI across mobile, embedded and edge for PyTorch.", + "version": 6, "configurePresets": [ { "name": "common", diff --git a/backends/openvino/scripts/openvino_build.sh b/backends/openvino/scripts/openvino_build.sh index 83ffd7542f3..39569e64ca5 100755 --- a/backends/openvino/scripts/openvino_build.sh +++ b/backends/openvino/scripts/openvino_build.sh @@ -48,16 +48,11 @@ main() { # Set parameters to configure the project with CMake # Note: Add any additional configuration options you need here export CMAKE_ARGS="-DEXECUTORCH_BUILD_OPENVINO=ON \ - -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \ - -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \ - -DEXECUTORCH_BUILD_EXTENSION_RUNNER_UTIL=ON \ - -DEXECUTORCH_ENABLE_LOGGING=ON \ - -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \ - -DEXECUTORCH_BUILD_PYBIND=ON" + -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON" export CMAKE_BUILD_ARGS="--target openvino_backend" # Build the package - pip install . --no-build-isolation + ./install_executorch.sh else echo "Error: Argument is not valid: $build_type" diff --git a/runtime/executor/test/CMakeLists.txt b/runtime/executor/test/CMakeLists.txt index c632ddffaba..c0b9c6c90c9 100644 --- a/runtime/executor/test/CMakeLists.txt +++ b/runtime/executor/test/CMakeLists.txt @@ -32,10 +32,10 @@ add_custom_command( COMMAND ${PYTHON_EXECUTABLE} -m test.models.export_program --modules "ModuleAdd,ModuleAddHalf,ModuleAddMul,ModuleDynamicCatUnallocatedIO,ModuleIndex,ModuleMultipleEntry,ModuleSimpleTrain,ModuleStateful" - --outdir "${CMAKE_CURRENT_BINARY_DIR}" 2> /dev/null + --outdir "${CMAKE_CURRENT_BINARY_DIR}" COMMAND ${PYTHON_EXECUTABLE} -m test.models.export_program --modules "ModuleAddMul" - --external-constants --outdir "${CMAKE_CURRENT_BINARY_DIR}" 2> /dev/null + --external-constants --outdir "${CMAKE_CURRENT_BINARY_DIR}" COMMAND ${PYTHON_EXECUTABLE} -m test.models.export_delegated_program --modules "ModuleAddMul" --backend_id "StubBackend" --outdir "${CMAKE_CURRENT_BINARY_DIR}/delegated/" || true diff --git a/setup.py b/setup.py index 4ad834d8ef9..899e1b3cbf5 100644 --- a/setup.py +++ b/setup.py @@ -48,8 +48,8 @@ import contextlib import os -import platform import re +import shutil import site import sys @@ -58,108 +58,31 @@ import setuptools # noqa: F401 # usort: skip import subprocess -from distutils import log -from distutils.sysconfig import get_python_lib -from functools import cache +from distutils import log # type: ignore[import-not-found] +from distutils.sysconfig import get_python_lib # type: ignore[import-not-found] from pathlib import Path -from typing import Dict, List, Optional +from typing import List, Optional from setuptools import Extension, setup from setuptools.command.build import build from setuptools.command.build_ext import build_ext from setuptools.command.build_py import build_py - -@cache -def _cmake_args_defines() -> Dict[str, str]: - result = {} - - args = re.split(r"\s+", os.environ.get("CMAKE_ARGS", "")) - for arg in args: - if arg.startswith("-D") and "=" in arg: - arg_key, value = arg.split("=") - key = arg_key[2:] # Remove the leading "-D" - result[key] = value - - return result +try: + from tools.cmake.cmake_cache import CMakeCache 
+except ImportError: + sys.path.insert( + 0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "tools", "cmake") + ) + from cmake_cache import CMakeCache # type: ignore[no-redef, import-not-found] def _is_macos() -> bool: return sys.platform == "darwin" -class ShouldBuild: - """Indicates whether to build various components.""" - - @staticmethod - def _is_truthy(value: Optional[str]) -> bool: - if (value is None) or (value.lower() in ("off", "0", "")): - return False - return True - - @staticmethod - def _is_cmake_arg_enabled(var: str, default: bool) -> bool: - if os.environ.get(var) is not None: - raise RuntimeError( - f"Python wheel building does not support setting '{var}' using environment variables. Use CMAKE_ARGS='-D{var}=ON' instead." - ) - - value = _cmake_args_defines().get(var, None) - if value is None: - return default - return ShouldBuild._is_truthy(value) - - @classmethod - def pybindings(cls) -> bool: - return cls._is_cmake_arg_enabled( - "EXECUTORCH_BUILD_PYBIND", - # If the user hasn't specified anything, we want to turn this on if any - # bindings are requested explicitly. - # - # Please keep this in sync with `VALID_PYBINDS` in install_executorch.py. - default=any( - [ - cls.coreml(), - cls.mps(), - cls.openvino(), - cls.xnnpack(), - cls.neutron(), - cls.training(), - ] - ), - ) - - @classmethod - def coreml(cls) -> bool: - return cls._is_cmake_arg_enabled("EXECUTORCH_BUILD_COREML", default=_is_macos()) - - @classmethod - def mps(cls) -> bool: - return cls._is_cmake_arg_enabled("EXECUTORCH_BUILD_MPS", default=False) - - @classmethod - def openvino(cls) -> bool: - return cls._is_cmake_arg_enabled("EXECUTORCH_BUILD_OPENVINO", default=False) - - @classmethod - def xnnpack(cls) -> bool: - return cls._is_cmake_arg_enabled("EXECUTORCH_BUILD_XNNPACK", default=True) - - @classmethod - def neutron(cls) -> bool: - return cls._is_cmake_arg_enabled("EXECUTORCH_BUILD_NEUTRON", default=False) - - @classmethod - def training(cls) -> bool: - return cls._is_cmake_arg_enabled( - "EXECUTORCH_BUILD_EXTENSION_TRAINING", default=False - ) - - @classmethod - def llama_custom_ops(cls) -> bool: - return cls._is_cmake_arg_enabled( - "EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT", default=True - ) +def _is_windows() -> bool: + return sys.platform == "win32" class Version: @@ -235,21 +158,20 @@ def write_to_python_file(cls, path: str) -> None: # is Release. def get_build_type(is_debug=None) -> str: debug = int(os.environ.get("DEBUG", 0) or 0) if is_debug is None else is_debug - cfg = "Debug" if debug else "Release" - return cfg + return "Debug" if debug else "Release" def get_dynamic_lib_name(name: str) -> str: - if platform.system() == "Windows": - return name + ".dll" - elif platform.system() == "Darwin": - return "lib" + name + ".dylib" + if _is_windows(): + return f"{name}.dll" + elif _is_macos(): + return f"lib{name}.dylib" else: - return "lib" + name + ".so" + return f"lib{name}.so" def get_executable_name(name: str) -> str: - if platform.system() == "Windows": + if _is_windows(): return name + ".exe" else: return name @@ -258,7 +180,13 @@ def get_executable_name(name: str) -> str: class _BaseExtension(Extension): """A base class that maps an abstract source to an abstract destination.""" - def __init__(self, src: str, dst: str, name: str): + def __init__( + self, + src: str, + dst: str, + name: str, + dependent_cmake_flags: List[str], + ): # Source path; semantics defined by the subclass. 
self.src: str = src @@ -273,15 +201,12 @@ def __init__(self, src: str, dst: str, name: str): # that doesn't look like a module path. self.name: str = name - super().__init__(name=self.name, sources=[]) + self.dependent_cmake_flags = dependent_cmake_flags + self.cmake_cache: Optional[CMakeCache] = None - def src_path(self, installer: "InstallerBuildExt") -> Path: - """Returns the path to the source file, resolving globs. + super().__init__(name=self.name, sources=[]) - Args: - installer: The InstallerBuildExt instance that is installing the - file. - """ + def _get_build_dir(self, installer: "InstallerBuildExt") -> Path: # Share the cmake-out location with CustomBuild. build_cmd = installer.get_finalized_command("build") if "%CMAKE_CACHE_DIR%" in self.src: @@ -292,11 +217,32 @@ def src_path(self, installer: "InstallerBuildExt") -> Path: "command. Please double check if the command is correct." ) else: - build_dir = Path(build_cmd.cmake_cache_dir) + return Path(build_cmd.cmake_cache_dir) else: # If the src path doesn't contain %CMAKE_CACHE_DIR% placeholder, # try to find it under the current directory. - build_dir = Path(".") + return Path(".") + + def is_cmake_artifact_used(self, installer: "InstallerBuildExt") -> bool: + cache_path = str(self._get_build_dir(installer) / "CMakeCache.txt") + if not os.path.exists(cache_path): + # If this is not a CMake folder, then assume it's used. + return True + elif self.cmake_cache is None: + self.cmake_cache = CMakeCache(cache_path=cache_path) + + return all( + self.cmake_cache.is_enabled(flag) for flag in self.dependent_cmake_flags + ) + + def src_path(self, installer: "InstallerBuildExt") -> Path: + """Returns the path to the source file, resolving globs. + + Args: + installer: The InstallerBuildExt instance that is installing the + file. + """ + build_dir = self._get_build_dir(installer) src_path = self.src.replace("%CMAKE_CACHE_DIR%/", "") @@ -343,6 +289,7 @@ def __init__( src_dir: str, src_name: str, dst: str, + dependent_cmake_flags: List[str], is_executable: bool = False, is_dynamic_lib: bool = False, ): @@ -374,7 +321,12 @@ def __init__( # This is not a real extension, so use a unique name that doesn't look # like a module path. Some of setuptools's autodiscovery will look for # extension names with prefixes that match certain module paths. - super().__init__(src=src, dst=dst, name=f"@EXECUTORCH_BuiltFile_{src}:{dst}") + super().__init__( + src=src, + dst=dst, + name=f"@EXECUTORCH_BuiltFile_{src}:{dst}", + dependent_cmake_flags=dependent_cmake_flags, + ) def dst_path(self, installer: "InstallerBuildExt") -> Path: """Returns the path to the destination file. @@ -409,7 +361,13 @@ def inplace_dir(self, installer: "InstallerBuildExt") -> Path: class BuiltExtension(_BaseExtension): """An extension that installs a python extension that was built by cmake.""" - def __init__(self, src: str, modpath: str, src_dir: Optional[str] = None): + def __init__( + self, + src: str, + modpath: str, + dependent_cmake_flags: List[str], + src_dir: Optional[str] = None, + ): """Initializes a BuiltExtension. 
Args: @@ -427,12 +385,18 @@ def __init__(self, src: str, modpath: str, src_dir: Optional[str] = None): "/" not in modpath ), f"modpath must be a dotted python module path: saw '{modpath}'" full_src = src - if src_dir is None and platform.system() == "Windows": + if src_dir is None and _is_windows(): src_dir = "%BUILD_TYPE%/" if src_dir is not None: full_src = os.path.join(src_dir, src) + self.dependent_cmake_flags = dependent_cmake_flags # This is a real extension, so use the modpath as the name. - super().__init__(src=f"%CMAKE_CACHE_DIR%/{full_src}", dst=modpath, name=modpath) + super().__init__( + src=f"%CMAKE_CACHE_DIR%/{full_src}", + dst=modpath, + name=modpath, + dependent_cmake_flags=self.dependent_cmake_flags, + ) def src_path(self, installer: "InstallerBuildExt") -> Path: """Returns the path to the source file, resolving globs. @@ -448,9 +412,11 @@ def src_path(self, installer: "InstallerBuildExt") -> Path: # looking for a .dylib file instead, in case we're running on macos. if self.src.endswith(".so"): dylib_src = re.sub(r"\.so$", ".dylib", self.src) - return BuiltExtension(src=dylib_src, modpath=self.dst).src_path( - installer - ) + return BuiltExtension( + src=dylib_src, + modpath=self.dst, + dependent_cmake_flags=self.dependent_cmake_flags, + ).src_path(installer) else: raise @@ -506,6 +472,9 @@ def copy_extensions_to_source(self) -> None: Returns: """ for ext in self.extensions: + if not ext.is_cmake_artifact_used(self): + continue + package_dir = ext.inplace_dir(self) # Ensure that the destination directory exists. @@ -531,6 +500,9 @@ def copy_extensions_to_source(self) -> None: # TODO(dbort): Depend on the "build" command to ensure it runs first def build_extension(self, ext: _BaseExtension) -> None: + if not ext.is_cmake_artifact_used(self): + return + src_file: Path = ext.src_path(self) dst_file: Path = ext.dst_path(self) @@ -678,237 +650,169 @@ def initialize_options(self): # setuptools/_distutils/command/build.py for the default. self.build_base = "pip-out" - # Default build parallelism based on number of cores, but allow - # overriding through the environment. - default_parallel = str(os.cpu_count() - 1) - self.parallel = os.environ.get("CMAKE_BUILD_PARALLEL_LEVEL", default_parallel) - def run(self): # noqa C901 self.dump_options() - - cfg = get_build_type(self.debug) - + cmake_build_type = get_build_type(self.debug) # get_python_lib() typically returns the path to site-packages, where # all pip packages in the environment are installed. cmake_prefix_path = os.environ.get("CMAKE_PREFIX_PATH", get_python_lib()) - - # The root of the repo should be the current working directory. Get - # the absolute path. - repo_root = os.fspath(Path.cwd()) - - # If blank, the cmake build system will find an appropriate binary. buck2 = os.environ.get( "BUCK2_EXECUTABLE", os.environ.get("BUCK2", os.environ.get("BUCK", "")) ) + # Put the cmake cache under the temp directory, like + # "pip-out/temp./cmake-out". + pip_build_dir = os.path.join( + os.path.dirname(os.path.abspath(__file__)), self.build_temp + ) + cmake_cache_dir = os.path.join(pip_build_dir, "cmake-out") + self.mkpath(cmake_cache_dir) - cmake_args = [ + cmake_configuration_args = [ f"-DBUCK2={buck2}", f"-DPYTHON_EXECUTABLE={sys.executable}", # Let cmake calls like `find_package(Torch)` find cmake config files # like `TorchConfig.cmake` that are provided by pip packages. f"-DCMAKE_PREFIX_PATH={cmake_prefix_path}", - f"-DCMAKE_BUILD_TYPE={cfg}", - # Enable logging even when in release mode. 
We are building for - # desktop, where saving a few kB is less important than showing - # useful error information to users. - "-DEXECUTORCH_ENABLE_LOGGING=ON", - "-DEXECUTORCH_LOG_LEVEL=Info", - "-DCMAKE_OSX_DEPLOYMENT_TARGET=10.15", - "-DEXECUTORCH_BUILD_TESTS=ON", + f"-DCMAKE_BUILD_TYPE={cmake_build_type}", ] - build_args = [f"-j{self.parallel}"] - - if ShouldBuild.pybindings(): - cmake_args += [ - "-DEXECUTORCH_BUILD_PYBIND=ON", - "-DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON", # add quantized ops to pybindings. - "-DEXECUTORCH_BUILD_KERNELS_QUANTIZED_AOT=ON", - ] - - if ShouldBuild.xnnpack(): - cmake_args += ["-DEXECUTORCH_BUILD_XNNPACK=ON"] - - if ShouldBuild.neutron(): - cmake_args += ["-DEXECUTORCH_BUILD_NEUTRON=ON"] - - if ShouldBuild.training(): - build_args += ["--target", "_training_lib"] - - if ShouldBuild.coreml(): - cmake_args += ["-DEXECUTORCH_BUILD_COREML=ON"] - build_args += ["--target", "executorchcoreml"] - - build_args += ["--target", "portable_lib"] - # To link backends into the portable_lib target, callers should - # add entries like `-DEXECUTORCH_BUILD_XNNPACK=ON` to the CMAKE_ARGS - # environment variable. - - if ShouldBuild.llama_custom_ops(): - cmake_args += [ - "-DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON", # add llama sdpa ops to pybindings. - "-DEXECUTORCH_BUILD_KERNELS_CUSTOM_AOT=ON", - "-DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON", # add quantized ops to pybindings. - "-DEXECUTORCH_BUILD_KERNELS_QUANTIZED_AOT=ON", - ] - build_args += ["--target", "custom_ops_aot_lib"] - build_args += ["--target", "quantized_ops_aot_lib"] # Allow adding extra cmake args through the environment. Used by some # tests and demos to expand the set of targets included in the pip # package. - if "CMAKE_ARGS" in os.environ: - cmake_args += [item for item in os.environ["CMAKE_ARGS"].split(" ") if item] + cmake_configuration_args += [ + item for item in re.split(r"\s+", os.environ.get("CMAKE_ARGS", "")) if item + ] + + with Buck2EnvironmentFixer(): + # Generate the cmake cache from scratch to ensure that the cache state + # is predictable. + if os.path.exists(cmake_cache_dir): + log.info(f"clearing {cmake_cache_dir}") + shutil.rmtree(cmake_cache_dir) + + subprocess.run( + [ + "cmake", + *cmake_configuration_args, + "--preset", + "pybind", + "-B", + cmake_cache_dir, + ], + check=True, + ) + + cmake_cache = CMakeCache( + cache_path=os.path.join(cmake_cache_dir, "CMakeCache.txt") + ) + cmake_build_args = [ + # Default build parallelism based on number of cores, but allow + # overriding through the environment. + "-j{parallelism}".format( + parallelism=os.environ.get( + "CMAKE_BUILD_PARALLEL_LEVEL", os.cpu_count() - 1 + ) + ), + # CMAKE_BUILD_TYPE variable specifies the build type (configuration) for + # single-configuration generators (e.g., Makefile Generators or Ninja). + # For multi-config generators (like Visual Studio), CMAKE_BUILD_TYPE + # isn’t directly applicable. + # During the build step, --config specifies the configuration to build + # for multi-config generators. + f"--config={cmake_build_type}", + ] # Allow adding extra build args through the environment. Used by some # tests and demos to expand the set of targets included in the pip # package. - if "CMAKE_BUILD_ARGS" in os.environ: - build_args += [ - item for item in os.environ["CMAKE_BUILD_ARGS"].split(" ") if item - ] - - # CMAKE_BUILD_TYPE variable specifies the build type (configuration) for - # single-configuration generators (e.g., Makefile Generators or Ninja). 
- # For multi-config generators (like Visual Studio), CMAKE_BUILD_TYPE - # isn’t directly applicable. - # During the build step, --config specifies the configuration to build - # for multi-config generators. - build_args += ["--config", cfg] + cmake_build_args += [ + item + for item in re.split(r"\s+", os.environ.get("CMAKE_BUILD_ARGS", "")) + if item + ] - # Put the cmake cache under the temp directory, like - # "pip-out/temp./cmake-out". - cmake_cache_dir = os.path.join(repo_root, self.build_temp, "cmake-out") - self.mkpath(cmake_cache_dir) + if cmake_cache.is_enabled("EXECUTORCH_BUILD_PYBIND"): + cmake_build_args += ["--target", "portable_lib"] + + if cmake_cache.is_enabled("EXECUTORCH_BUILD_EXTENSION_TRAINING"): + cmake_build_args += ["--target", "_training_lib"] + + if cmake_cache.is_enabled("EXECUTORCH_BUILD_COREML"): + cmake_build_args += ["--target", "executorchcoreml"] + + if cmake_cache.is_enabled("EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT"): + cmake_build_args += ["--target", "custom_ops_aot_lib"] + cmake_build_args += ["--target", "quantized_ops_aot_lib"] - # Generate the cmake cache from scratch to ensure that the cache state - # is predictable. - cmake_cache_file = Path(cmake_cache_dir) / "CMakeCache.txt" - log.info(f"deleting {cmake_cache_file}") - if not self.dry_run: - # Dry run should log the command but not actually run it. - (Path(cmake_cache_dir) / "CMakeCache.txt").unlink(missing_ok=True) # Set PYTHONPATH to the location of the pip package. os.environ["PYTHONPATH"] = ( site.getsitepackages()[0] + ";" + os.environ.get("PYTHONPATH", "") ) - with Buck2EnvironmentFixer(): - # The context manager may patch the environment while running this - # cmake command, which happens to run buck2 to get some source - # lists. - - # Generate the build system files. - try: - subprocess.run( - ["cmake", "-S", repo_root, "-B", cmake_cache_dir, *cmake_args], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=True, - text=True, - ) - except subprocess.CalledProcessError as e: - error = str(e.stderr) - # Our educated guesses from parsing the error message. - # Missing source file, could be related to git submodules not synced or cmake cache is outdated - additional_log = "" - if "Cannot find source file" in error: - additional_log = ( - "\033[31;1mEither CMake cache is outdated or git submodules are not synced.\n" - "Please run the following before retry:\033[0m\n" - " \033[32;1m./install_executorch.sh --clean\033[0m\n" - " \033[32;1mgit submodule sync\033[0m\n" - " \033[32;1mgit submodule update --init\033[0m\n" - ) - raise Exception(error + "\n" + additional_log) from e - # Build the system. - self.spawn(["cmake", "--build", cmake_cache_dir, *build_args]) - + self.spawn(["cmake", "--build", cmake_cache_dir, *cmake_build_args]) # Share the cmake-out location with _BaseExtension. self.cmake_cache_dir = cmake_cache_dir - # Finally, run the underlying subcommands like build_py, build_ext. build.run(self) -def get_ext_modules() -> List[Extension]: - """Returns the set of extension modules to build.""" - - ext_modules = [ +setup( + version=Version.string(), + cmdclass={ + "build": CustomBuild, + "build_ext": InstallerBuildExt, + "build_py": CustomBuildPy, + }, + # Note that setuptools uses the presence of ext_modules as the main signal + # that a wheel is platform-specific. If we install any platform-specific + # files, this list must be non-empty. Therefore, we should always install + # platform-specific files using InstallerBuildExt. 
+ ext_modules=[ BuiltFile( src_dir="%CMAKE_CACHE_DIR%/third-party/flatbuffers_external_project/bin/", src_name="flatc", dst="executorch/data/bin/", is_executable=True, + dependent_cmake_flags=[], ), BuiltFile( src_dir="tools/wheel", src_name="pip_data_bin_init.py.in", dst="executorch/data/bin/__init__.py", + dependent_cmake_flags=[], ), - ] - - if ShouldBuild.pybindings(): - ext_modules.append( - # Install the prebuilt pybindings extension wrapper for the runtime, - # portable kernels, and a selection of backends. This lets users - # load and execute .pte files from python. - BuiltExtension( - ( - "_portable_lib.cp*" - if platform.system() == "Windows" - else "_portable_lib.*" - ), - "executorch.extension.pybindings._portable_lib", - ) - ) - if ShouldBuild.training(): - ext_modules.append( - # Install the prebuilt pybindings extension wrapper for training - BuiltExtension( - "extension/training/_training_lib.*", - "executorch.extension.training.pybindings._training_lib", - ) - ) - if ShouldBuild.coreml(): - ext_modules.append( - BuiltExtension( - src="executorchcoreml.*", - src_dir="backends/apple/coreml", - modpath="executorch.backends.apple.coreml.executorchcoreml", - ) - ) - if ShouldBuild.llama_custom_ops(): - ext_modules.append( - BuiltFile( - src_dir="%CMAKE_CACHE_DIR%/extension/llm/custom_ops/%BUILD_TYPE%/", - src_name="custom_ops_aot_lib", - dst="executorch/extension/llm/custom_ops/", - is_dynamic_lib=True, - ) - ) - ext_modules.append( - # Install the prebuilt library for quantized ops required by custom ops. - BuiltFile( - src_dir="%CMAKE_CACHE_DIR%/kernels/quantized/%BUILD_TYPE%/", - src_name="quantized_ops_aot_lib", - dst="executorch/kernels/quantized/", - is_dynamic_lib=True, - ) - ) - - # Note that setuptools uses the presence of ext_modules as the main signal - # that a wheel is platform-specific. If we install any platform-specific - # files, this list must be non-empty. Therefore, we should always install - # platform-specific files using InstallerBuildExt. - return ext_modules - - -setup( - version=Version.string(), - cmdclass={ - "build": CustomBuild, - "build_ext": InstallerBuildExt, - "build_py": CustomBuildPy, - }, - ext_modules=get_ext_modules(), + # Install the prebuilt pybindings extension wrapper for the runtime, + # portable kernels, and a selection of backends. This lets users + # load and execute .pte files from python. 
+ BuiltExtension( + src="_portable_lib.cp*" if _is_windows() else "_portable_lib.*", + modpath="executorch.extension.pybindings._portable_lib", + dependent_cmake_flags=["EXECUTORCH_BUILD_PYBIND"], + ), + BuiltExtension( + src="extension/training/_training_lib.*", # @lint-ignore https://github.com/pytorch/executorch/blob/cb3eba0d7f630bc8cec0a9cc1df8ae2f17af3f7a/scripts/lint_xrefs.sh + modpath="executorch.extension.training.pybindings._training_lib", + dependent_cmake_flags=["EXECUTORCH_BUILD_EXTENSION_TRAINING"], + ), + BuiltExtension( + src="executorchcoreml.*", + src_dir="backends/apple/coreml", + modpath="executorch.backends.apple.coreml.executorchcoreml", + dependent_cmake_flags=["EXECUTORCH_BUILD_COREML"], + ), + BuiltFile( + src_dir="%CMAKE_CACHE_DIR%/extension/llm/custom_ops/%BUILD_TYPE%/", + src_name="custom_ops_aot_lib", + dst="executorch/extension/llm/custom_ops/", + is_dynamic_lib=True, + dependent_cmake_flags=["EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT"], + ), + BuiltFile( + src_dir="%CMAKE_CACHE_DIR%/kernels/quantized/%BUILD_TYPE%/", + src_name="quantized_ops_aot_lib", + dst="executorch/kernels/quantized/", + is_dynamic_lib=True, + dependent_cmake_flags=["EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT"], + ), + ], ) diff --git a/tools/cmake/cmake_cache.py b/tools/cmake/cmake_cache.py new file mode 100644 index 00000000000..2b249ea3244 --- /dev/null +++ b/tools/cmake/cmake_cache.py @@ -0,0 +1,57 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the BSD-style license found in the +# LICENSE file in the root directory of this source tree. + +from dataclasses import dataclass +from typing import Dict, Optional + +_FALSE_VALUES = {"off", "0", "", "no"} + + +@dataclass +class CacheValue: + value_type: str + value: str + + +@dataclass +class CMakeCache: + # The path to the CMakeCache.txt file. + cache_path: str + + def __post_init__(self): + self.cache = CMakeCache.read_cmake_cache(cache_path=self.cache_path) + + def get(self, var: str) -> Optional[CacheValue]: + return self.cache.get(var) + + def is_enabled(self, var: str, fallback: bool = False) -> bool: + definition = self.get(var) + if definition is None: + return fallback + return CMakeCache._is_truthy(definition.value) + + @staticmethod + def _is_truthy(value: Optional[str]) -> bool: + if (value is None) or (value.lower().strip() in _FALSE_VALUES): + return False + return True + + @staticmethod + def read_cmake_cache(cache_path: str) -> Dict[str, CacheValue]: + result = {} + with open(cache_path, "r") as cache_file: + for line in cache_file: + line = line.strip() + if "=" in line: + key, value = line.split("=", 1) + value_type = "" + if ":" in key: + key, value_type = key.split(":") + result[key.strip()] = CacheValue( + value_type=value_type, + value=value.strip(), + ) + return result diff --git a/tools/cmake/common/__init__.py b/tools/cmake/common/__init__.py index 5edaa5cfeaa..ed02b39e44d 100644 --- a/tools/cmake/common/__init__.py +++ b/tools/cmake/common/__init__.py @@ -9,10 +9,11 @@ import subprocess import tempfile import unittest -from dataclasses import dataclass from functools import cache from typing import Any, Dict, List, Optional +from tools.cmake.cmake_cache import CMakeCache + # Files to copy from this directory into the temporary workspaces. 
TESTABLE_CMAKE_FILES = [ "preset.cmake", @@ -67,31 +68,6 @@ def _create_file_tree(tree: Dict[Any, Any], cwd: str) -> None: raise AssertionError("invalid tree value", value) -@dataclass -class _CacheValue: - value_type: str - value: str - - -# Get the key/value pair listed in a CMakeCache.txt file. -@cache -def _list_cmake_cache(cache_path: str) -> Dict[str, _CacheValue]: - result = {} - with open(cache_path, "r") as cache_file: - for line in cache_file: - line = line.strip() - if "=" in line: - key, value = line.split("=", 1) - value_type = "" - if ":" in key: - key, value_type = key.split(":") - result[key.strip()] = _CacheValue( - value_type=value_type, - value=value.strip(), - ) - return result - - class CMakeTestCase(unittest.TestCase): def tearDown(self) -> None: @@ -157,12 +133,10 @@ def assert_cmake_cache( expected_value: str, expected_type: str, ): - cache = _list_cmake_cache( - os.path.join(self.workspace, "cmake-out", "CMakeCache.txt") - ) + cache = CMakeCache(os.path.join(self.workspace, "cmake-out", "CMakeCache.txt")) self.assertEqual( - cache[key].value, expected_value, f"unexpected value for {key}" + cache.get(key).value, expected_value, f"unexpected value for {key}" ) self.assertEqual( - cache[key].value_type, expected_type, f"unexpected value type for {key}" + cache.get(key).value_type, expected_type, f"unexpected value type for {key}" ) diff --git a/tools/cmake/common/preset.cmake b/tools/cmake/common/preset.cmake index 796f4615681..96706380e2c 100644 --- a/tools/cmake/common/preset.cmake +++ b/tools/cmake/common/preset.cmake @@ -39,7 +39,7 @@ function(print_configured_options) endif() endforeach() - message(STATUS "--- Configurated Options ---\n") + message(STATUS "--- Configured Options ---\n") foreach(_option IN LISTS _options) string(LENGTH "${_option}" _option_length) math(EXPR num_spaces "${_longest_name_length} - ${_option_length}") @@ -54,7 +54,7 @@ function(print_configured_options) message(STATUS "${_option}${padding} x (unset)") endif() endforeach() - message(STATUS "---------------------------") + message(STATUS "--------------------------") endfunction() diff --git a/tools/cmake/common/preset_test.py b/tools/cmake/common/preset_test.py index ccb3efb2f88..a3d27e87946 100644 --- a/tools/cmake/common/preset_test.py +++ b/tools/cmake/common/preset_test.py @@ -8,8 +8,6 @@ from tools.cmake.common import CMakeTestCase, TESTABLE_CMAKE_FILES -from . import _list_cmake_cache - class TestPreset(CMakeTestCase): @@ -224,9 +222,6 @@ def test_define_overridable_option_override_existing_cache_with_cli(self): self.run_cmake() self.assert_cmake_cache("EXECUTORCH_TEST_MESSAGE", "default value", "STRING") - # Since we cache the get operations, clear it so that it's read again for tests. - _list_cmake_cache.cache_clear() - self.run_cmake(cmake_args=["-DEXECUTORCH_TEST_MESSAGE='cli value'"]) self.assert_cmake_cache("EXECUTORCH_TEST_MESSAGE", "cli value", "STRING") diff --git a/tools/cmake/preset/pybind.cmake b/tools/cmake/preset/pybind.cmake index 4795ae60c2c..7124a23a9c9 100644 --- a/tools/cmake/preset/pybind.cmake +++ b/tools/cmake/preset/pybind.cmake @@ -17,6 +17,7 @@ set_overridable_option(EXECUTORCH_BUILD_EXTENSION_TENSOR ON) set_overridable_option(EXECUTORCH_BUILD_EXTENSION_RUNNER_UTIL ON) set_overridable_option(EXECUTORCH_BUILD_KERNELS_CUSTOM ON) set_overridable_option(EXECUTORCH_BUILD_KERNELS_CUSTOM_AOT ON) +set_overridable_option(EXECUTORCH_BUILD_EXTENSION_FLAT_TENSOR ON) if(CMAKE_SYSTEM_NAME STREQUAL "Darwin")
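
The patch above wires the new tools/cmake/cmake_cache.py helper into setup.py so that packaging decisions are driven by the configured CMake cache instead of re-parsing CMAKE_ARGS. As an illustrative sketch (not part of the patch), the helper can also be queried directly after a configure step; this assumes a build tree at cmake-out/ that already contains a CMakeCache.txt, and uses only the API introduced in this diff (CMakeCache, get, is_enabled):

    import os
    import sys

    # The helper lives outside the installed package, so extend sys.path the
    # same way setup.py does when the package-level import fails.
    sys.path.insert(0, os.path.join("tools", "cmake"))
    from cmake_cache import CMakeCache

    cache = CMakeCache(cache_path=os.path.join("cmake-out", "CMakeCache.txt"))

    # is_enabled() treats "off", "0", "no", and the empty string as false, and
    # returns the fallback when the variable is absent from the cache.
    if cache.is_enabled("EXECUTORCH_BUILD_PYBIND"):
        print("pybindings configured: _portable_lib will be built and packaged")

    entry = cache.get("CMAKE_BUILD_TYPE")
    if entry is not None:
        print(f"build type: {entry.value} (cache entry type: {entry.value_type})")

This mirrors how InstallerBuildExt now calls is_cmake_artifact_used(): each BuiltFile/BuiltExtension declares the dependent_cmake_flags it needs, and any artifact whose flags are not enabled in CMakeCache.txt is skipped at packaging time.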