diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index ba3f1d36e4ed..2d88b4aaeb13 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -24,6 +24,7 @@ from .. import build from .. import mlog from .. import compilers +from ..compilers.cpp import CPPCompiler from .. import tooldetect from ..arglist import CompilerArgs from ..compilers import Compiler, is_library @@ -48,7 +49,6 @@ from ..compilers.rust import RustCompiler from ..mesonlib import FileOrString from .backends import TargetIntrospectionData - CommandArgOrStr = T.List[T.Union['NinjaCommandArg', str]] RUST_EDITIONS = Literal['2015', '2018', '2021'] @@ -493,6 +493,8 @@ def __init__(self, build: T.Optional[build.Build]): self.implicit_meson_outs: T.List[str] = [] self._uses_dyndeps = False self._generated_header_cache: T.Dict[str, T.List[FileOrString]] = {} + self._first_deps_dd_rule_generated = False + self._all_scan_sources = [] # nvcc chokes on thin archives: # nvlink fatal : Could not open input file 'libfoo.a.p' # nvlink fatal : elfLink internal error @@ -624,10 +626,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) num_pools = self.environment.coredata.optstore.get_value_for('backend_max_links') if num_pools > 0: - outfile.write(f'''pool link_pool - depth = {num_pools} - -''') + outfile.write(f'pool link_pool\n depth = {num_pools}\n\n') with self.detect_vs_dep_prefix(tempfilename) as outfile: self.generate_rules() @@ -645,6 +644,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) for t in ProgressBar(self.build.get_targets().values(), desc='Generating targets'): self.generate_target(t) + self.generate_global_dependency_scan_target() mlog.log_timestamp("Targets generated") self.add_build_comment(NinjaComment('Test rules')) self.generate_tests() @@ -1089,9 +1089,6 @@ def generate_target(self, target: T.Union[build.BuildTarget, build.CustomTarget, final_obj_list = self.generate_prelink(target, obj_list) else: final_obj_list = obj_list - - self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps) - if target.uses_rust(): self.generate_rust_target(target, outname, final_obj_list, fortran_order_deps) return @@ -1112,10 +1109,14 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool: return True if 'cpp' not in target.compilers: return False - if '-fmodules-ts' in target.extra_args['cpp']: + if '-fmodules' in target.extra_args['cpp']: return True # Currently only the preview version of Visual Studio is supported. 
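# Note on the branch below: a target opts into dynamic dependency scanning
# either explicitly (a modules flag such as '-fmodules' in its cpp extra_args)
# or implicitly when its C++ compiler is one the backend knows how to scan.
# For Clang the backend requires version 17 or newer, matching the
# clang-scan-deps/P1689-based scanner added elsewhere in this patch; older
# Clang versions raise a hard error rather than silently producing objects
# whose module dependencies ninja cannot order.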
cpp = target.compilers['cpp'] + if cpp.get_id() == 'clang': + if not mesonlib.version_compare(cpp.version, '>=17'): + raise MesonException('C++20 modules require Clang 17 or newer.') + return True if cpp.get_id() != 'msvc': return False cppversion = self.get_target_option(target, OptionKey('cpp_std', @@ -1136,47 +1137,31 @@ def generate_dependency_scan_target(self, target: build.BuildTarget, if not self.should_use_dyndeps_for_target(target): return self._uses_dyndeps = True - json_file, depscan_file = self.get_dep_scan_file_for(target) - pickle_base = target.name + '.dat' - pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/') - pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/') - rule_name = 'depscan' - scan_sources = list(self.select_sources_to_scan(compiled_sources)) - - scaninfo = TargetDependencyScannerInfo( - self.get_target_private_dir(target), source2object, scan_sources) - - write = True - if os.path.exists(pickle_abs): - with open(pickle_abs, 'rb') as p: - old = pickle.load(p) - write = old != scaninfo - - if write: - with open(pickle_abs, 'wb') as p: - pickle.dump(scaninfo, p) - - elem = NinjaBuildElement(self.all_outputs, json_file, rule_name, pickle_file) - # A full dependency is required on all scanned sources, if any of them - # are updated we need to rescan, as they may have changed the modules - # they use or export. - for s in scan_sources: - elem.deps.add(s[0]) - elem.orderdeps.update(object_deps) - elem.add_item('name', target.name) - self.add_build(elem) - - infiles: T.Set[str] = set() - for t in target.get_all_linked_targets(): - if self.should_use_dyndeps_for_target(t): - infiles.add(self.get_dep_scan_file_for(t)[0]) - _, od = self.flatten_object_list(target) - infiles.update({self.get_dep_scan_file_for(t)[0] for t in od if t.uses_fortran()}) - - elem = NinjaBuildElement(self.all_outputs, depscan_file, 'depaccumulate', [json_file] + sorted(infiles)) - elem.add_item('name', target.name) - self.add_build(elem) + if not self._first_deps_dd_rule_generated: + self._first_deps_dd_rule_generated = True + self.generate_project_wide_cpp_scanner_rules() + rule_name = 'depscanaccumulate' + elem = NinjaBuildElement(self.all_outputs, "deps.dd", rule_name, "compile_commands.json") + self.add_build(elem) + def generate_project_wide_cpp_scanner_rules(self) -> None: + rulename = 'depscanaccumulate' + if rulename in self.ruledict: + # Scanning command is the same for native and cross compilation. + return + command = self.environment.get_build_command() + \ + ['--internal', 'depscanaccumulate'] + args = ['$in', 'deps.json', '$out'] + description = 'Scanning project for modules' + rule = NinjaRule(rulename, command, args, description) + self.add_rule(rule) + def generate_global_dependency_scan_target(self) -> None: + self._uses_dyndeps = True + self.generate_project_wide_cpp_scanner_rules() + rule_name = 'depscanaccumulate' + elem = NinjaBuildElement(self.all_outputs, "deps.dd", rule_name, "compile_commands.json") + elem.add_dep(self._all_scan_sources) + self.add_build(elem) def select_sources_to_scan(self, compiled_sources: T.List[str], ) -> T.Iterable[T.Tuple[str, Literal['cpp', 'fortran']]]: # in practice pick up C++ and Fortran files. If some other language @@ -2712,21 +2697,7 @@ def generate_scanner_rules(self) -> None: if rulename in self.ruledict: # Scanning command is the same for native and cross compilation. 
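# With this change there is no longer a per-target depscan/depaccumulate pair;
# generate_scanner_rules() only delegates to the project-wide rule.  Per
# generate_project_wide_cpp_scanner_rules above, that rule expands to ninja
# text roughly like the following (the exact command prefix depends on how
# meson was invoked, so <meson-command> is a placeholder):
#
#   rule depscanaccumulate
#     command = <meson-command> --internal depscanaccumulate $in deps.json $out
#     description = Scanning project for modules
#
#   build deps.dd: depscanaccumulate compile_commands.json
#
# Every scanned compile then declares 'dyndep = deps.dd', so a single scan of
# compile_commands.json feeds module ordering for the whole build.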
return - - command = self.environment.get_build_command() + \ - ['--internal', 'depscan'] - args = ['$picklefile', '$out', '$in'] - description = 'Scanning target $name for modules' - rule = NinjaRule(rulename, command, args, description) - self.add_rule(rule) - - rulename = 'depaccumulate' - command = self.environment.get_build_command() + \ - ['--internal', 'depaccumulate'] - args = ['$out', '$in'] - description = 'Generating dynamic dependency information for target $name' - rule = NinjaRule(rulename, command, args, description) - self.add_rule(rule) + self.generate_project_wide_cpp_scanner_rules() def generate_compile_rules(self) -> None: for for_machine in MachineChoice: @@ -3121,7 +3092,12 @@ def generate_common_compile_args_per_src_type(self, target: build.BuildTarget) - src_type_to_args[src_type_str] = commands.to_native() return src_type_to_args - + def _get_cpp_module_output_name(self, src_basename: str, + compiler: CPPCompiler, + target: build.BuildTarget): + if not src_basename.endswith('.cppm'): + return 'dummy' + return src_basename.replace('.cppm', '.pcm') def generate_single_compile(self, target: build.BuildTarget, src, is_generated: bool = False, header_deps=None, order_deps: T.Optional[T.List[FileOrString]] = None, @@ -3270,6 +3246,16 @@ def quote_make_target(targetName: str) -> str: result += c return result element.add_item('CUDA_ESCAPED_TARGET', quote_make_target(rel_obj)) + if self.should_use_dyndeps_for_target(target) and compiler.get_language() == 'cpp' and compiler.get_id() == 'clang': + src_basename = os.path.basename(src.fname) + mod_output = self._get_cpp_module_output_name(src_basename, compiler, target) + build_dir = self.environment.get_build_dir() + commands.extend([ + '--start-no-unused-arguments', + f'-fmodule-output={mod_output}', + f'-fprebuilt-module-path={build_dir}', + '--end-no-unused-arguments' + ]) element.add_item('ARGS', commands) self.add_dependency_scanner_entries_to_element(target, compiler, element, src) @@ -3288,7 +3274,7 @@ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, c extension = extension.lower() if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']): return - dep_scan_file = self.get_dep_scan_file_for(target)[1] + dep_scan_file = 'deps.dd' element.add_item('dyndep', dep_scan_file) element.add_orderdep(dep_scan_file) diff --git a/mesonbuild/build.py b/mesonbuild/build.py index 777eca164d97..db9f86e36267 100644 --- a/mesonbuild/build.py +++ b/mesonbuild/build.py @@ -1745,7 +1745,8 @@ def get_used_stdlib_args(self, link_language: str) -> T.List[str]: # subproject stdlib_args.extend(all_compilers[dl].language_stdlib_only_link_flags(self.environment)) return stdlib_args - + def uses_cpp(self) -> bool: + return 'cpp' in self.compilers def uses_rust(self) -> bool: return 'rust' in self.compilers diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index 91a1f04ab999..dafa5a6f4f92 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -4,13 +4,16 @@ from __future__ import annotations import functools +import os import os.path +import re import typing as T +import json from .. import options from .. 
import mlog -from ..mesonlib import MesonException, version_compare - +from ..mesonlib import (File, MesonException, MesonBugException, Popen_safe_logged, + version_compare) from .compilers import ( gnu_winlibs, msvc_winlibs, @@ -89,7 +92,60 @@ def get_no_stdlib_link_args(self) -> T.List[str]: def sanity_check(self, work_dir: str, environment: 'Environment') -> None: code = 'class breakCCompiler;int main(void) { return 0; }\n' - return self._sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code) + self._sanity_check_impl(work_dir, environment, 'sanitycheckcpp.cc', code) + if environment.coredata.optstore.get_value_for('cpp_import_std'): + self._import_cpp_std_sanity_check(work_dir, environment) + + def compile_import_std_module(self, + env: 'Environment', + code: File): + cpp_std = env.coredata.optstore.get_value_for('cpp_std') + srcname = code.fname + # Construct the compiler command-line + commands = self.compiler_args() + commands.append(f"-std={cpp_std}") + commands.extend(['-Wno-reserved-identifier', '-Wno-reserved-module-identifier']) + commands.append("--precompile") + + all_lists_to_add = [self.get_always_args(), self.get_debug_args(env.coredata.optstore.get_value_for('buildtype') == 'debug'), + self.get_assert_args(disable=env.coredata.optstore.get_value_for('b_ndebug') in ['if-release', 'true'], + env=env)] + for args_list in all_lists_to_add: + for arg in args_list: + commands.append(arg) + commands.append(srcname) + tmpdirname = env.build_dir + + # Preprocess mode outputs to stdout, so no output args + print(f"***{self.get_exelist()}") + output = f'std{self.get_cpp20_module_bmi_extension()}' + commands += self.get_output_args(output) + no_ccache = True + os_env = os.environ.copy() + os_env['LC_ALL'] = 'C' + os_env['CCACHE_DISABLE'] = '1' + command_list = self.get_exelist(ccache=not no_ccache) + commands.to_native() + p, stdo, stde = Popen_safe_logged(command_list, msg="Command line for compiling 'import std' feature", cwd=tmpdirname, env=os_env) + if p.returncode != 0: + raise MesonException("Could not compile library for use with 'import std'") + + def get_import_std_lib_source_args(self, env: Environment) -> T.List[str]: + raise MesonException("Your compiler does not support 'import std' feature or it has not been implemented") + + def get_import_std_lib_source_file(self, env: Environment) -> str: + raise MesonException("Your compiler does not support 'import std' feature or it has not been implemented") + + def get_cpp20_module_bmi_extension(self) -> str: + raise MesonException("Your compiler does not support 'import std' feature or it has not been implemented") + + def get_import_std_compile_args(self, environment: 'Environment') -> T.List[str]: + raise MesonException("Your compiler does not support 'import std' feature or it has not been implemented") + + def check_cpp_import_std_support(self): + raise MesonException("Your compiler does not support 'import std' feature or it has not been implemented") + + def _import_cpp_std_sanity_check(self, work_dir: str, environment: 'Environment') -> None: + self.check_cpp_import_std_support() def get_compiler_check_args(self, mode: CompileCheckMode) -> T.List[str]: # -fpermissive allows non-conforming code to compile which is necessary @@ -175,8 +231,10 @@ def _find_best_cpp_std(self, cpp_std: str) -> str: def get_options(self) -> 'MutableKeyedOptionDictType': opts = super().get_options() key = self.form_compileropt_key('std') + import_std_key = self.form_compileropt_key('import_std') opts.update({ key: 
options.UserStdOption('cpp', ALL_STDS), + import_std_key: options.UseImportStd('cpp') }) return opts @@ -236,6 +294,59 @@ def __init__(self, ccache: T.List[str], exelist: T.List[str], version: str, for_ '3': default_warn_args + ['-Wextra', '-Wpedantic'], 'everything': ['-Weverything']} + def check_cpp_import_std_support(self): + if int(self.version.split('.')[0]) < 17: + raise MesonException('Your compiler does not support import std feature. Clang support starts at version >= 17') + + def get_import_std_compile_args(self, env: 'Environment') -> T.List[str]: + bmi_path = f'{env.get_build_dir()}/std{self.get_cpp20_module_bmi_extension()}' + return [f'-fmodule-file=std={bmi_path}'] + + def get_cpp20_module_bmi_extension(self) -> str: + return '.pcm' + + def get_import_std_lib_source_args(self, env: Environment) -> T.List[str]: + cpp_std = env.coredata.optstore.get_value_for('cpp_std') + args = [f'-std={cpp_std}', + '-Wno-reserved-identifier', + '-Wno-reserved-module-identifier', + '--precompile'] + + # Add external compile args (includes) + cpp_compile_args = env.coredata.get_external_args(self.for_machine, self.language) + args.extend(cpp_compile_args) + + # Add external link args (library paths) + cpp_link_args = env.coredata.get_external_link_args(self.for_machine, self.language) + args.extend(cpp_link_args) + return args + + + llvm_dir_re = re.compile(r'(/\D*/(?:\.?\d+)+)/.*') + + def get_import_std_lib_source_file(self, env: Environment) -> str: + # Get user-provided link args (these should override compiler defaults) + cpp_link_args = env.coredata.get_external_link_args(self.for_machine, self.language) + link_dirs = [] + for arg in cpp_link_args: + if arg.startswith('-L'): + lib_path = arg[2:] + link_dirs.append(lib_path) + link_dirs.extend(self.get_library_dirs(env)) + for link_dir in link_dirs: + modules_json_path = os.path.join(link_dir, 'libc++.modules.json') + if os.path.exists(modules_json_path): + with open(modules_json_path, 'r') as f: + modules_data = json.load(f) + for module in modules_data.get('modules', []): + if module.get('logical-name') == 'std': + source_path = module.get('source-path') + if source_path: + abs_path = os.path.normpath(os.path.join(link_dir, source_path)) + if os.path.exists(abs_path): + return abs_path + raise MesonBugException('Could not find libc++.modules.json or std.cppm in link directories') + def get_options(self) -> 'MutableKeyedOptionDictType': opts = super().get_options() diff --git a/mesonbuild/interpreter/interpreter.py b/mesonbuild/interpreter/interpreter.py index 0f22c77a511e..477a2a4abee7 100644 --- a/mesonbuild/interpreter/interpreter.py +++ b/mesonbuild/interpreter/interpreter.py @@ -289,6 +289,53 @@ def __init__( self.compilers: PerMachine[T.Dict[str, 'compilers.Compiler']] = PerMachine({}, {}) self.parse_project() self._redetect_machines() + self._cpp_import_std_bmi_dep = None + if self.coredata.optstore.get_value_for('cpp_import_std') and self.subproject == "": + self._cpp_import_std_bmi_dep = self._create_cpp_import_std_dep(self.environment) + + def _create_cpp_import_std_dep(self, env: environment.Environment): + compiler_to_use: T.Optional[compilers.cpp.CPPCompiler] = None + for comp_lang, compiler in self.compilers.host.items(): + if comp_lang == 'cpp': + compiler_to_use = T.cast(compilers.cpp.CPPCompiler, compiler) + if not compiler_to_use: + raise MesonException('cpp_import_std option is set to true but no cpp compiler could be found.' 
+ ' Enable cpp language in your project to use this feature.') + # Construct the compiler command-line + commands = compiler_to_use.compiler_args() + commands.extend(compiler_to_use.get_import_std_lib_source_args(self.environment)) + all_lists_to_add = [compiler_to_use.get_always_args(), compiler_to_use.get_debug_args(env.coredata.optstore.get_value_for('buildtype') == 'debug'), + compiler_to_use.get_assert_args(disable=env.coredata.optstore.get_value_for('b_ndebug') in ['if-release', 'true'], + env=env)] + for args_list in all_lists_to_add: + for arg in args_list: + commands.append(arg) + commands.append("-o") + commands.append("@OUTPUT@") + commands.append("@INPUT@") + no_ccache = True + command_list = compiler_to_use.get_exelist(ccache=not no_ccache) + commands.to_native() + tgt = build.CustomTarget('', + '', '', self.environment, command_list, + sources=[compiler_to_use.get_import_std_lib_source_file(self.environment)], + outputs=[f'std{compiler_to_use.get_cpp20_module_bmi_extension()}']) + self.add_target('_cpp_import_std_bmi', tgt) + bmi_dep = dependencies.InternalDependency( + version='0.0', + incdirs=[], + compile_args=compiler_to_use.get_import_std_compile_args(self.environment), + # compile_args=[], + link_args=[], + libraries=[], + whole_libraries=[], + sources=[tgt], + extra_files=[], + ext_deps=[], + variables=[], + d_module_versions=[], + d_import_dirs=[], + objects=[]) + return bmi_dep def __getnewargs_ex__(self) -> T.Tuple[T.Tuple[object], T.Dict[str, object]]: raise MesonBugException('This class is unpicklable') @@ -3469,6 +3516,9 @@ def build_target(self, node: mparser.BaseNode, args: T.Tuple[str, SourcesVarargs target = targetclass(name, self.subdir, self.subproject, for_machine, srcs, struct, objs, self.environment, self.compilers[for_machine], kwargs) + if target.uses_cpp(): + if self.coredata.optstore.get_value_for('cpp_import_std') and self.subproject == '': + target.add_deps([self._cpp_import_std_bmi_dep]) if objs and target.uses_rust(): FeatureNew.single_use('objects in Rust targets', '1.8.0', self.subproject) diff --git a/mesonbuild/options.py b/mesonbuild/options.py index 196bb6cb53de..9bb633584588 100644 --- a/mesonbuild/options.py +++ b/mesonbuild/options.py @@ -580,6 +580,11 @@ def choices_are_different(a: _U, b: _U) -> bool: return False +class UseImportStd(UserBooleanOption): + def __init__(self, lang): + self.lang = lang.lower() + opt_name =f'{self.lang}_import_std' + super().__init__(opt_name, 'Whether to use import std; module in your targets', False) class UserStdOption(UserComboOption): ''' diff --git a/mesonbuild/scripts/depscan.py b/mesonbuild/scripts/depscan.py index 6bd5cde9aac0..1b03fb196657 100644 --- a/mesonbuild/scripts/depscan.py +++ b/mesonbuild/scripts/depscan.py @@ -200,9 +200,35 @@ def scan(self) -> int: json.dump(description, f) return 0 - +class CppDependenciesScanner: + pass + +class ClangDependencyScanner(CppDependenciesScanner): + def __init__(self, compilation_db_file, json_output_file, dd_output_file=None): + self.compilation_db_file = compilation_db_file + self.json_output_file = json_output_file + self.dd_output_file = dd_output_file + + def scan(self): + try: + result = sp.run( + ["clang-scan-deps", + "-format=p1689", + "-compilation-database", self.compilation_db_file], + capture_output=True, + check=True + ) + print(result.stdout) + return 0 + except sp.SubprocessError: + return 1 + except sp.TimeoutExpired: + return 2 def run(args: T.List[str]) -> int: - assert len(args) == 2, 'got wrong number of arguments!' 
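# The rewritten run() that follows takes three positional arguments.  A minimal
# sketch of the intended wiring, mirroring the unpacking used in the new run()
# below and the ClangDependencyScanner constructor defined above (comp_db,
# json_output and dd_output are the compilation database, the P1689 JSON output
# and the dyndep output respectively):
#
#   def run(args: T.List[str]) -> int:
#       assert len(args) == 3, 'got wrong number of arguments!'
#       comp_db, json_output, dd_output = args
#       scanner = ClangDependencyScanner(comp_db, json_output, dd_output)
#       return scanner.scan()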
- outfile, pickle_file = args - scanner = DependencyScanner(pickle_file, outfile) - return scanner.scan() + assert len(args) > 2, 'At least and arguments' + comp_db, json_output, dd_output = args + ClangDependencyScanner(compilation_db_file, output_file) + # assert len(args) == 2, 'got wrong number of arguments!' + # outfile, pickle_file = args + # scanner = DependencyScanner(pickle_file, outfile) + # return scanner.scan() diff --git a/mesonbuild/scripts/depscanaccumulate.py b/mesonbuild/scripts/depscanaccumulate.py new file mode 100644 index 000000000000..f2ebef23b906 --- /dev/null +++ b/mesonbuild/scripts/depscanaccumulate.py @@ -0,0 +1,119 @@ +#!/usr/bin/env python3 + +from collections import defaultdict +from dataclasses import dataclass +import json +import subprocess as sp +import sys +import typing as T + +ModuleName: T.TypeAlias = str +ObjectFile: T.TypeAlias = str + + +@dataclass(frozen=True) +class ModuleProviderInfo: + logical_name: ModuleName + source_path: str + is_interface: bool = False + + +class CppDependenciesScanner: + pass + + +def normalize_filename(fname): + return fname.replace(':', '-') + + +class DynDepRule: + def __init__(self, out: str, imp_outs: T.Optional[T.List[str]], imp_ins: T.List[str]): + self.output = [f'build {out}'] + if imp_outs: + imp_out_str = " ".join([normalize_filename(o) for o in imp_outs]) + self.output.append(f" | {imp_out_str}") + self.output.append(": dyndep") + if imp_ins: + imp_ins_str = " ".join([normalize_filename(inf) for inf in imp_ins]) + self.output.append(" | " + imp_ins_str) + self.output_str = "".join(self.output) + "\n" + + def __str__(self): + return self.output_str + + +class ClangDependencyScanner(CppDependenciesScanner): + def __init__(self, compilation_db_file, json_output_file, dd_output_file=None): + self.compilation_db_file = compilation_db_file + self.json_output_file = json_output_file + self.dd_output_file = dd_output_file + + def scan(self) -> T.Tuple[T.Mapping[ObjectFile, ModuleName], T.Mapping[ObjectFile, ModuleProviderInfo]]: + try: + result = sp.run( + ["clang-scan-deps", + "-format=p1689", + "-compilation-database", self.compilation_db_file], + capture_output=True, + check=False + ) + + if result.returncode != 0: + print(result.stderr.decode()) + raise sp.SubprocessError("Failed to run clang-scan-deps") + + with open(self.json_output_file, 'wb') as f: + f.write(result.stdout) + + dependencies_info = json.loads(result.stdout) + all_deps_per_objfile = self.generate_dependencies(dependencies_info["rules"]) + self.generate_dd_file(all_deps_per_objfile) + return 0 + + except sp.SubprocessError: + return 1 + except sp.TimeoutExpired: + return 2 + + def generate_dd_file(self, deps_per_object_file): + with open('deps.dd', "w") as f: + f.write('ninja_dyndep_version = 1\n') + for obj, reqprov in deps_per_object_file.items(): + requires, provides = reqprov + dd = DynDepRule( + obj, + [p.logical_name + ".pcm" for p in provides], + [r + '.pcm' for r in requires] + ) + f.write(str(dd)) + + def generate_dependencies(self, rules: T.List): + all_entries: T.Mapping[ObjectFile, T.Tuple[T.Set[ModuleName], T.Set[ModuleProviderInfo]]] = \ + defaultdict(lambda: (set(), set())) + + for r in rules: + obj_processed = r["primary-output"] + all_entries[obj_processed] = (set(), set()) + + for req in r.get("requires", []): + all_entries[obj_processed][0].add(req["logical-name"]) + + for prov in r.get("provides", []): + all_entries[obj_processed][1].add(ModuleProviderInfo( + logical_name=prov["logical-name"], + source_path=prov["source-path"], 
+ is_interface=prov.get('is-interface', False) + )) + + return all_entries + + +def run(args: T.List[str]) -> int: + assert len(args) >= 2, 'At least and arguments required' + comp_db_path, json_output_path, dd_output = args + scanner = ClangDependencyScanner(comp_db_path, json_output_path) + return scanner.scan() + + +if __name__ == '__main__': + run(sys.argv[1:]) diff --git a/test cases/cython/1 basic/cytest.py b/test cases/cython/1 basic/cytest.py index c08ffeed358e..7a2019ba124f 100755 --- a/test cases/cython/1 basic/cytest.py +++ b/test cases/cython/1 basic/cytest.py @@ -4,12 +4,12 @@ s = Storer() -if s.get_value() != 0: +if s.get_value_for() != 0: raise SystemExit('Initial value incorrect.') s.set_value(42) -if s.get_value() != 42: +if s.get_value_for() != 42: raise SystemExit('Setting value failed.') try: diff --git a/test cases/cython/1 basic/libdir/storer.pyx b/test cases/cython/1 basic/libdir/storer.pyx index ed551dc5f66c..9a5f57d9653b 100644 --- a/test cases/cython/1 basic/libdir/storer.pyx +++ b/test cases/cython/1 basic/libdir/storer.pyx @@ -9,7 +9,7 @@ cdef class Storer: def __dealloc__(self): cstorer.storer_destroy(self._c_storer) - cpdef int get_value(self): + cpdef int get_value_for(self): return cstorer.storer_get_value(self._c_storer) cpdef set_value(self, int value): diff --git a/test cases/python/3 cython/cytest.py b/test cases/python/3 cython/cytest.py index c08ffeed358e..7a2019ba124f 100755 --- a/test cases/python/3 cython/cytest.py +++ b/test cases/python/3 cython/cytest.py @@ -4,12 +4,12 @@ s = Storer() -if s.get_value() != 0: +if s.get_value_for() != 0: raise SystemExit('Initial value incorrect.') s.set_value(42) -if s.get_value() != 42: +if s.get_value_for() != 42: raise SystemExit('Setting value failed.') try: diff --git a/test cases/python/3 cython/libdir/storer.pyx b/test cases/python/3 cython/libdir/storer.pyx index ed551dc5f66c..9a5f57d9653b 100644 --- a/test cases/python/3 cython/libdir/storer.pyx +++ b/test cases/python/3 cython/libdir/storer.pyx @@ -9,7 +9,7 @@ cdef class Storer: def __dealloc__(self): cstorer.storer_destroy(self._c_storer) - cpdef int get_value(self): + cpdef int get_value_for(self): return cstorer.storer_get_value(self._c_storer) cpdef set_value(self, int value): diff --git a/test cases/python3/3 cython/cytest.py b/test cases/python3/3 cython/cytest.py index 43443dc66514..9b096ffae7c9 100755 --- a/test cases/python3/3 cython/cytest.py +++ b/test cases/python3/3 cython/cytest.py @@ -5,13 +5,13 @@ s = Storer() -if s.get_value() != 0: +if s.get_value_for() != 0: print('Initial value incorrect.') sys.exit(1) s.set_value(42) -if s.get_value() != 42: +if s.get_value_for() != 42: print('Setting value failed.') sys.exit(1) diff --git a/test cases/python3/3 cython/libdir/storer.pyx b/test cases/python3/3 cython/libdir/storer.pyx index ed551dc5f66c..9a5f57d9653b 100644 --- a/test cases/python3/3 cython/libdir/storer.pyx +++ b/test cases/python3/3 cython/libdir/storer.pyx @@ -9,7 +9,7 @@ cdef class Storer: def __dealloc__(self): cstorer.storer_destroy(self._c_storer) - cpdef int get_value(self): + cpdef int get_value_for(self): return cstorer.storer_get_value(self._c_storer) cpdef set_value(self, int value):
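# Illustrative only: a self-contained sketch of the data flow implemented by
# depscanaccumulate.py above.  The sample JSON mirrors the P1689 format that
# `clang-scan-deps -format=p1689` emits (rules with "primary-output",
# "provides" and "requires"); the exact field set depends on the clang
# version, and the file names here are made up.  generate_dd_file() writes the
# same dyndep shape to deps.dd.
import json

SAMPLE_P1689 = json.loads('''
{
  "version": 1,
  "revision": 0,
  "rules": [
    {
      "primary-output": "foo.o",
      "provides": [{"logical-name": "foo", "source-path": "foo.cppm", "is-interface": true}],
      "requires": [{"logical-name": "bar"}]
    },
    {
      "primary-output": "bar.o",
      "provides": [{"logical-name": "bar", "source-path": "bar.cppm", "is-interface": true}],
      "requires": []
    }
  ]
}
''')

def p1689_to_dyndep(scan: dict) -> str:
    # One dyndep entry per object file: implicit outputs are the BMIs the
    # translation unit produces, implicit inputs are the BMIs it consumes.
    lines = ['ninja_dyndep_version = 1']
    for rule in scan['rules']:
        provides = ' '.join(p['logical-name'] + '.pcm' for p in rule.get('provides', []))
        requires = ' '.join(r['logical-name'] + '.pcm' for r in rule.get('requires', []))
        entry = f'build {rule["primary-output"]}'
        if provides:
            entry += f' | {provides}'
        entry += ': dyndep'
        if requires:
            entry += f' | {requires}'
        lines.append(entry)
    return '\n'.join(lines) + '\n'

print(p1689_to_dyndep(SAMPLE_P1689))
# Expected output:
#   ninja_dyndep_version = 1
#   build foo.o | foo.pcm: dyndep | bar.pcm
#   build bar.o | bar.pcm: dyndep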