diff --git a/.gitignore b/.gitignore index e04aa0daddfb..b57a31f07cab 100644 --- a/.gitignore +++ b/.gitignore @@ -6,7 +6,7 @@ /.cproject /.idea /.vscode - +/pyrightconfig.json __pycache__ /.coverage/ /.coveragerc diff --git a/ci/ciimage/ubuntu-rolling/install.sh b/ci/ciimage/ubuntu-rolling/install.sh index 3460be48e9f5..43705e489e99 100755 --- a/ci/ciimage/ubuntu-rolling/install.sh +++ b/ci/ciimage/ubuntu-rolling/install.sh @@ -29,6 +29,7 @@ pkgs=( itstool openjdk-11-jre jq + npm ) sed -i '/^Types: deb/s/deb/deb deb-src/' /etc/apt/sources.list.d/ubuntu.sources diff --git a/mesonbuild/backend/ninjabackend.py b/mesonbuild/backend/ninjabackend.py index 3c26b39c93fe..e4b51fc02f77 100644 --- a/mesonbuild/backend/ninjabackend.py +++ b/mesonbuild/backend/ninjabackend.py @@ -15,9 +15,8 @@ import os import pickle import re -import subprocess +import subprocess as sp import typing as T - from . import backends from .. import modules from .. import mesonlib @@ -36,6 +35,7 @@ from ..options import OptionKey from .backends import CleanTrees from ..build import GeneratedList, InvalidArguments +import shutil if T.TYPE_CHECKING: from typing_extensions import Literal @@ -508,7 +508,10 @@ def __init__(self, build: T.Optional[build.Build]): # - https://github.com/mesonbuild/meson/pull/9453 # - https://github.com/mesonbuild/meson/issues/9479#issuecomment-953485040 self.allow_thin_archives = PerMachine[bool](True, True) + self._first_deps_dd_rule_generated = False + self._all_scan_sources = [] self.import_std: T.Optional[ImportStdInfo] = None + self._import_std_warning_shown = False def create_phony_target(self, dummy_outfile: str, rulename: str, phony_infilename: str) -> NinjaBuildElement: ''' @@ -558,10 +561,16 @@ def detect_vs_dep_prefix(self, tempfilename: str) -> T.TextIO: # and locale dependent. Any attempt at converting it to # Python strings leads to failure. We _must_ do this detection # in raw byte mode and write the result in raw bytes. 
- pc = subprocess.Popen(compiler.get_exelist() + - ['/showIncludes', '/c', filebase], - cwd=self.environment.get_scratch_dir(), - stdout=subprocess.PIPE, stderr=subprocess.PIPE) + pc = sp.Popen( + compiler.get_exelist() + [ + '/showIncludes', + '/c', + filebase, + ], + cwd=self.environment.get_scratch_dir(), + stdout=sp.PIPE, + stderr=sp.PIPE, + ) (stdout, stderr) = pc.communicate() # We want to match 'Note: including file: ' in the line @@ -654,6 +663,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) for t in ProgressBar(self.build.get_targets().values(), desc='Generating targets'): self.generate_target(t) + self.generate_global_dependency_scan_target() mlog.log_timestamp("Targets generated") self.add_build_comment(NinjaComment('Test rules')) self.generate_tests() @@ -695,8 +705,8 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) if ((mesonlib.version_compare(self.ninja_version, '>= 1.12.0') or (mesonlib.version_compare(self.ninja_version, '>=1.10.0') and not self._uses_dyndeps)) and os.path.exists(os.path.join(self.environment.build_dir, '.ninja_log'))): - subprocess.call(self.ninja_command + ['-t', 'restat'], cwd=self.environment.build_dir) - subprocess.call(self.ninja_command + ['-t', 'cleandead'], cwd=self.environment.build_dir) + sp.call(self.ninja_command + ['-t', 'restat'], cwd=self.environment.build_dir) + sp.call(self.ninja_command + ['-t', 'cleandead'], cwd=self.environment.build_dir) self.generate_compdb() self.generate_rust_project_json() @@ -738,9 +748,17 @@ def generate_compdb(self) -> None: ninja_compdb = self.ninja_command + ['-t', 'compdb'] + compdb_options + rules builddir = self.environment.get_build_dir() try: - jsondb = subprocess.check_output(ninja_compdb, cwd=builddir) - with open(os.path.join(builddir, 'compile_commands.json'), 'wb') as f: - f.write(jsondb) + jsondb = sp.check_output(ninja_compdb, cwd=builddir) + jsondb_path = os.path.join(builddir, 
'compile_commands.json') + existing = None + try: + with open(jsondb_path, 'rb') as f: + existing = f.read() + except FileNotFoundError: + pass + if existing != jsondb: + with open(jsondb_path, 'wb') as f: + f.write(jsondb) except Exception: mlog.warning('Could not create compilation database.', fatal=False) @@ -1097,7 +1115,8 @@ def generate_target(self, target: T.Union[build.BuildTarget, build.CustomTarget, final_obj_list = self.generate_prelink(target, obj_list) else: final_obj_list = obj_list - + if self.should_use_dyndeps_for_target(target): + self._all_scan_sources.extend(compiled_sources) self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps) if isinstance(target, build.SharedLibrary): @@ -1119,6 +1138,8 @@ def generate_target(self, target: T.Union[build.BuildTarget, build.CustomTarget, def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool: if not self.ninja_has_dyndeps: return False + if target.has_pch(): + return False if 'fortran' in target.compilers: return True if 'cpp' not in target.compilers: @@ -1128,6 +1149,8 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool: return True # Currently only the preview version of Visual Studio is supported. 
cpp = target.compilers['cpp'] + if cpp.get_id() == 'clang' and mesonlib.version_compare(cpp.version, '>=20.0.0'): + return True if cpp.get_id() != 'msvc': return False cppversion = self.get_target_option(target, OptionKey('cpp_std', @@ -1141,6 +1164,37 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool: return False return True + def generate_project_wide_cpp_scanner_rules(self) -> None: + rulename = 'depscanaccumulate' + if rulename in self.ruledict: + return + command = self.environment.get_build_command() + \ + ['--internal', 'depscanaccumulate'] + try: + cpp_compiler = self.environment.coredata.compilers[MachineChoice.HOST]['cpp'] + cpp_exe = cpp_compiler.get_exelist()[0] + except KeyError: + cpp_exe = 'clang++' + args = ['$in', 'deps.json', '$out', cpp_exe] + description = 'Scanning project for modules' + rule = NinjaRule(rulename, command, args, description) + self.add_rule(rule) + rulename = 'cpp_module_precompile' + command = ['$COMPILER', '--precompile', '-x', 'c++-module', '$in', '-o', '$out'] + args = ['$ARGS'] + description = 'Precompiling C++ module $in' + rule = NinjaRule(rulename, command, args, description) + self.add_rule(rule) + + def generate_global_dependency_scan_target(self) -> None: + self._uses_dyndeps = True + self.generate_project_wide_cpp_scanner_rules() + rule_name = 'depscanaccumulate' + elem = NinjaBuildElement(self.all_outputs, "deps.dd", rule_name, "compile_commands.json") + elem.add_dep(self._all_scan_sources) + elem.add_item('restat', '1') + self.add_build(elem) + def generate_dependency_scan_target(self, target: build.BuildTarget, compiled_sources: T.List[str], source2object: T.Dict[str, str], @@ -1148,43 +1202,38 @@ def generate_dependency_scan_target(self, target: build.BuildTarget, if not self.should_use_dyndeps_for_target(target): return self._uses_dyndeps = True + if 'cpp' in target.compilers and target.compilers['cpp'].get_id() == 'clang' and 
mesonlib.version_compare(target.compilers['cpp'].version, '>=20.0.0'): + self._all_scan_sources.extend(compiled_sources) + return + # Fortran per-target path json_file, depscan_file = self.get_dep_scan_file_for(target) pickle_base = target.name + '.dat' pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/') pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/') rule_name = 'depscan' scan_sources = list(self.select_sources_to_scan(compiled_sources)) - scaninfo = TargetDependencyScannerInfo( self.get_target_private_dir(target), source2object, scan_sources) - write = True if os.path.exists(pickle_abs): with open(pickle_abs, 'rb') as p: old = pickle.load(p) write = old != scaninfo - if write: with open(pickle_abs, 'wb') as p: pickle.dump(scaninfo, p) - elem = NinjaBuildElement(self.all_outputs, json_file, rule_name, pickle_file) - # A full dependency is required on all scanned sources, if any of them - # are updated we need to rescan, as they may have changed the modules - # they use or export. for s in scan_sources: elem.deps.add(s[0]) elem.orderdeps.update(object_deps) elem.add_item('name', target.name) self.add_build(elem) - infiles: T.Set[str] = set() for t in target.get_all_linked_targets(): if self.should_use_dyndeps_for_target(t): infiles.add(self.get_dep_scan_file_for(t)[0]) _, od = self.flatten_object_list(target) infiles.update({self.get_dep_scan_file_for(t)[0] for t in od if t.uses_fortran()}) - elem = NinjaBuildElement(self.all_outputs, depscan_file, 'depaccumulate', [json_file] + sorted(infiles)) elem.add_item('name', target.name) self.add_build(elem) @@ -2748,16 +2797,13 @@ def generate_pch_rule_for(self, langname: str, compiler: Compiler) -> None: def generate_scanner_rules(self) -> None: rulename = 'depscan' if rulename in self.ruledict: - # Scanning command is the same for native and cross compilation. 
return - command = self.environment.get_build_command() + \ ['--internal', 'depscan'] args = ['$picklefile', '$out', '$in'] description = 'Scanning target $name for modules' rule = NinjaRule(rulename, command, args, description) self.add_rule(rule) - rulename = 'depaccumulate' command = self.environment.get_build_command() + \ ['--internal', 'depaccumulate'] @@ -2765,6 +2811,7 @@ def generate_scanner_rules(self) -> None: description = 'Generating dynamic dependency information for target $name' rule = NinjaRule(rulename, command, args, description) self.add_rule(rule) + self.generate_project_wide_cpp_scanner_rules() def generate_compile_rules(self) -> None: for for_machine in MachineChoice: @@ -2779,6 +2826,42 @@ def generate_compile_rules(self) -> None: for mode in compiler.get_modes(): self.generate_compile_rule_for(langname, mode) + def _get_cpp_module_output_name(self, src, compiler, target) -> T.Tuple[str, T.List[str]]: + extra_args = target.extra_args.get('cpp', []) + include_dirs = [] + for inc in target.include_dirs: + for inc_dir in inc.incdirs: + include_dirs.append(f'-I{os.path.join(inc.curdir, inc_dir)}') + flags = extra_args + include_dirs + abs_src = src.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir()) + scan_deps = shutil.which('clang-scan-deps') + if not scan_deps: + for ver in range(25, 14, -1): + scan_deps = shutil.which(f'clang-scan-deps-{ver}') + if scan_deps: + break + if not scan_deps: + raise MesonException('Could not find clang-scan-deps') + cmd = [ + scan_deps, + "-format=p1689", + "--", + compiler.get_exelist()[0], + "-std=c++26", + abs_src, + ] + flags + result = sp.run(cmd, capture_output=True) + if result.returncode != 0: + return 'dummy', [] + info = json.loads(result.stdout) + required_pcms = [] + for rule in info.get("rules", []): + for req in rule.get("requires", []): + required_pcms.append(req["logical-name"] + ".pcm") + for provides in rule.get("provides", []): + return provides["logical-name"] + 
".pcm", required_pcms + return 'dummy', [] + def generate_generator_list_rules(self, target) -> None: # CustomTargets have already written their rules and # CustomTargetIndexes don't actually get generated, so write rules for @@ -3297,6 +3380,19 @@ def quote_make_target(targetName: str) -> str: result += c return result element.add_item('CUDA_ESCAPED_TARGET', quote_make_target(rel_obj)) + if self.should_use_dyndeps_for_target(target) and compiler.get_language() == 'cpp' and compiler.get_id() == 'clang': + mod_output, required_pcms = self._get_cpp_module_output_name(src, compiler, target) + if mod_output != 'dummy': + pcm_path = mod_output + precompile_elem = NinjaBuildElement(self.all_outputs, pcm_path, 'cpp_module_precompile', rel_src) + precompile_elem.add_item('COMPILER', compiler.get_exelist()[0]) + precompile_elem.add_item('ARGS', list(commands) + [f'-fprebuilt-module-path={self.environment.get_build_dir()}']) + for req in required_pcms: + precompile_elem.add_orderdep(req) + self.add_build(precompile_elem) + commands.extend([ + f'-fprebuilt-module-path={self.environment.get_build_dir()}' + ]) element.add_item('ARGS', commands) self.add_dependency_scanner_entries_to_element(target, compiler, element, src) @@ -3319,7 +3415,9 @@ def handle_cpp_import_std(self, target: build.BuildTarget, compiler): istd_dep = [] if not self.target_uses_import_std(target): return istd_args, istd_dep - mlog.warning('Import std support is experimental and might break compatibility in the future.') + if not self._import_std_warning_shown: + self._import_std_warning_shown = True + mlog.warning('Import std support is experimental and might break compatibility in the future.') # At the time of writing, all three major compilers work # wildly differently. Keep this isolated here until things # consolidate. 
@@ -3354,6 +3452,36 @@ def handle_cpp_import_std(self, target: build.BuildTarget, compiler): self.import_std = ImportStdInfo(elem, mod_file, [mod_obj_file]) istd_dep = [File(True, '', self.import_std.gen_module_file)] return istd_args, istd_dep + elif compiler.id == 'clang' and mesonlib.version_compare(compiler.version, '>=20.0.0'): + if self.import_std is None: + mod_file = 'std.pcm' + # Find libc++.modules.json in lib search dirs and resolve std.cppm path + lib_dirs = self.environment.coredata.get_external_link_args(MachineChoice.HOST, 'cpp') + in_file_str = None + for arg in lib_dirs: + if arg.startswith('-L'): + lib_dir = Path(arg[2:]) + modules_json = lib_dir / 'libc++.modules.json' + if modules_json.is_file(): + data = json.loads(modules_json.read_text()) + for mod in data['modules']: + if mod['logical-name'] == 'std': + in_file_str = str((lib_dir / mod['source-path']).resolve()) + break + if in_file_str: + break + if not in_file_str: + raise MesonException('Could not find libc++.modules.json via -L paths in cpp_link_args.') + elem = NinjaBuildElement(self.all_outputs, [mod_file], 'CUSTOM_COMMAND', [in_file_str]) + compile_args = [a for a in self.environment.coredata.get_external_args(MachineChoice.HOST, 'cpp') + if a != '-fmodules'] + compile_args += compiler.get_option_std_args(target, self.environment) + compile_args += ['--precompile', in_file_str, '-o', mod_file] + elem.add_item('COMMAND', compiler.exelist + compile_args) + self.add_build(elem) + self.import_std = ImportStdInfo(elem, mod_file, []) + istd_dep = [File(True, '', self.import_std.gen_module_file)] + return istd_args, istd_dep else: raise MesonException(f'Import std not supported on compiler {compiler.id} yet.') @@ -3367,7 +3495,10 @@ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, c extension = extension.lower() if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']): return - dep_scan_file = 
self.get_dep_scan_file_for(target)[1] + if extension in compilers.lang_suffixes['cpp'] and compiler.get_id() == 'clang' and mesonlib.version_compare(compiler.version, '>=20.0.0'): + dep_scan_file = 'deps.dd' + else: + dep_scan_file = self.get_dep_scan_file_for(target)[1] element.add_item('dyndep', dep_scan_file) element.add_orderdep(dep_scan_file) @@ -4012,7 +4143,6 @@ def generate_clangtidy(self) -> None: self.generate_clangtool('tidy', 'fix', need_pch=True) def generate_tags(self, tool: str, target_name: str) -> None: - import shutil if not shutil.which(tool): return if target_name in self.all_outputs: diff --git a/mesonbuild/compilers/cpp.py b/mesonbuild/compilers/cpp.py index 5d319f9939eb..6504d8346f4e 100644 --- a/mesonbuild/compilers/cpp.py +++ b/mesonbuild/compilers/cpp.py @@ -179,6 +179,8 @@ def get_options(self) -> 'MutableKeyedOptionDictType': }) return opts + def get_cpp20_module_bmi_extension(self) -> str: + raise MesonException("Your compiler does not support 'import std' feature or it has not been implemented") class _StdCPPLibMixin(CompilerMixinBase): @@ -259,6 +261,13 @@ def get_options(self) -> 'MutableKeyedOptionDictType': self.make_option_name(key), 'Standard Win libraries to link against', gnu_winlibs) + + if version_compare(self.version, '>=17'): + key = self.form_compileropt_key('importstd') + opts[key] = options.UserComboOption(self.make_option_name(key), + 'Use #import std.', + 'false', + choices=['false', 'true']) return opts def get_option_compile_args(self, target: 'BuildTarget', subproject: T.Optional[str] = None) -> T.List[str]: @@ -332,6 +341,8 @@ def get_cpp_modules_args(self) -> T.List[str]: # Although -fmodules-ts is removed in LLVM 17, we keep this in for compatibility with old compilers. 
return ['-fmodules', '-fmodules-ts'] + def get_cpp20_module_bmi_extension(self) -> str: + return '.pcm' class ArmLtdClangCPPCompiler(ClangCPPCompiler): diff --git a/mesonbuild/scripts/depscanaccumulate.py b/mesonbuild/scripts/depscanaccumulate.py new file mode 100644 index 000000000000..17a2ecea5fa6 --- /dev/null +++ b/mesonbuild/scripts/depscanaccumulate.py @@ -0,0 +1,144 @@ +from __future__ import annotations + +from collections import defaultdict +from dataclasses import dataclass +import json +import os +import subprocess as sp +import sys +import typing as T +import shutil +from ..mesonlib import MesonException + +if sys.version_info >= (3, 10): + ModuleName: T.TypeAlias = str + ObjectFile: T.TypeAlias = str +else: + ModuleName = str + ObjectFile = str + +@dataclass(frozen=True) +class ModuleProviderInfo: + logical_name: ModuleName + source_path: str + is_interface: bool = False + +class CppDependenciesScanner: + pass + +def normalize_filename(fname: str) -> str: + return fname.replace(':', '-') + +class DynDepRule: + def __init__(self, out: str, imp_outs: T.Optional[T.List[str]], imp_ins: T.List[str]) -> None: + self.output = [f'build {out}'] + if imp_outs: + imp_out_str = " ".join([normalize_filename(o) for o in imp_outs]) + self.output.append(f" | {imp_out_str}") + self.output.append(": dyndep") + if imp_ins: + imp_ins_str = " ".join([normalize_filename(inf) for inf in imp_ins]) + self.output.append(" | " + imp_ins_str) + self.output_str = "".join(self.output) + "\n" + + def __str__(self) -> str: + return self.output_str + +class ClangDependencyScanner(CppDependenciesScanner): + def __init__(self, compilation_db_file: str, json_output_file: str, dd_output_file: str = 'deps.dd', cpp_compiler: str = 'clang++') -> None: + self.compilation_db_file = compilation_db_file + self.json_output_file = json_output_file + self.dd_output_file = dd_output_file + which_result = shutil.which(cpp_compiler) + assert which_result is not None, f'Could not find {cpp_compiler} in 
PATH' + self.scan_deps = shutil.which('clang-scan-deps') + if not self.scan_deps: + for ver in range(25, 14, -1): + found = shutil.which(f'clang-scan-deps-{ver}') + if found: + self.scan_deps = found + break + if not self.scan_deps: + raise MesonException('Could not find clang-scan-deps') + + def scan(self) -> int: + try: + with open(self.compilation_db_file, 'r', encoding='utf-8') as f: + compile_commands = json.load(f) + + cpp_extensions = {'.cpp', '.cc', '.cxx', '.c++', '.cppm', '.C'} + cpp_commands = [cmd for cmd in compile_commands + if os.path.splitext(cmd['file'])[1] in cpp_extensions] + + for cmd in cpp_commands: + args = cmd['command'].split() + filtered_args = [] + skip_next = False + for arg in args: + if skip_next: + skip_next = False + continue + if arg.startswith('-fprebuilt-module-path'): + continue + if arg == '-include-pch': + skip_next = True + continue + filtered_args.append(arg) + cmd['command'] = ' '.join(filtered_args) + filtered_db = self.compilation_db_file + '.filtered.json' + with open(filtered_db, 'w', encoding='utf-8') as f: + json.dump(cpp_commands, f) + + r = sp.run( + [ + str(self.scan_deps), + "-format=p1689", + "-compilation-database", + filtered_db, + ], + capture_output=True, + ) + + if r.returncode != 0: + print(r.stderr.decode(errors='replace'), file=sys.stderr) + raise sp.SubprocessError("Failed to run command") + with open(self.json_output_file, 'w', encoding='utf-8') as f: + f.write(r.stdout.decode('utf-8')) + dependencies_info = json.loads(r.stdout) + all_deps_per_objfile = self.generate_dependencies(dependencies_info["rules"]) + self.generate_dd_file(all_deps_per_objfile) + return 0 + except sp.SubprocessError: + return 1 + + def generate_dd_file(self, deps_per_object_file: T.Dict[str, T.Tuple[T.Set[str], T.Set[ModuleProviderInfo]]]) -> None: + with open(self.dd_output_file, "w", encoding='utf-8') as f: + f.write('ninja_dyndep_version = 1\n') + for obj, reqprov in deps_per_object_file.items(): + requires, provides = reqprov + dd = 
DynDepRule(obj, None, [r + '.pcm' for r in requires]) + f.write(str(dd)) + + def generate_dependencies(self, rules: T.List[T.Any]) -> T.Dict[str, T.Tuple[T.Set[str], T.Set[ModuleProviderInfo]]]: + all_entries: T.Dict[str, T.Tuple[T.Set[str], T.Set[ModuleProviderInfo]]] = defaultdict(lambda: (set(), set())) + for r in rules: + obj_processed = r["primary-output"] + all_entries[obj_processed] = (set(), set()) + for req in r.get("requires", []): + all_entries[obj_processed][0].add(req["logical-name"]) + for prov in r.get("provides", []): + all_entries[obj_processed][1].add(ModuleProviderInfo( + logical_name=prov["logical-name"], + source_path=prov["source-path"], + is_interface=prov.get('is-interface', False))) + return all_entries + +def run(args: T.List[str]) -> int: + assert len(args) >= 3, 'Expected [cpp_compiler] arguments' + comp_db_path, json_output_path, dd_output = args[:3] + cpp = args[3] if len(args) > 3 else 'clang++' + scanner = ClangDependencyScanner(comp_db_path, json_output_path, dd_output, cpp) + return scanner.scan() + +if __name__ == '__main__': + run(sys.argv[1:]) diff --git a/test cases/common/283 wrap override/subprojects/subsub.wrap b/test cases/common/283 wrap override/subprojects/subsub.wrap new file mode 100644 index 000000000000..30faa64ca1a6 --- /dev/null +++ b/test cases/common/283 wrap override/subprojects/subsub.wrap @@ -0,0 +1,2 @@ +[wrap-redirect] +filename = sub/subprojects/subsub.wrap diff --git a/test cases/common/98 subproject subdir/subprojects/subsubsub-1.0/.meson-subproject-wrap-hash.txt b/test cases/common/98 subproject subdir/subprojects/subsubsub-1.0/.meson-subproject-wrap-hash.txt new file mode 100644 index 000000000000..40138659fd02 --- /dev/null +++ b/test cases/common/98 subproject subdir/subprojects/subsubsub-1.0/.meson-subproject-wrap-hash.txt @@ -0,0 +1 @@ +0fd8007dd44a1a5eb5c01af4c138f0993c6cb44da194b36db04484212eff591b diff --git a/test cases/common/98 subproject subdir/subprojects/subsubsub-1.0/meson.build b/test 
cases/common/98 subproject subdir/subprojects/subsubsub-1.0/meson.build new file mode 100644 index 000000000000..530852c0d3cc --- /dev/null +++ b/test cases/common/98 subproject subdir/subprojects/subsubsub-1.0/meson.build @@ -0,0 +1,3 @@ +project('subsubsub') + +meson.override_dependency('subsubsub', declare_dependency()) diff --git a/test cases/common/98 subproject subdir/subprojects/subsubsub.wrap b/test cases/common/98 subproject subdir/subprojects/subsubsub.wrap new file mode 100644 index 000000000000..5fa019a72cc8 --- /dev/null +++ b/test cases/common/98 subproject subdir/subprojects/subsubsub.wrap @@ -0,0 +1,2 @@ +[wrap-redirect] +filename = sub_implicit/subprojects/subsub/subprojects/subsubsub.wrap diff --git a/unittests/allplatformstests.py b/unittests/allplatformstests.py index 13cf9644ef63..770d02599b10 100644 --- a/unittests/allplatformstests.py +++ b/unittests/allplatformstests.py @@ -1291,7 +1291,8 @@ def test_always_prefer_c_compiler_for_asm(self): for cmd in self.get_compdb(): # Get compiler split = split_args(cmd['command']) - if split[0] in ('ccache', 'sccache'): + # Use basename to handle absolute paths like /usr/bin/ccache + if os.path.basename(split[0]) in ('ccache', 'sccache'): compiler = split[1] else: compiler = split[0] @@ -3104,6 +3105,14 @@ def test_pkg_config_libdir(self): @skipIf(is_osx(), 'Not implemented for Darwin yet') @skipIf(is_windows(), 'POSIX only') def test_python_build_config_extensions(self): + prefix = sysconfig.get_config_var("prefix") + if prefix != "/usr" and prefix != "/usr/local": + raise unittest.SkipTest( + f'Skipping test because python ({sys.executable}) is a non-system installation' + ) + if shutil.which('lld'): + raise unittest.SkipTest(f'Skipping test because python is built with lld') + # I will look into this in detail later testdir = os.path.join(self.unit_test_dir, '126 python extension') diff --git a/unittests/machinefiletests.py b/unittests/machinefiletests.py index 70cf82697917..89f46eb788c7 100644 --- 
a/unittests/machinefiletests.py +++ b/unittests/machinefiletests.py @@ -207,6 +207,9 @@ def test_config_tool_dep(self): # Do the skip at this level to avoid screwing up the cache if mesonbuild.envconfig.detect_msys2_arch(): raise SkipTest('Skipped due to problems with LLVM on MSYS2') + llvm_config = shutil.which('llvm-config') + if not llvm_config or not llvm_config.startswith(('/usr', '/bin', '/local')): + raise SkipTest('llvm-config is not a system installation') self._simple_test('config_dep', 'llvm-config') def test_python3_module(self):