Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
130 changes: 73 additions & 57 deletions mesonbuild/backend/ninjabackend.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import pickle
import re
import subprocess
import subprocess as sp
import typing as T

from . import backends
Expand Down Expand Up @@ -508,8 +509,11 @@ def __init__(self, build: T.Optional[build.Build]):
# - https://github.com/mesonbuild/meson/pull/9453
# - https://github.com/mesonbuild/meson/issues/9479#issuecomment-953485040
self.allow_thin_archives = PerMachine[bool](True, True)
self._first_deps_dd_rule_generated = False
self._all_scan_sources = []
self.import_std: T.Optional[ImportStdInfo] = None


def create_phony_target(self, dummy_outfile: str, rulename: str, phony_infilename: str) -> NinjaBuildElement:
'''
We need to use aliases for targets that might be used as directory
Expand Down Expand Up @@ -654,6 +658,7 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None)

for t in ProgressBar(self.build.get_targets().values(), desc='Generating targets'):
self.generate_target(t)
self.generate_global_dependency_scan_target()
mlog.log_timestamp("Targets generated")
self.add_build_comment(NinjaComment('Test rules'))
self.generate_tests()
Expand Down Expand Up @@ -1097,7 +1102,8 @@ def generate_target(self, target: T.Union[build.BuildTarget, build.CustomTarget,
final_obj_list = self.generate_prelink(target, obj_list)
else:
final_obj_list = obj_list

if self.should_use_dyndeps_for_target(target):
self._all_scan_sources.extend(compiled_sources)
self.generate_dependency_scan_target(target, compiled_sources, source2object, fortran_order_deps)

if isinstance(target, build.SharedLibrary):
Expand Down Expand Up @@ -1128,6 +1134,8 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
return True
# Currently only the preview version of Visual Studio is supported.
cpp = target.compilers['cpp']
if cpp.get_id() == 'clang':
return True
if cpp.get_id() != 'msvc':
return False
cppversion = self.get_target_option(target, OptionKey('cpp_std',
Expand All @@ -1141,53 +1149,40 @@ def should_use_dyndeps_for_target(self, target: 'build.BuildTarget') -> bool:
return False
return True

def generate_project_wide_cpp_scanner_rules(self) -> None:
    """Emit the ninja rules used by the project-wide C++ module scan.

    Two rules are added:

    * ``depscanaccumulate`` -- runs ``meson --internal depscanaccumulate``
      over the compilation database to produce one dyndep file for the
      whole project.
    * ``cpp_module_precompile`` -- precompiles a single C++ module
      interface unit into a BMI via ``$COMPILER --precompile``.
    """
    rulename = 'depscanaccumulate'
    # The scanning command is identical for native and cross compilation,
    # so the rules only need to be added once.
    if rulename in self.ruledict:
        return
    command = self.environment.get_build_command() + \
        ['--internal', 'depscanaccumulate']
    # $in is the compilation database, 'deps.json' the intermediate scan
    # result, and $out the generated dyndep file.
    args = ['$in', 'deps.json', '$out']
    description = 'Scanning project for modules'
    rule = NinjaRule(rulename, command, args, description)
    self.add_rule(rule)

    # NOTE(review): only the first rule name is checked in the early-return
    # guard above; that is safe only as long as both rules are always added
    # together.
    rulename = 'cpp_module_precompile'
    command = ['$COMPILER', '--precompile', '-x', 'c++-module', '$in', '-o', '$out']
    args = ['$ARGS']
    description = 'Precompiling C++ module $in'
    rule = NinjaRule(rulename, command, args, description)
    self.add_rule(rule)

def generate_global_dependency_scan_target(self) -> None:
    """Create the single project-wide dyndep target ``deps.dd``.

    Runs the ``depscanaccumulate`` rule over ``compile_commands.json``.
    Called once from ``generate()`` after all targets are generated, so
    that ``self._all_scan_sources`` is fully populated.
    """
    self._uses_dyndeps = True
    self.generate_project_wide_cpp_scanner_rules()
    rule_name = 'depscanaccumulate'
    elem = NinjaBuildElement(self.all_outputs, "deps.dd", rule_name, "compile_commands.json")
    # Full dependency on every collected source: editing any of them can
    # change the modules it imports/exports, which requires a rescan.
    # NOTE(review): _all_scan_sources is a flat list of source paths
    # accumulated per target -- confirm add_dep accepts a list argument.
    elem.add_dep(self._all_scan_sources)
    self.add_build(elem)

def generate_dependency_scan_target(self, target: build.BuildTarget,
                                    compiled_sources: T.List[str],
                                    source2object: T.Dict[str, str],
                                    object_deps: T.List[FileOrString]) -> None:
    """Generate per-target module dependency scanning build statements.

    Writes a pickled :class:`TargetDependencyScannerInfo` for the target
    (only when it changed, to avoid needless ninja rebuilds), then emits a
    ``depscan`` element producing the target's scan JSON and a
    ``depaccumulate`` element folding in the scan results of all linked
    targets.  Finally records *compiled_sources* for the project-wide
    ``deps.dd`` target.
    """
    # NOTE(review): the caller in generate_target() already performs this
    # check before calling us -- confirm whether this guard is redundant.
    if not self.should_use_dyndeps_for_target(target):
        return
    self._uses_dyndeps = True
    json_file, depscan_file = self.get_dep_scan_file_for(target)
    pickle_base = target.name + '.dat'
    # Forward slashes keep the paths ninja-friendly on Windows.
    pickle_file = os.path.join(self.get_target_private_dir(target), pickle_base).replace('\\', '/')
    pickle_abs = os.path.join(self.get_target_private_dir_abs(target), pickle_base).replace('\\', '/')
    rule_name = 'depscan'
    scan_sources = list(self.select_sources_to_scan(compiled_sources))

    scaninfo = TargetDependencyScannerInfo(
        self.get_target_private_dir(target), source2object, scan_sources)

    # Only rewrite the pickle when its contents changed, so its timestamp
    # does not trigger a rescan on every reconfigure.
    write = True
    if os.path.exists(pickle_abs):
        with open(pickle_abs, 'rb') as p:
            old = pickle.load(p)
            write = old != scaninfo

    if write:
        with open(pickle_abs, 'wb') as p:
            pickle.dump(scaninfo, p)

    elem = NinjaBuildElement(self.all_outputs, json_file, rule_name, pickle_file)
    # A full dependency is required on all scanned sources, if any of them
    # are updated we need to rescan, as they may have changed the modules
    # they use or export.
    for s in scan_sources:
        # Each scan_sources entry is a (source, language) tuple.
        elem.deps.add(s[0])
    elem.orderdeps.update(object_deps)
    elem.add_item('name', target.name)
    self.add_build(elem)

    # Accumulate the scan JSON of every linked target that also uses
    # dyndeps (plus Fortran object providers) into this target's dyndep
    # file.
    infiles: T.Set[str] = set()
    for t in target.get_all_linked_targets():
        if self.should_use_dyndeps_for_target(t):
            infiles.add(self.get_dep_scan_file_for(t)[0])
    _, od = self.flatten_object_list(target)
    infiles.update({self.get_dep_scan_file_for(t)[0] for t in od if t.uses_fortran()})

    elem = NinjaBuildElement(self.all_outputs, depscan_file, 'depaccumulate', [json_file] + sorted(infiles))
    elem.add_item('name', target.name)
    self.add_build(elem)
    # Feed the project-wide deps.dd target (see
    # generate_global_dependency_scan_target).
    self._all_scan_sources.extend(compiled_sources)

def select_sources_to_scan(self, compiled_sources: T.List[str],
) -> T.Iterable[T.Tuple[str, Literal['cpp', 'fortran']]]:
Expand Down Expand Up @@ -2750,21 +2745,7 @@ def generate_scanner_rules(self) -> None:
if rulename in self.ruledict:
# Scanning command is the same for native and cross compilation.
return

command = self.environment.get_build_command() + \
['--internal', 'depscan']
args = ['$picklefile', '$out', '$in']
description = 'Scanning target $name for modules'
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)

rulename = 'depaccumulate'
command = self.environment.get_build_command() + \
['--internal', 'depaccumulate']
args = ['$out', '$in']
description = 'Generating dynamic dependency information for target $name'
rule = NinjaRule(rulename, command, args, description)
self.add_rule(rule)
self.generate_project_wide_cpp_scanner_rules()

def generate_compile_rules(self) -> None:
for for_machine in MachineChoice:
Expand All @@ -2779,6 +2760,28 @@ def generate_compile_rules(self) -> None:
for mode in compiler.get_modes():
self.generate_compile_rule_for(langname, mode)

def _get_cpp_module_output_name(self, src, compiler, target) -> T.Tuple[str, T.List[str]]:
    """Determine the BMI produced by *src* and the BMIs it requires.

    Runs ``clang-scan-deps`` in p1689 format on the single source file and
    parses the module provide/require information from its JSON output.

    Returns a tuple ``(provided_pcm_name, required_pcm_names)``.  The
    sentinel name ``'dummy'`` is returned when the source provides no
    module at all.
    """
    extra_args = target.extra_args.get('cpp', [])
    include_dirs = []
    for inc in target.include_dirs:
        for inc_dir in inc.get_incdirs():
            include_dirs.append(f'-I{os.path.join(inc.get_curdir(), inc_dir)}')
    flags = extra_args + include_dirs
    abs_src = src.absolute_path(self.environment.get_source_dir(), self.environment.get_build_dir())
    # NOTE(review): the language standard is hard-coded to c++26 here while
    # the actual compile uses the target's cpp_std option -- confirm these
    # cannot disagree.  clang-scan-deps is also assumed to be on PATH.
    cmd = ["clang-scan-deps", "-format=p1689", "--",
           compiler.get_exelist()[0], "-std=c++26", abs_src] + flags
    result = sp.run(cmd, capture_output=True)
    if result.returncode != 0:
        # Scanner failed: guess the BMI name from the file stem and report
        # no requirements instead of aborting the whole generation.
        return os.path.splitext(os.path.basename(src.fname))[0] + ".pcm", []
    info = json.loads(result.stdout)
    required_pcms = []
    for rule in info.get("rules", []):
        for req in rule.get("requires", []):
            required_pcms.append(req["logical-name"] + ".pcm")
        for provides in rule.get("provides", []):
            # Single-file scan: the first provided module is the output.
            return provides["logical-name"] + ".pcm", required_pcms
    return 'dummy', []

def generate_generator_list_rules(self, target) -> None:
# CustomTargets have already written their rules and
# CustomTargetIndexes don't actually get generated, so write rules for
Expand Down Expand Up @@ -3297,6 +3300,19 @@ def quote_make_target(targetName: str) -> str:
result += c
return result
element.add_item('CUDA_ESCAPED_TARGET', quote_make_target(rel_obj))
if self.should_use_dyndeps_for_target(target) and compiler.get_language() == 'cpp' and compiler.get_id() == 'clang':
mod_output, required_pcms = self._get_cpp_module_output_name(src, compiler, target)
if mod_output != 'dummy':
pcm_path = mod_output
precompile_elem = NinjaBuildElement(self.all_outputs, pcm_path, 'cpp_module_precompile', rel_src)
precompile_elem.add_item('COMPILER', compiler.get_exelist()[0])
precompile_elem.add_item('ARGS', list(commands) + [f'-fprebuilt-module-path={self.environment.get_build_dir()}'])
for req in required_pcms:
precompile_elem.add_orderdep(req)
self.add_build(precompile_elem)
commands.extend([
f'-fprebuilt-module-path={self.environment.get_build_dir()}'
])
element.add_item('ARGS', commands)

self.add_dependency_scanner_entries_to_element(target, compiler, element, src)
Expand Down Expand Up @@ -3367,7 +3383,7 @@ def add_dependency_scanner_entries_to_element(self, target: build.BuildTarget, c
extension = extension.lower()
if not (extension in compilers.lang_suffixes['fortran'] or extension in compilers.lang_suffixes['cpp']):
return
dep_scan_file = self.get_dep_scan_file_for(target)[1]
dep_scan_file = 'deps.dd'
element.add_item('dyndep', dep_scan_file)
element.add_orderdep(dep_scan_file)

Expand Down
4 changes: 4 additions & 0 deletions mesonbuild/compilers/cpp.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,8 @@ def get_options(self) -> 'MutableKeyedOptionDictType':
})
return opts

def get_cpp20_module_bmi_extension(self) -> str:
    """Return the file extension of this compiler's binary module
    interface (BMI) files.

    The base implementation signals that C++20 module / ``import std``
    support is unavailable; compilers with support override this.
    """
    raise MesonException("Your compiler does not support 'import std' feature or it has not been implemented")

class _StdCPPLibMixin(CompilerMixinBase):

Expand Down Expand Up @@ -332,6 +334,8 @@ def get_cpp_modules_args(self) -> T.List[str]:
# Although -fmodules-ts is removed in LLVM 17, we keep this in for compatibility with old compilers.
return ['-fmodules', '-fmodules-ts']

def get_cpp20_module_bmi_extension(self) -> str:
    """Return Clang's BMI extension: ``.pcm`` (precompiled module)."""
    return '.pcm'

class ArmLtdClangCPPCompiler(ClangCPPCompiler):

Expand Down
35 changes: 32 additions & 3 deletions mesonbuild/scripts/depscan.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,9 @@
import pickle
import re
import typing as T
import subprocess as sp
import shutil
from mesonbuild.compilers import cpp

if T.TYPE_CHECKING:
from typing_extensions import Literal, TypedDict, NotRequired
Expand Down Expand Up @@ -201,8 +204,34 @@ def scan(self) -> int:

return 0

class CppDependenciesScanner:
    """Marker base class for C++ module dependency scanners."""


class ClangDependencyScanner(CppDependenciesScanner):
    """Scan a compilation database with ``clang-scan-deps`` and emit dyndep info.

    Writes the raw p1689 JSON produced by the scanner to
    *json_output_file* and a ninja dyndep file to *dd_output_file*.
    """

    def __init__(self, compilation_db_file: str, json_output_file: str,
                 dd_output_file: str = 'deps.dd', compiler: str = 'clang++') -> None:
        self.compilation_db_file = compilation_db_file
        self.json_output_file = json_output_file
        self.dd_output_file = dd_output_file
        # Look for clang-scan-deps next to the C++ compiler; fall back to a
        # bare name resolved via PATH when the compiler cannot be located.
        # (Previously shutil.which() was called on the imported ``cpp``
        # *module* object, which raised TypeError at construction time.)
        compiler_path = shutil.which(compiler) or compiler
        self.clang_scan_deps = os.path.join(os.path.dirname(compiler_path), 'clang-scan-deps')

    def scan(self) -> int:
        """Run the scan; return 0 on success, 1 on failure."""
        try:
            result = sp.run(
                [self.clang_scan_deps,
                 "-format=p1689",
                 "-compilation-database", self.compilation_db_file],
                capture_output=True,
                check=True
            )
            with open(self.json_output_file, 'wb') as f:
                f.write(result.stdout)
            dependencies_info = json.loads(result.stdout)
            # NOTE(review): generate_dependencies/generate_dd_file are not
            # defined on this class or its base -- confirm they are meant to
            # be shared with mesonbuild/scripts/depscanaccumulate.py.
            all_deps_per_objfile = self.generate_dependencies(dependencies_info["rules"])
            self.generate_dd_file(all_deps_per_objfile)
            return 0
        except sp.SubprocessError:
            return 1

def run(args: T.List[str]) -> int:
    """Script entry point for ``meson --internal depscan``.

    Expects exactly three arguments: the compilation database path, the
    JSON output path and the dyndep output path.  Returns the scanner's
    exit status (0 on success, 1 on failure).
    """
    # The old interface took (outfile, pickle_file); the scanner now needs
    # three paths, so the argument-count check must match the unpacking.
    assert len(args) == 3, 'got wrong number of arguments!'
    comp_db, json_output, dd_output = args
    scanner = ClangDependencyScanner(comp_db, json_output, dd_output)
    return scanner.scan()
95 changes: 95 additions & 0 deletions mesonbuild/scripts/depscanaccumulate.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
from collections import defaultdict
from dataclasses import dataclass
import json
import os
import subprocess as sp
import sys
import typing as T
import shutil

# Aliases documenting the two kinds of strings flowing through this script.
# Plain assignments instead of ``X: T.TypeAlias = ...`` keep the module
# importable on Python < 3.10, where ``typing.TypeAlias`` does not exist;
# type checkers still treat these as implicit type aliases.
ModuleName = str
ObjectFile = str

@dataclass(frozen=True)
class ModuleProviderInfo:
    """Record of one C++ module provided by a scanned source file.

    Frozen (immutable and hashable) so instances can live in sets.
    """
    # Exported module name from the p1689 "logical-name" field,
    # e.g. ``foo`` or a partition ``foo:part``.
    logical_name: ModuleName
    # Path of the source file that provides the module ("source-path").
    source_path: str
    # True when the source is a module interface unit ("is-interface");
    # defaults to False when the scanner omits the field.
    is_interface: bool = False

class CppDependenciesScanner:
    """Marker base class for C++ module dependency scanners."""

def normalize_filename(fname):
    """Map characters special to ninja syntax out of *fname*.

    Module partition names contain ``:`` (``mod:part``), which ninja would
    parse as the output/rule separator of a build statement, so it is
    replaced with ``-``.
    """
    return '-'.join(fname.split(':'))


class DynDepRule:
    """One ninja dyndep ``build`` statement, rendered via ``str()``."""

    def __init__(self, out: str, imp_outs: T.Optional[T.List[str]], imp_ins: T.List[str]):
        # Assemble the statement piecewise:
        #   build <out> [| <implicit outs>]: dyndep [| <implicit ins>]
        pieces = [f'build {out}']
        if imp_outs:
            joined_outs = " ".join(normalize_filename(name) for name in imp_outs)
            pieces.append(f" | {joined_outs}")
        pieces.append(": dyndep")
        if imp_ins:
            joined_ins = " ".join(normalize_filename(name) for name in imp_ins)
            pieces.append(" | " + joined_ins)
        self.output = pieces
        self.output_str = "".join(pieces) + "\n"

    def __str__(self):
        return self.output_str

class ClangDependencyScanner(CppDependenciesScanner):
    """Produce a project-wide ninja dyndep file for C++20 modules.

    Runs ``clang-scan-deps`` in p1689 format over a compilation database,
    stores the raw JSON in *json_output_file* and renders the module
    require information into the dyndep file *dd_output_file*.
    """

    def __init__(self, compilation_db_file: str, json_output_file: str,
                 dd_output_file: str = 'deps.dd', cpp: str = 'clang++'):
        self.compilation_db_file = compilation_db_file
        self.json_output_file = json_output_file
        self.dd_output_file = dd_output_file
        # Look for clang-scan-deps next to the C++ compiler; when the
        # compiler is not on PATH, fall back to its bare name so the tool
        # itself is resolved via PATH.
        self.clang_scan_deps = os.path.join(os.path.dirname(shutil.which(cpp) or cpp), 'clang-scan-deps')

    def scan(self) -> int:
        """Run the scan and write both output files; 0 on success, 1 on failure."""
        try:
            r = sp.run([self.clang_scan_deps,
                        "-format=p1689",
                        "-compilation-database", self.compilation_db_file],
                       capture_output=True)
            if r.returncode != 0:
                # Decode so the diagnostics are readable (not a bytes repr)
                # and send them to stderr where build errors belong.
                print(r.stderr.decode(errors='replace'), file=sys.stderr)
                raise sp.SubprocessError("Failed to run command")
            with open(self.json_output_file, 'wb') as f:
                f.write(r.stdout)
            dependencies_info = json.loads(r.stdout)
            all_deps_per_objfile = self.generate_dependencies(dependencies_info["rules"])
            self.generate_dd_file(all_deps_per_objfile)
            return 0
        except sp.SubprocessError:
            return 1

    def generate_dd_file(self, deps_per_object_file):
        """Write the ninja dyndep file from the per-object require/provide map."""
        with open(self.dd_output_file, "w") as f:
            f.write('ninja_dyndep_version = 1\n')
            for obj, reqprov in deps_per_object_file.items():
                requires, provides = reqprov
                # NOTE(review): ``provides`` is intentionally not emitted as
                # implicit outputs here -- presumably the .pcm files are
                # produced by separate precompile rules; confirm.
                dd = DynDepRule(obj, None,
                                [r + '.pcm' for r in requires])
                f.write(str(dd))

    def generate_dependencies(self, rules: T.List):
        """Fold p1689 scan rules into ``{object: ({requires}, {provides})}``.

        Every rule's primary output gets an entry even when it neither
        requires nor provides modules.
        """
        # DefaultDict (not Mapping): the map is mutated below.
        all_entries: T.DefaultDict[ObjectFile, T.Tuple[T.Set[ModuleName], T.Set[ModuleProviderInfo]]] = defaultdict(lambda: (set(), set()))
        for r in rules:
            obj_processed = r["primary-output"]
            all_entries[obj_processed] = (set(), set())
            for req in r.get("requires", []):
                all_entries[obj_processed][0].add(req["logical-name"])
            for prov in r.get("provides", []):
                all_entries[obj_processed][1].add(ModuleProviderInfo(
                    logical_name=prov["logical-name"],
                    source_path=prov["source-path"],
                    is_interface=prov.get('is-interface', False)))
        return all_entries

def run(args: T.List[str]) -> int:
    """Command-line entry point.

    args: ``[compilation_db, json_output, dd_output, [cpp_compiler]]``.
    Returns 0 on success, 1 on failure (including bad usage).
    """
    if len(args) < 3:
        # An assert was used here before, but asserts are stripped under
        # ``python -O``; report usage errors explicitly instead.
        print('Expected <compilation_db> <json_output> <dd_output> [cpp_compiler] arguments',
              file=sys.stderr)
        return 1
    comp_db_path, json_output_path, dd_output = args[:3]
    cpp = args[3] if len(args) > 3 else 'clang++'
    scanner = ClangDependencyScanner(comp_db_path, json_output_path, dd_output, cpp)
    return scanner.scan()

if __name__ == '__main__':
    # Propagate the scanner's exit status; previously the return value of
    # run() was discarded, so the script exited 0 even on failure.
    raise SystemExit(run(sys.argv[1:]))
Loading