diff --git a/.github/workflows/os_comp.yml b/.github/workflows/os_comp.yml index 4b9b7a4a6eae..3ccb407c2460 100644 --- a/.github/workflows/os_comp.yml +++ b/.github/workflows/os_comp.yml @@ -162,3 +162,25 @@ jobs: update-alternatives --set i686-w64-mingw32-g++ /usr/bin/i686-w64-mingw32-g++-posix ./run_tests.py $RUN_TESTS_ARGS -- $MESON_ARGS + + check-toolchain-and-convert: + name: 'Check toolchain and build system conversion' + runs-on: ubuntu-latest + container: + image: mesonbuild/ubuntu-rolling + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Run check-toolchain unit tests + shell: bash + run: | + source /ci/env_vars.sh + python3 run_unittests.py CheckToolchainTests + + - name: Run convert unit tests + shell: bash + run: | + source /ci/env_vars.sh + python3 run_unittests.py ConvertTests diff --git a/docs/markdown/Commands.md b/docs/markdown/Commands.md index f3324f23e4e3..3be757774d19 100644 --- a/docs/markdown/Commands.md +++ b/docs/markdown/Commands.md @@ -17,6 +17,71 @@ Optional arguments. The most common workflow is to run For the full list of all available options for a specific command use the following syntax: `meson COMMAND --help` +### check-toolchain + +{{ check-toolchain_usage.inc }} + +Check the properties of the specified toolchain and sysroot. +The output is displayed as a TOML file. + +The checked properties are an amalgamation of checks performed +by popular Meson-based projects. Users are encouraged to +upstream properties of interest. + +{{ check-toolchain_arguments.inc }} + +#### Examples: + +Check the properties of the default system toolchain. + +``` +meson check-toolchain +``` + +Check the properties with a cross-file specified toolchain. + +``` +meson check-toolchain --cross-file CROSS_FILE +``` + +Check the properties of Android NDK toolchains. 
+ +``` +meson check-toolchain --android-ndk-path PATH_TO_ANDROID_NDK +``` + +### convert + +{{ convert_usage.inc }} + +Converts the meson project to an another build system. The input +are a series of TOML files, and the output are files in the target +build system. Soong and Bazel are supported to varying degrees. + +Meson maintains reference TOML files for certain hermetic trees +and projects, and users are encouraged to upstream their use +cases. + +{{ convert_arguments.inc }} + +#### Examples: + +Convert Mesa3D to Soong using reference TOML files. + +``` +meson convert android aosp_mesa3d +``` + +Power user maintains own TOML for their hermetic project. + +``` +meson convert --config=/path/to/myproject.toml \ +--project-dir=/path/to/my/target_repo +--toolchain=/path/to/mytoolchain.toml \ +--dependencies=/path/to/mydeps.toml +--output_dir=/path/to/output-dir +``` + ### configure {{ configure_usage.inc }} diff --git a/mesonbuild/backend/nonebackend.py b/mesonbuild/backend/nonebackend.py index ab11e8d4034d..08def3007df8 100644 --- a/mesonbuild/backend/nonebackend.py +++ b/mesonbuild/backend/nonebackend.py @@ -20,8 +20,10 @@ def generate(self, capture: bool = False, vslite_ctx: T.Optional[T.Dict] = None) if vslite_ctx: raise MesonBugException('We do not expect the none backend to be given a valid \'vslite_ctx\'') - if self.build.get_targets(): - raise MesonBugException('None backend cannot generate target rules, but should have failed earlier.') - mlog.log('Generating simple install-only backend') - self.serialize_tests() - self.create_install_data_files() + # The `meson convert` tool generates build targets, but uses the none backend + # The below clause covers the non-convert use cases of the none backend, when + # build targets are not generated. 
+ if not self.build.get_targets(): + mlog.log('Generating simple install-only backend') + self.serialize_tests() + self.create_install_data_files() diff --git a/mesonbuild/check_toolchain/__init__.py b/mesonbuild/check_toolchain/__init__.py new file mode 100644 index 000000000000..a512afc0ea2f --- /dev/null +++ b/mesonbuild/check_toolchain/__init__.py @@ -0,0 +1,2 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Authors diff --git a/mesonbuild/check_toolchain/android.py b/mesonbuild/check_toolchain/android.py new file mode 100644 index 000000000000..6538b80ef83a --- /dev/null +++ b/mesonbuild/check_toolchain/android.py @@ -0,0 +1,372 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Authors + +from __future__ import annotations +import os +import re +import sys +import tempfile +import typing as T +import copy +import hashlib +import urllib.request +import subprocess +from dataclasses import dataclass + +from .defs import HostMachine, CompilerInfo, Toolchain, WrapInfo + + +@dataclass +class CrossFileContext: + android_ndk_path: str + target: str + android_api_level: int + rust_target: str + system: str + cpu_family: str + cpu: str + endian: str + c_flags: T.List[str] + cpp_flags: T.List[str] + + +ANDROID_CROSS_FILE_TEMPLATE = """ +[binaries] +ar = '{android_ndk_path}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar' +c = ['ccache', '{android_ndk_path}/toolchains/llvm/prebuilt/linux-x86_64/bin/{target}{android_api_level}-clang'] +cpp = ['ccache', '{android_ndk_path}/toolchains/llvm/prebuilt/linux-x86_64/bin/{target}{android_api_level}-clang++'] +rust = ['rustc', '--target', '{rust_target}'] +c_ld = 'lld' +cpp_ld = 'lld' +strip = '{android_ndk_path}/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip' +pkg-config = ['/usr/bin/pkgconf'] + +[host_machine] +system = '{system}' +cpu_family = '{cpu_family}' +cpu = '{cpu}' +endian = '{endian}' + +[built-in options] +c_args = [{c_flags_str}] +cpp_args = [{cpp_flags_str}] + 
+[properties] +needs_exe_wrapper = true +sys_root = '{android_ndk_path}/toolchains/llvm/prebuilt/linux-x86_64/sysroot' +pkg_config_libdir = '{android_ndk_path}/toolchains/llvm/prebuilt/linux-x86_64/sysroot/usr/lib/{target}/pkgconfig/' +""" + +AOSP_LINUX_COMMON_TOOLCHAIN_INFO: T.Dict[str, T.Any] = { + "host_machine": { + "system": "linux", + "endian": "little", + }, + "c": { + "compiler_id": "clang", + "linker_id": "ld.lld", + "version": "21.0.0", + }, + "cpp": { + "compiler_id": "clang", + "linker_id": "ld.lld", + "version": "21.0.0", + }, + "rust": { + "compiler_id": "rustc", + "linker_id": "ld.lld", + "version": "1.90.0", + }, + "c_headers_fails": ["pthread_np.h", "linux/udmabuf.h"], + "c_header_symbols_fails": { + "sys/mkdev.h": ["major", "minor", "makedev"], + "errno.h": ["program_invocation_name"], + }, + "c_functions_fails": [ + "memfd_create", + "qsort_s", + "pthread_setaffinity_np", + "thrd_create", + "getrandom", + "__builtin_add_overflow_p", + "__builtin_sub_overflow_p", + ], + "c_supported_arguments_fails": ["-Wno-nonnull-compare"], + "cpp_supported_arguments_fails": ["-flifetime-dse=1"], +} + + +def generate_cross_file(context: CrossFileContext) -> str: + c_flags_str = ", ".join([f"'{f}'" for f in context.c_flags]) + cpp_flags_str = ", ".join([f"'{f}'" for f in context.cpp_flags]) + + return ANDROID_CROSS_FILE_TEMPLATE.format( + android_ndk_path=context.android_ndk_path, + target=context.target, + android_api_level=context.android_api_level, + rust_target=context.rust_target, + system=context.system, + cpu_family=context.cpu_family, + cpu=context.cpu, + endian=context.endian, + c_flags_str=c_flags_str, + cpp_flags_str=cpp_flags_str, + ) + + +def _get_aosp_linux_toolchain(arch: str, libc: str) -> Toolchain: + info = copy.deepcopy(AOSP_LINUX_COMMON_TOOLCHAIN_INFO) + + if arch == "x86_64": + info["host_machine"]["cpu_family"] = "x86_64" + info["host_machine"]["cpu"] = "x86_64" + elif arch == "aarch64": + info["host_machine"]["cpu_family"] = "aarch64" + 
info["host_machine"]["cpu"] = "aarch64" + else: + raise ValueError(f"Unsupported architecture: {arch}") + + if libc == "glibc": + info["c_functions_fails"].append("reallocarray") + elif libc == "musl": + info["c_headers_fails"].append("xlocale.h") + else: + raise ValueError(f"Unsupported libc: {libc}") + + return Toolchain( + name=f"linux_{libc}_{arch}", + host_machine=HostMachine(**info["host_machine"]), + c=CompilerInfo(**info["c"]), + cpp=CompilerInfo(**info["cpp"]), + rust=CompilerInfo(**info["rust"]), + c_headers_fails=info["c_headers_fails"], + c_header_symbols_fails=info["c_header_symbols_fails"], + c_functions_fails=info["c_functions_fails"], + c_supported_arguments_fails=info["c_supported_arguments_fails"], + cpp_supported_arguments_fails=info["cpp_supported_arguments_fails"], + ) + + +def get_aosp_linux_toolchains() -> T.List[Toolchain]: + toolchains: T.List[Toolchain] = [] + configs = [ + ("x86_64", "glibc"), + ("x86_64", "musl"), + ("aarch64", "musl"), + ] + for arch, libc in configs: + toolchains.append(_get_aosp_linux_toolchain(arch, libc)) + return toolchains + + +@dataclass +class AndroidConfig: + name: str + target: str + rust_target: str + system: str + cpu_family: str + cpu: str + endian: str + c_flags: T.List[str] + cpp_flags: T.List[str] + + +def generate_android_toolchains( + run_compiler_checks_callback: T.Callable[ + [str, str, T.List[str], T.List[str]], Toolchain], + ndk_version: str, + ndk_platform: str = "linux") -> T.List[Toolchain]: # fmt: skip + """ + Generates and checks a set of standard Android toolchain configurations. + + This function iterates through predefined Android target configurations (e.g., + aarch64, x86_64, x86). + + The specified NDK version is downloaded for Bionic targets. For build machine + toolchains, the correct values for linux_musl and linux_glibc_* from AOSP are + hard-coded (those toolchains are not easily downloadable). 
+ """ + ndk_url = f"https://dl.google.com/android/repository/android-ndk-{ndk_version}-{ndk_platform}.zip" + + temp_dir = tempfile.mkdtemp() + ndk_zip = os.path.join(temp_dir, f"android-ndk-{ndk_version}.zip") + print(f"Downloading Android NDK {ndk_version}: {ndk_url}...") + urllib.request.urlretrieve(ndk_url, ndk_zip) + + sha256 = hashlib.sha256() + with open(ndk_zip, "rb") as f: + for chunk in iter(lambda: f.read(4096), b""): + sha256.update(chunk) + ndk_hash = sha256.hexdigest() + + print(f"Extracting Android NDK {ndk_version}...") + subprocess.run(["unzip", "-q", "-o", ndk_zip, "-d", temp_dir], check=True) + + # NDK usually extracts to a directory named android-ndk- + # But sometimes it's different, let's find it. + extracted_dirs = [ + d for d in os.listdir(temp_dir) if os.path.isdir(os.path.join(temp_dir, d)) + ] + if not extracted_dirs: + sys.exit(f"Failed to find extracted NDK directory in {temp_dir}") + + # Prefer directory starting with android-ndk + ndk_dir_name = extracted_dirs[0] + for d in extracted_dirs: + if d.startswith("android-ndk"): + ndk_dir_name = d + break + + ndk_path = os.path.join(temp_dir, ndk_dir_name) + + wrap_binaries = { + "cc": "toolchains/llvm/prebuilt/linux-x86_64/bin/clang", + "cpp": "toolchains/llvm/prebuilt/linux-x86_64/bin/clang++", + "ar": "toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar", + "strip": "toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip", + "toolchain_id": "clang-android", + } + + toolchains: T.List[Toolchain] = [] + configurations = [ + AndroidConfig( + name="android_arm64", + target="aarch64-linux-android", + rust_target="aarch64-linux-android", + system="android", + cpu_family="aarch64", + cpu="aarch64", + endian="little", + c_flags=[ + "-fno-exceptions", + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + ], + cpp_flags=[ + "-fno-exceptions", + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + "--start-no-unused-arguments", + "-static-libstdc++", + "--end-no-unused-arguments", + ], 
+ ), + AndroidConfig( + name="android_x86", + target="i686-linux-android", + rust_target="i686-linux-android", + system="android", + cpu_family="x86", + cpu="i686", + endian="little", + c_flags=[ + "-m32", + "-march=slm", + "-fno-exceptions", + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + ], + cpp_flags=[ + "-m32", + "-march=slm", + "-fno-exceptions", + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + "--start-no-unused-arguments", + "-static-libstdc++", + "--end-no-unused-arguments", + ], + ), + AndroidConfig( + name="android_x86_64", + target="x86_64-linux-android", + rust_target="x86_64-linux-android", + system="android", + cpu_family="x86_64", + cpu="x86_64", + endian="little", + c_flags=[ + "-fno-exceptions", + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + ], + cpp_flags=[ + "-fno-exceptions", + "-fno-unwind-tables", + "-fno-asynchronous-unwind-tables", + "--start-no-unused-arguments", + "-static-libstdc++", + "--end-no-unused-arguments", + ], + ), + ] + + bin_path = os.path.join( + ndk_path, "toolchains", "llvm", "prebuilt", "linux-x86_64", "bin" + ) + api_levels = [] + for filename in os.listdir(bin_path): + match = re.search(r"(\d+)-clang", filename) + if match: + api_levels.append(int(match.group(1))) + + if not api_levels: + sys.exit( + f"Could not determine the highest API level from the NDK path: {bin_path}" + ) + + highest_api_level = max(api_levels) + print(f"Detected highest API level: {highest_api_level}") + + for config in configurations: + config.c_flags.extend( + [f"-D__ANDROID_MIN_SDK_VERSION__={highest_api_level}", "-D__USE_GNU"] + ) + config.cpp_flags.extend( + [f"-D__ANDROID_MIN_SDK_VERSION__={highest_api_level}", "-D__USE_GNU"] + ) + + context = CrossFileContext( + android_ndk_path=ndk_path, + target=config.target, + android_api_level=highest_api_level, + rust_target=config.rust_target, + system=config.system, + cpu_family=config.cpu_family, + cpu=config.cpu, + endian=config.endian, + 
c_flags=config.c_flags, + cpp_flags=config.cpp_flags, + ) + cross_content = generate_cross_file(context) + + temp_cross_file = None + try: + with tempfile.NamedTemporaryFile( + mode="w", delete=False, suffix=".txt" + ) as tf: + tf.write(cross_content) + temp_cross_file = tf.name + + t = run_compiler_checks_callback( + temp_cross_file, + config.name, + config.c_flags, + config.cpp_flags, + ) + + t.compilers_wrap = WrapInfo( + source_url=ndk_url, + source_filename=os.path.basename(ndk_url), + source_hash=ndk_hash, + binaries=wrap_binaries, + ) + + toolchains.append(t) + finally: + if temp_cross_file: + os.unlink(temp_cross_file) + + toolchains += get_aosp_linux_toolchains() + return toolchains diff --git a/mesonbuild/check_toolchain/checker.py b/mesonbuild/check_toolchain/checker.py new file mode 100644 index 000000000000..6ce44e5963b8 --- /dev/null +++ b/mesonbuild/check_toolchain/checker.py @@ -0,0 +1,379 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Authors + +from __future__ import annotations +import argparse +import os +import platform +import tempfile +import typing as T + +from .. import environment, mesonlib, compilers +from ..envconfig import detect_cpu_family +from .defs import CompilerInfo, HostMachine, Toolchain + + +def run_compiler_checks( + cross_file: str, + name: str, + c_flags: T.List[str], + cpp_flags: T.List[str]) -> Toolchain: # fmt: skip + """ + This function sets up a temporary Meson environment configured with a given + cross file to initialize C and C++ compilers. + + It then performs an extensive series of checks to probe the capabilities and + limitations of the toolchain. + + The checks are mostly stolen from Mesa3D, but it could be an union of checks + performed by important Meson enjoying projects. + + Any failures or unsupported features are recorded in a `Toolchain` object, + which is returned to be serialized into a TOML configuration file. 
+ """ + with tempfile.TemporaryDirectory() as temp_dir: + options = argparse.Namespace( + cross_file=[cross_file] if cross_file else [], + native_file=[], + cmd_line_options={}, + ) + env = environment.Environment(os.getcwd(), temp_dir, options) + cc = compilers.detect_c_compiler(env, mesonlib.MachineChoice.HOST) + cpp = compilers.detect_cpp_compiler(env, mesonlib.MachineChoice.HOST) + + if cross_file is None: + # Explicitly update host machine info as it might not be fully populated for native builds + env.machines.host.cpu_family = detect_cpu_family({"c": cc, "cpp": cpp}) + env.machines.host.cpu = platform.machine().lower() + + results = Toolchain( + name=name, + host_machine=HostMachine( + cpu_family=env.machines.host.cpu_family, + cpu=env.machines.host.cpu, + system=env.machines.host.system, + endian=env.machines.host.endian, + ), + c=CompilerInfo( + compiler_id=cc.get_id(), + linker_id=cc.get_linker_id(), + version=cc.version, + ), + cpp=CompilerInfo( + compiler_id=cpp.get_id(), + linker_id=cpp.get_linker_id(), + version=cpp.version, + ), + rust=CompilerInfo( + compiler_id="rustc", + linker_id="ld.lld", + version="1.90.0", + ), + ) + + if cc.get_id() == "gcc" and mesonlib.version_compare(cc.version, "< 4.4.6"): + raise mesonlib.MesonException( + "When using GCC, version 4.4.6 or later is required." 
+ ) + + if not cc.has_multi_link_arguments(["-Wl,--gdb-index"])[0]: + results.c_supported_link_arguments_fails.append("-Wl,--gdb-index") + + builtins_to_detect = { + "bswap32": "int main() { return __builtin_bswap32(0); }", + "bswap64": "int main() { return __builtin_bswap64(0); }", + "clz": "#include \nint main() { int x = 0; return __builtin_clz(x); }", + "clzll": "#include \nint main() { long long x = 0; return __builtin_clzll(x); }", + "ctz": "#include \nint main() { int x = 0; return __builtin_ctz(x); }", + "expect": "int main() { return __builtin_expect(0, 0); }", + "ffs": "#include \nint main() { return ffs(0); }", + "ffsll": "#include \nint main() { return ffsll(0); }", + "popcount": "int main() { return __builtin_popcount(0); }", + "popcountll": "int main() { return __builtin_popcountll(0); }", + "unreachable": "int main() { __builtin_unreachable(); }", + "types_compatible_p": "int main() { return __builtin_types_compatible_p(int, int); }", + } + for f, code in builtins_to_detect.items(): + if not cc.compiles(code, extra_args=c_flags)[0]: + results.c_functions_fails.append(f) + + _attributes = [ + "const", + "flatten", + "malloc", + "pure", + "unused", + "warn_unused_result", + "weak", + "format", + "packed", + "returns_nonnull", + "alias", + "noreturn", + ] + for attr in _attributes: + if not cc.has_func_attribute(attr)[0]: + results.c_function_attributes_fails.append(attr) + + if not cc.has_func_attribute("visibility:hidden")[0]: + results.c_function_attributes_fails.append("visibility:hidden") + + if not cc.compiles("__uint128_t foo(void) { return 0; }", extra_args=c_flags)[ + 0 + ]: + results.c_compiles_fails.append("__uint128_t") + + if not cc.links( + "static char unused() { return 5; } int main() { return 0; }", + extra_args=["-Wl,--gc-sections"], + )[0]: + results.c_links_fails.append("gc-sections") + + if not cc.compiles( + """#include + int main() { + struct { + uint64_t *v; + } x; + return (int)__atomic_load_n(x.v, __ATOMIC_ACQUIRE) & + 
(int)__atomic_add_fetch(x.v, (uint64_t)1, __ATOMIC_ACQ_REL); +            }""", +            extra_args=c_flags, +        )[0]: +            results.c_compiles_fails.append("GCC atomic builtins") +        if not cc.links( +            """#include +            uint64_t v; +            int main() { +                return __sync_add_and_fetch(&v, (uint64_t)1); +            }""",
timespec", "", extra_args=c_flags + )[0]: + results.c_header_symbols_fails["time.h"] = ['"struct timespec"'] + + if not cc.has_header_symbol( + "errno.h", "program_invocation_name", "", extra_args=c_flags + )[0]: + results.c_header_symbols_fails["errno.h"] = ["program_invocation_name"] + + if not cc.has_function("posix_memalign", "", extra_args=c_flags)[0]: + results.c_functions_fails.append("posix_memalign") + + if not cc.has_members( + "struct dirent", + ["d_type"], # noqa + prefix="""#include + #include """, + extra_args=c_flags, + )[0]: + results.c_members_fails["struct dirent"] = ["d_type"] + + if not cc.links( + "int main() { return 0; }", extra_args=["-Wl,-Bsymbolic"] + c_flags + )[0]: + results.c_links_fails.append("Bsymbolic") + + if not cc.links( + """#define _GNU_SOURCE +#include +#include +#ifdef HAVE_XLOCALE_H +#include +#endif +int main() { + locale_t loc = newlocale(LC_CTYPE_MASK, "C", NULL); + const char *s = "1.0"; + char *end; + double d = strtod_l(s, &end, loc); + float f = strtof_l(s, &end, loc); + freelocale(loc); + return 0; +}""", + extra_args=c_flags, + )[0]: + results.c_links_fails.append("xlocale") + + gnu_qsort_r = """ + #define _GNU_SOURCE + #include + + static int dcomp(const void *l, const void *r, void *t) { return 0; } + + int main(int ac, char **av) { + int arr[] = { 1 }; + void *t = NULL; + qsort_r((void*)&arr[0], 1, 1, dcomp, t); + return (0); + }""" + if not cpp.links(gnu_qsort_r, extra_args=cpp_flags)[0]: + results.cpp_links_fails.add("qsort_r") + + bsd_qsort_r = """ + #include + + static int dcomp(void *t, const void *l, const void *r) { return 0; } + + int main(int ac, char **av) { + int arr[] = { 1 }; + void *t = NULL; + qsort_r((void*)&arr[0], 1, 1, t, dcomp); + return (0); + }""" + if not cpp.links(bsd_qsort_r, extra_args=cpp_flags)[0]: + results.cpp_links_fails.add("qsort_r") + + functions_to_detect = { + "strtof": "", + "mkostemp": "", + "memfd_create": "#include ", + "flock": "", + "strtok_r": "", + "getrandom": "", + 
"qsort_s": "", + "posix_fallocate": "", + "secure_getenv": "", + "sysconf": "#include ", + "thrd_create": "#include ", + "pthread_setaffinity_np": "#include ", + "reallocarray": "", + "fmemopen": "", + "dladdr": "", + "dl_iterate_phdr": "", + "clock_gettime": "", + "__builtin_add_overflow": "", + "__builtin_add_overflow_p": "", + "__builtin_sub_overflow_p": "", + } + for f, prefix in functions_to_detect.items(): + if f == "thrd_create" and "threads.h" in results.c_headers_fails: + results.c_functions_fails.append(f) + continue + if not cc.has_function(f, prefix, extra_args=c_flags)[0]: + results.c_functions_fails.append(f) + + if not cc.has_multi_link_arguments(["-Wl,--build-id=sha1"])[0]: + results.c_supported_link_arguments_fails.append("-Wl,--build-id=sha1") + + if cc.get_argument_syntax() != "msvc": + _trial_c = [ + "-Werror=implicit-function-declaration", + "-Werror=missing-prototypes", + "-Werror=return-type", + "-Werror=empty-body", + "-Werror=gnu-empty-initializer", + "-Werror=incompatible-pointer-types", + "-Werror=int-conversion", + "-Werror=pointer-arith", + "-Werror=vla", + "-Wimplicit-fallthrough", + "-Wmisleading-indentation", + "-Wno-missing-field-initializers", + "-Wno-format-truncation", + "-fno-math-errno", + "-fno-trapping-math", + "-Qunused-arguments", + "-fno-common", + "-Wno-initializer-overrides", + "-Wno-override-init", + "-Wno-unknown-pragmas", + "-Wno-microsoft-enum-value", + "-Wno-unused-function", + "-Wno-nonnull-compare", + "-Werror=format", + "-Wformat-security", + "-Werror=thread-safety", + "-ffunction-sections", + "-fdata-sections", + ] + _trial_cpp = [ + "-Werror=return-type", + "-Werror=empty-body", + "-Wmisleading-indentation", + "-Wno-non-virtual-dtor", + "-Wno-missing-field-initializers", + "-flifetime-dse=1", + "-Wno-format-truncation", + "-fno-math-errno", + "-fno-trapping-math", + "-Qunused-arguments", + "-Wno-unknown-pragmas", + "-Wno-microsoft-enum-value", + "-Werror=format", + "-Wformat-security", + "-ffunction-sections", 
+ "-fdata-sections", + "-Werror=pointer-arith", + "-Werror=vla", + "-Werror=gnu-empty-initializer", + ] + for arg in _trial_c: + args = [arg] if arg.startswith("-Werror=") else ["-Werror", arg] + if not cc.has_multi_arguments(args)[0]: + results.c_supported_arguments_fails.append(arg) + for arg in _trial_cpp: + args = [arg] if arg.startswith("-Werror=") else ["-Werror", arg] + if not cpp.has_multi_arguments(args)[0]: + results.cpp_supported_arguments_fails.append(arg) + + return results diff --git a/mesonbuild/check_toolchain/defs.py b/mesonbuild/check_toolchain/defs.py new file mode 100644 index 000000000000..886d3a7b8a6d --- /dev/null +++ b/mesonbuild/check_toolchain/defs.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Authors + +from __future__ import annotations +import typing as T +from dataclasses import dataclass, field + + +@dataclass +class HostMachine: + cpu_family: str + cpu: str + system: str + endian: str + + +@dataclass +class CompilerInfo: + compiler_id: str + linker_id: str + version: str + + +@dataclass +class WrapInfo: + source_url: str + source_filename: str + source_hash: str + binaries: T.Dict[str, str] + + +@dataclass +class Toolchain: + name: str + host_machine: HostMachine + c: CompilerInfo + cpp: CompilerInfo + rust: T.Optional[CompilerInfo] = None + wrap: T.Optional[WrapInfo] = None + compilers_wrap: T.Optional[WrapInfo] = None + sysroot_wrap: T.Optional[WrapInfo] = None + c_compiles_fails: T.List[str] = field(default_factory=list) + c_links_fails: T.List[str] = field(default_factory=list) + c_headers_fails: T.List[str] = field(default_factory=list) + c_header_symbols_fails: T.Dict[str, T.List[str]] = field(default_factory=dict) + c_functions_fails: T.List[str] = field(default_factory=list) + c_function_attributes_fails: T.List[str] = field(default_factory=list) + c_members_fails: T.Dict[str, T.List[str]] = field(default_factory=dict) + c_supported_arguments_fails: T.List[str] 
= field(default_factory=list) + c_supported_link_arguments_fails: T.List[str] = field(default_factory=list) + cpp_links_fails: T.Set[str] = field(default_factory=set) + cpp_supported_arguments_fails: T.List[str] = field(default_factory=list) diff --git a/mesonbuild/check_toolchain/emitter.py b/mesonbuild/check_toolchain/emitter.py new file mode 100644 index 000000000000..558320faa4b6 --- /dev/null +++ b/mesonbuild/check_toolchain/emitter.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Authors + +from __future__ import annotations +import typing as T +import argparse +import sys + +from .defs import Toolchain + + +class ToolchainEmitter: + def __init__(self, toolchains: T.List[Toolchain], args: argparse.Namespace): + self.toolchains = toolchains + self.args = args + + def emit(self, output_filename: str) -> None: + """ + Serializes the collected toolchain data into a TOML file. + """ + output = [] + cmd_args = sys.argv[1:] + output.append("# Copyright 2026 The Meson Development Team") + output.append("# SPDX-License-Identifier-Apache-2.0") + output.append(f"\n# Generated via meson {' '.join(cmd_args)}\n") + + if not self.toolchains: + return + + # Take global wrap info from the first toolchain + tc = self.toolchains[0] + + if tc.compilers_wrap: + output.append("[compiler_binaries.wrap]") + output.append(f'source_url = "{tc.compilers_wrap.source_url}"') + output.append(f'source_filename = "{tc.compilers_wrap.source_filename}"') + output.append(f'source_hash = "{tc.compilers_wrap.source_hash}"') + + output.append("\n[compiler_binaries.binary_paths]") + for name, path in tc.compilers_wrap.binaries.items(): + output.append(f'{name} = "{path}"') + output.append("") + + if tc.sysroot_wrap: + output.append("[sysroot.wrap]") + output.append(f'source_url = "{tc.sysroot_wrap.source_url}"') + output.append(f'source_filename = "{tc.sysroot_wrap.source_filename}"') + output.append(f'source_hash = 
"{tc.sysroot_wrap.source_hash}"') + output.append("") + + for i, toolchain in enumerate(self.toolchains): + output.append("[[toolchain]]") + output.append(f'name = "{toolchain.name}"') + + output.append("\n[toolchain.host_machine]") + output.append(f'cpu_family = "{toolchain.host_machine.cpu_family}"') + output.append(f'cpu = "{toolchain.host_machine.cpu}"') + output.append(f'system = "{toolchain.host_machine.system}"') + output.append(f'endian = "{toolchain.host_machine.endian}"') + + output.append("\n[toolchain.c]") + output.append(f'compiler_id = "{toolchain.c.compiler_id}"') + output.append(f'linker_id = "{toolchain.c.linker_id}"') + output.append(f'version = "{toolchain.c.version}"') + + output.append("\n[toolchain.cpp]") + output.append(f'compiler_id = "{toolchain.cpp.compiler_id}"') + output.append(f'linker_id = "{toolchain.cpp.linker_id}"') + output.append(f'version = "{toolchain.cpp.version}"') + + if toolchain.rust: + output.append("\n[toolchain.rust]") + output.append(f'compiler_id = "{toolchain.rust.compiler_id}"') + output.append(f'linker_id = "{toolchain.rust.linker_id}"') + output.append(f'version = "{toolchain.rust.version}"') + + if toolchain.c_compiles_fails: + output.append("\n[toolchain.c.compiles.fails]") + for item in toolchain.c_compiles_fails: + output.append(f'"{item}" = true') + + if toolchain.c_links_fails: + output.append("\n[toolchain.c.links.fails]") + for item in toolchain.c_links_fails: + output.append(f'"{item}" = true') + + if toolchain.c_headers_fails: + output.append("\n[toolchain.c.check_header.fails]") + for item in toolchain.c_headers_fails: + output.append(f'"{item}" = true') + + if toolchain.c_header_symbols_fails: + output.append("\n[toolchain.c.has_header_symbol.fails]") + for header, symbols in toolchain.c_header_symbols_fails.items(): + symbol_str = ", ".join([f"{s} = true" for s in symbols]) + output.append(f'"{header}" = {{ {symbol_str} }}') + + if toolchain.c_functions_fails: + 
output.append("\n[toolchain.c.has_function.fails]") + for func in toolchain.c_functions_fails: + output.append(f"{func} = true") + + if toolchain.c_function_attributes_fails: + output.append("\n[toolchain.c.has_function_attribute.fails]") + for attr in toolchain.c_function_attributes_fails: + output.append(f'"{attr}" = true') + if toolchain.c_members_fails: + output.append("\n[toolchain.c.has_member.fails]") + for struct, members in toolchain.c_members_fails.items(): + member_str = ", ".join([f"{m} = true" for m in members]) + output.append(f'"{struct}" = {{ {member_str} }}') + + if toolchain.c_supported_arguments_fails: + output.append("\n[toolchain.c.supported_arguments.fails]") + output.append("args = [") + for arg in toolchain.c_supported_arguments_fails: + output.append(f' "{arg}",') + output.append("]") + + if toolchain.c_supported_link_arguments_fails: + output.append("\n[toolchain.c.supported_link_arguments.fails]") + output.append("args = [") + for arg in toolchain.c_supported_link_arguments_fails: + output.append(f' "{arg}",') + output.append("]") + if toolchain.cpp_links_fails: + output.append("\n[toolchain.cpp.links.fails]") + for item in toolchain.cpp_links_fails: + output.append(f'"{item}" = true') + if toolchain.cpp_supported_arguments_fails: + output.append("\n[toolchain.cpp.supported_arguments.fails]") + output.append("args = [") + for arg in toolchain.cpp_supported_arguments_fails: + output.append(f' "{arg}",') + output.append("]") + + if i < len(self.toolchains) - 1: + output.append("") + + output_content = "\n".join(output) + + with open(output_filename, "w", encoding="utf-8") as f: + f.write(output_content) diff --git a/mesonbuild/check_toolchain/fuchsia.py b/mesonbuild/check_toolchain/fuchsia.py new file mode 100644 index 000000000000..d8155d590bc8 --- /dev/null +++ b/mesonbuild/check_toolchain/fuchsia.py @@ -0,0 +1,193 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Authors + +from __future__ import annotations +import 
@dataclass
class FuchsiaConfig:
    """Describes one Fuchsia target architecture to run compiler checks for."""

    name: str          # toolchain name reported in the output TOML
    target: str        # clang '--target' triple
    cpu_family: str    # meson host_machine cpu_family
    cpu: str           # meson host_machine cpu
    endian: str        # meson host_machine endianness
    sysroot_arch: str  # arch directory name inside the Fuchsia SDK ('x64', 'arm64', ...)


FUCHSIA_CROSS_FILE_TEMPLATE = """
[binaries]
c = ['{cc_path}', '--target={target}', '--sysroot={sysroot}']
cpp = ['{cpp_path}', '--target={target}', '--sysroot={sysroot}']
ar = '{ar_path}'
strip = '{strip_path}'
c_ld = 'lld'
cpp_ld = 'lld'

[host_machine]
system = 'fuchsia'
cpu_family = '{cpu_family}'
cpu = '{cpu}'
endian = '{endian}'
"""


def _sha256_of_file(path: str) -> str:
    """Return the hex SHA-256 digest of the file at *path*, read in chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(4096), b""):
            digest.update(chunk)
    return digest.hexdigest()


def _download_and_extract(
    label: str, url: str, archive_path: str, extract_path: str
) -> str:
    """Download *url* to *archive_path*, extract into *extract_path*.

    Returns the archive's SHA-256 hex digest (recorded in the wrap info
    so converted builds can verify the same bits).

    Extraction deliberately shells out to ``unzip`` instead of using the
    stdlib zipfile module: unzip preserves the executable permission bits
    of the toolchain binaries, which ``ZipFile.extractall()`` does not.
    """
    print(f"Downloading {label}: {url}...")
    urllib.request.urlretrieve(url, archive_path)
    archive_hash = _sha256_of_file(archive_path)
    os.makedirs(extract_path, exist_ok=True)
    subprocess.run(
        ["unzip", "-q", "-o", archive_path, "-d", extract_path], check=True
    )
    return archive_hash


def generate_fuchsia_toolchains(
    clang_instance_id: str,
    sdk_instance_id: str,
    run_compiler_checks_callback: T.Callable[
        [str, str, T.List[str], T.List[str]], Toolchain]) -> T.List[Toolchain]:  # fmt: skip
    """
    Downloads a Fuchsia Clang toolchain and Core SDK to perform compiler checks.

    This function fetches both the toolchain and the SDK from Chrome Infrastructure
    Package Deployment (CIPD).

    It identifies the sysroot paths within the SDK for each target architecture
    and uses them to generate accurate compiler checks.

    :param clang_instance_id: CIPD instance id of the linux-amd64 Clang package.
    :param sdk_instance_id: CIPD instance id of the linux-amd64 Core SDK package.
    :param run_compiler_checks_callback: called with (cross_file_path, name, [], [])
        and expected to return a populated Toolchain.
    :return: one Toolchain per supported Fuchsia architecture.
    """
    clang_url = f"https://chrome-infra-packages.appspot.com/dl/fuchsia/third_party/clang/linux-amd64/+/{clang_instance_id}"
    sdk_url = f"https://chrome-infra-packages.appspot.com/dl/fuchsia/sdk/core/linux-amd64/+/{sdk_instance_id}"

    with tempfile.TemporaryDirectory() as temp_dir:
        clang_extract_path = os.path.join(temp_dir, "clang_extract")
        clang_hash = _download_and_extract(
            "Clang",
            clang_url,
            os.path.join(temp_dir, "fuchsia-clang.zip"),
            clang_extract_path,
        )

        sdk_extract_path = os.path.join(temp_dir, "sdk_extract")
        sdk_hash = _download_and_extract(
            "SDK",
            sdk_url,
            os.path.join(temp_dir, "fuchsia-sdk.zip"),
            sdk_extract_path,
        )

        # Relative paths inside the Clang archive, recorded in the wrap info.
        wrap_binaries = {
            "cc": "bin/clang",
            "cpp": "bin/clang++",
            "ld": "bin/ld.lld",
            "ar": "bin/llvm-ar",
            "nm": "bin/llvm-nm",
            "objcopy": "bin/llvm-objcopy",
            "objdump": "bin/llvm-objdump",
            "gcov": "bin/llvm-cov",
            "strip": "bin/llvm-strip",
            "as": "bin/llvm-as",
            "toolchain_id": "clang-fuchsia",
        }

        configurations = [
            FuchsiaConfig(
                name="fuchsia_x86_64",
                target="x86_64-unknown-fuchsia",
                cpu_family="x86_64",
                cpu="x86_64",
                endian="little",
                sysroot_arch="x64",
            ),
            FuchsiaConfig(
                name="fuchsia_aarch64",
                target="aarch64-unknown-fuchsia",
                cpu_family="aarch64",
                cpu="aarch64",
                endian="little",
                sysroot_arch="arm64",
            ),
            FuchsiaConfig(
                name="fuchsia_riscv",
                target="riscv64-unknown-fuchsia",
                cpu_family="riscv64",
                cpu="riscv64",
                endian="little",
                sysroot_arch="riscv64",
            ),
        ]

        # One Clang serves every target; the tool paths are loop-invariant.
        cc_path = os.path.join(clang_extract_path, "bin", "clang")
        cpp_path = os.path.join(clang_extract_path, "bin", "clang++")
        ar_path = os.path.join(clang_extract_path, "bin", "llvm-ar")
        strip_path = os.path.join(clang_extract_path, "bin", "llvm-strip")

        toolchains: T.List[Toolchain] = []
        for config in configurations:
            sysroot_path = os.path.join(
                sdk_extract_path, "arch", config.sysroot_arch, "sysroot"
            )
            if not os.path.exists(sysroot_path):
                # Best effort: keep going so the remaining targets still work.
                print(f"Warning: sysroot not found for {config.name} at {sysroot_path}")

            cross_content = FUCHSIA_CROSS_FILE_TEMPLATE.format(
                cc_path=cc_path,
                cpp_path=cpp_path,
                ar_path=ar_path,
                strip_path=strip_path,
                target=config.target,
                sysroot=sysroot_path,
                cpu_family=config.cpu_family,
                cpu=config.cpu,
                endian=config.endian,
            )

            temp_cross_file = None
            try:
                # delete=False so the callback can reopen the file by name.
                with tempfile.NamedTemporaryFile(
                    mode="w", delete=False, suffix=".txt"
                ) as tf:
                    tf.write(cross_content)
                    temp_cross_file = tf.name

                t = run_compiler_checks_callback(temp_cross_file, config.name, [], [])

                # The Fuchsia SDK does have memfd_create, but not MFD_CLOEXEC | MFD_ALLOW_SEALING.
                # Those are so common we might as well report not having memfd.
                if "memfd_create" not in t.c_functions_fails:
                    t.c_functions_fails.append("memfd_create")

                t.compilers_wrap = WrapInfo(
                    source_url=clang_url,
                    source_filename="fuchsia-clang-linux-amd64.zip",
                    source_hash=clang_hash,
                    binaries=wrap_binaries,
                )
                t.sysroot_wrap = WrapInfo(
                    source_url=sdk_url,
                    source_filename="fuchsia-sdk-core-linux-amd64.zip",
                    source_hash=sdk_hash,
                    binaries={},
                )
                toolchains.append(t)
            finally:
                if temp_cross_file:
                    os.unlink(temp_cross_file)

    return toolchains
+ if "memfd_create" not in t.c_functions_fails: + t.c_functions_fails.append("memfd_create") + + t.compilers_wrap = WrapInfo( + source_url=clang_url, + source_filename="fuchsia-clang-linux-amd64.zip", + source_hash=clang_hash, + binaries=wrap_binaries, + ) + t.sysroot_wrap = WrapInfo( + source_url=sdk_url, + source_filename="fuchsia-sdk-core-linux-amd64.zip", + source_hash=sdk_hash, + binaries={}, + ) + toolchains.append(t) + finally: + if temp_cross_file: + os.unlink(temp_cross_file) + + return toolchains diff --git a/mesonbuild/convert/__init__.py b/mesonbuild/convert/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mesonbuild/convert/abstract/__init__.py b/mesonbuild/convert/abstract/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/mesonbuild/convert/abstract/abstract_dependencies.py b/mesonbuild/convert/abstract/abstract_dependencies.py new file mode 100644 index 000000000000..f89e732b0731 --- /dev/null +++ b/mesonbuild/convert/abstract/abstract_dependencies.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T + + +class AbstractDependencies: + """Wrapper around the dependencies TOML data""" + + def __init__(self, dependencies_data: T.Any): + self._data = dependencies_data + + @property + def shared_libraries(self) -> T.Any: + return self._data.get("shared_libraries", {}) + + @property + def static_libraries(self) -> T.Any: + return self._data.get("static_libraries", {}) + + @property + def header_libraries(self) -> T.Any: + return self._data.get("header_libraries", {}) + + @property + def programs(self) -> T.Any: + return self._data.get("programs", {}) + + @property + def python_libraries(self) -> T.Dict[str, str]: + return T.cast(T.Dict[str, str], self._data.get("python_libraries", {})) diff --git a/mesonbuild/convert/abstract/abstract_toolchain.py 
class AbstractCompiler(Compiler):
    """Base class for compilers in the convert tool, simulating compiler checks.

    No real compiler is ever executed.  Every check method is answered from
    ``conf`` — the per-language tables of a check-toolchain TOML file: a
    probe listed under the matching ``*.fails`` table reports failure, and
    anything not listed is assumed to succeed.
    """

    def __init__(self, conf: T.Dict[T.Any, T.Any], *args: T.Any, **kwargs: T.Any):
        # Per-language check tables parsed from the toolchain TOML.
        self.conf = conf
        super().__init__(*args, **kwargs)

    def get_options(self) -> options.MutableKeyedOptionDictType:
        """Expose the standard per-compiler 'args'/'link_args' user options."""
        opts = super().get_options()
        key = self.form_compileropt_key("args")
        opts[key] = options.UserStringArrayOption(
            self.make_option_name(key),
            "Extra arguments passed to the compiler",
            [],
        )
        key = self.form_compileropt_key("link_args")
        opts[key] = options.UserStringArrayOption(
            self.make_option_name(key),
            "Extra arguments passed to the linker",
            [],
        )
        return opts

    def find_library(
        self,
        libname: str,
        extra_dirs: T.List[str],
        libtype: LibType = LibType.PREFER_SHARED,
        lib_prefix_warning: bool = True,
        ignore_system_dirs: bool = False,
        skip_link_check: bool = False,
    ) -> T.Optional[T.List[str]]:
        """Simulated library lookup: only librt is reported as present."""
        if libname == "rt":
            # Empty link-argument list entry: the library "exists" but needs
            # no explicit argument.  All other lookups report not-found.
            return [""]
        return None

    def has_header_symbol(
        self,
        hname: str,
        symbol: str,
        prefix: str,
        *,
        extra_args: T.Union[
            T.List[str], T.Callable[[CompileCheckMode], T.List[str]], None
        ] = None,
        dependencies: T.Optional[T.List[Dependency]] = None,
    ) -> T.Tuple[bool, bool]:
        """Answer from [*.has_header_symbol.fails]; (result, cached=True)."""
        fails = self.conf.get("has_header_symbol", {}).get("fails", {})
        header_symbols_fails = fails.get(hname, {})
        return (symbol not in header_symbols_fails, True)

    def has_multi_arguments(
        self, args: T.List[str], *a: T.Any, **kw: T.Any
    ) -> T.Tuple[bool, bool]:
        """True only if none of *args* is in [*.supported_arguments.fails]."""
        fails = (
            self.conf.get("supported_arguments", {}).get("fails", {}).get("args", [])
        )
        return (all(a not in fails for a in args), True)

    def has_multi_link_arguments(
        self, args: T.List[str], *a: T.Any, **kw: T.Any
    ) -> T.Tuple[bool, bool]:
        """Linker-argument variant of has_multi_arguments."""
        fails = (
            self.conf.get("supported_link_arguments", {})
            .get("fails", {})
            .get("args", [])
        )
        return (all(a not in fails for a in args), True)

    def compiles(
        self,
        code: T.Union[File, str],
        *,
        extra_args: T.Union[
            T.List[str],
            CompilerArgs,
            T.Callable[[CompileCheckMode], T.List[str]],
            None,
        ] = None,
        dependencies: T.Optional[T.List[Dependency]] = None,
        mode: CompileCheckMode = CompileCheckMode.COMPILE,
        disable_cache: bool = False,
    ) -> T.Tuple[bool, bool]:
        """Fail iff a [*.compiles.fails] marker occurs in the code snippet."""
        fails = self.conf.get("compiles", {}).get("fails", {})
        # NOTE(review): annotation says File|str, but a list of lines is also
        # handled here — confirm which callers pass lists.
        if isinstance(code, list):
            snippet_str = "\n".join(code)
        elif not isinstance(code, str):
            # We don't have the file contents, so we assume it compiles
            return (True, True)
        else:
            snippet_str = code

        for failure_marker in fails:
            if failure_marker in snippet_str:
                return (False, True)
        return (True, True)

    def get_supported_function_attributes(self, attributes: T.List[str]) -> T.List[str]:
        """Filter out attributes listed in [*.has_function_attribute.fails]."""
        fails = self.conf.get("has_function_attribute", {}).get("fails", {})

        return [a for a in attributes if a not in fails]

    def has_func_attribute(self, name: str) -> T.Tuple[bool, bool]:
        """Single-attribute variant of get_supported_function_attributes."""
        fails = self.conf.get("has_function_attribute", {}).get("fails", {})

        return name not in fails, True

    def has_function(
        self,
        funcname: str,
        prefix: str,
        *,
        extra_args: T.Optional[T.List[str]] = None,
        dependencies: T.Optional[T.List[Dependency]] = None,
    ) -> T.Tuple[bool, bool]:
        """Answer from [*.has_function.fails]."""
        fails = self.conf.get("has_function", {}).get("fails", {})
        return (funcname not in fails, True)

    def cross_compute_int(
        self,
        expression: str,
        low: T.Optional[int],
        high: T.Optional[int],
        upper: T.Optional[int],
    ) -> int:
        # Always 0: without running a compiler no value can be computed.
        # NOTE(review): parameter names differ from upstream Compiler's
        # cross_compute_int — confirm the intended signature.
        return 0

    def get_default_include_dirs(self) -> T.List[str]:
        # No real compiler, hence no default include search path.
        return []

    def get_define(self, *args: T.Any, **kwargs: T.Any) -> T.Tuple[str, bool]:
        # (value, found): defines are never found in simulation.
        return ("", False)

    def thread_flags(self, *args: T.Any, **kwargs: T.Any) -> T.List[str]:
        return []

    def thread_link_flags(self, *args: T.Any, **kwargs: T.Any) -> T.List[str]:
        return []

    def links(
        self,
        code: T.Union[str, File],
        *,
        compiler: T.Optional[Compiler] = None,
        extra_args: T.Union[
            None,
            T.List[str],
            CompilerArgs,
            T.Callable[[CompileCheckMode], T.List[str]],
        ] = None,
        dependencies: T.Optional[T.List[Dependency]] = None,
        disable_cache: bool = False,
    ) -> T.Tuple[bool, bool]:
        """Fail iff a [*.links.fails] marker occurs in the code snippet."""
        fails = self.conf.get("links", {}).get("fails", {})
        if not isinstance(code, str):
            # File contents unavailable; assume the snippet links.
            return (True, True)

        for failure_marker in fails:
            if failure_marker in code:
                return (False, True)
        return (True, True)

    def check_header(
        self,
        hname: str,
        prefix: str,
        *,
        extra_args: T.Union[
            None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]
        ] = None,
        dependencies: T.Optional[T.List[Dependency]] = None,
    ) -> T.Tuple[bool, bool]:
        """Answer from [*.check_header.fails]."""
        fails = self.conf.get("check_header", {}).get("fails", {})
        return (hname not in fails, True)

    def has_member(
        self,
        typename: str,
        membername: str,
        prefix: str,
        *,
        extra_args: T.Union[
            None, T.List[str], T.Callable[[CompileCheckMode], T.List[str]]
        ] = None,
        dependencies: T.Optional[T.List[Dependency]] = None,
    ) -> T.Tuple[bool, bool]:
        """Answer from [*.has_member.fails], keyed by struct/type name."""
        fails = self.conf.get("has_member", {}).get("fails", {})
        type_members_fails = fails.get(typename, {})
        return (membername not in type_members_fails, True)

    @contextlib.contextmanager
    def compile(
        self, *args: T.Any, **kwargs: T.Any
    ) -> T.Generator[CompileResult, None, None]:
        # Yield a blank, successful-looking CompileResult so callers that
        # inspect compiler output keep working without a real invocation.
        yield CompileResult("", "", [], 0, "")

    def sanity_check(self, work_dir: str) -> None:
        # Nothing to sanity-check: there is no real toolchain behind this.
        pass


# Only support Clang C Compiler for now
class AbstractClangCCompiler(AbstractCompiler, ClangCCompiler):
    """Abstract Clang C Compiler for convert tool."""

    def __init__(
        self, conf: T.Dict[str, T.Any], choice: MachineChoice, env: Environment
    ):
        version = conf.get("version")
        linker_id = conf.get("linker_id")
        # A harmless, always-present executable stands in for the compiler.
        exelist = ["/usr/bin/true"]
        linker = None
        if linker_id:
            # Fake path carrying the linker id; the linker is never executed.
            linker = GnuBFDDynamicLinker(
                [f"/dev/null/{linker_id}"], env, choice, "", []
            )
        super().__init__(
            conf,
            [],
            exelist,
            version,
            choice,
            env,
            linker=linker,
            full_version=version,
        )


# Only support Clang C++ Compiler for now
class AbstractClangCppCompiler(AbstractCompiler, ClangCPPCompiler):
    """Abstract Clang C++ Compiler for convert tool."""

    def __init__(
        self, conf: T.Dict[str, T.Any], choice: MachineChoice, env: Environment
    ):
        version = conf.get("version")
        linker_id = conf.get("linker_id")
        # A harmless, always-present executable stands in for the compiler.
        exelist = ["/usr/bin/true"]
        linker = None
        if linker_id:
            # Fake path carrying the linker id; the linker is never executed.
            linker = GnuBFDDynamicLinker(
                [f"/dev/null/{linker_id}"], env, choice, "", []
            )
        super().__init__(
            conf,
            [],
            exelist,
            version,
            choice,
            env,
            linker=linker,
            full_version=version,
        )


class AbstractRustCompiler(AbstractCompiler, RustCompiler):
    """Abstract Rust Compiler for convert tool."""

    def __init__(
        self, conf: T.Dict[str, T.Any], choice: MachineChoice, env: Environment
    ):
        version = conf.get("version")
        # A harmless, always-present executable stands in for rustc.
        exelist = ["/usr/bin/true"]
        super().__init__(
            conf,
            exelist,
            version,
            choice,
            env,
            full_version=version,
        )
        # Normally discovered during sanity_check; no check runs here, so
        # leave it empty — NOTE(review): confirm downstream users accept [].
        self.native_static_libs: T.List[str] = []
class AbstractToolchainWrap:
    """Holds information about the toolchain archive and its contents.

    Built from a ``wrap`` table of the toolchain TOML:
    ``source_url``/``source_hash``/``source_filename`` describe the archive
    and ``binaries`` maps tool names to paths inside it.
    """

    def __init__(self, wrap_config: T.Dict[str, T.Any]):
        self.url = wrap_config.get("source_url")
        self.sha256 = wrap_config.get("source_hash")
        self.filename = wrap_config.get("source_filename")
        self.binaries = wrap_config.get("binaries", {})
        # Archive top-level directory to strip, derived from the filename
        # (e.g. 'tc-1.0.tar.gz' -> 'tc-1.0').
        self.strip_prefix = ""
        if self.filename:
            for ext in (".tar.gz", ".tar.xz", ".zip"):
                if self.filename.endswith(ext):
                    self.strip_prefix = self.filename[: -len(ext)]
                    break
            else:
                # Unknown extension: keep everything before the first dot.
                self.strip_prefix = self.filename.split(".")[0]


class AbstractToolchainInfo:
    """Holds information about the build and host machines for a toolchain."""

    def __init__(
        self,
        build_machine: str,
        host_machine: str,
        toolchain_config: T.Dict[str, T.Any],
        global_config: T.Optional[T.Dict[str, T.Any]] = None,
    ):
        self.name = host_machine
        # Toolchain *names* per machine (build, host) — not compiler objects.
        self.toolchains = PerMachine(build_machine, host_machine)
        self.machine_info = PerMachine(
            MachineInfo.from_literal(
                toolchain_config.get(build_machine, {}).get("host_machine", {})
            ),
            MachineInfo.from_literal(
                toolchain_config.get(host_machine, {}).get("host_machine", {})
            ),
        )
        wrap_config = toolchain_config.get(host_machine, {}).get("wrap")
        self.wrap = AbstractToolchainWrap(wrap_config) if wrap_config else None

        # Optional archives shared by the whole config file.
        self.compilers_wrap = None
        self.sysroot_wrap = None
        if global_config:
            comp_wrap_config = global_config.get("compiler_binaries", {}).get("wrap")
            if comp_wrap_config:
                self.compilers_wrap = AbstractToolchainWrap(comp_wrap_config)
                # The compilers wrap has binaries at binary_paths in the TOML
                self.compilers_wrap.binaries = global_config.get(
                    "compiler_binaries", {}
                ).get("binary_paths", {})

            sysroot_wrap_config = global_config.get("sysroot", {}).get("wrap")
            if sysroot_wrap_config:
                self.sysroot_wrap = AbstractToolchainWrap(sysroot_wrap_config)

    def host_supported(self) -> bool:
        """True when host and build use the same toolchain name (native)."""
        return (
            self.toolchains[MachineChoice.HOST] == self.toolchains[MachineChoice.BUILD]
        )


class AbstractToolchain:
    """Represents a toolchain configuration for the convert tool."""

    def __init__(
        self,
        env: T.Any,
        host_machine_toolchain: str,
        build_machine_toolchain: str,
        toolchain_config: T.Dict[str, T.Any],
        global_config: T.Optional[T.Dict[str, T.Any]] = None,
    ):
        self.env = env
        self.toolchains: T.Dict[MachineChoice, T.Dict[str, T.Any]] = {}
        self.toolchain_info = AbstractToolchainInfo(
            build_machine_toolchain,
            host_machine_toolchain,
            toolchain_config,
            global_config,
        )
        # Fall back to an empty table when a machine has no entry in the
        # TOML, so the create_*_compiler helpers below return None instead
        # of raising AttributeError on None.get("c").
        self.toolchains[MachineChoice.HOST] = (
            toolchain_config.get(host_machine_toolchain) or {}
        )
        self.toolchains[MachineChoice.BUILD] = (
            toolchain_config.get(build_machine_toolchain) or {}
        )

    def create_c_compiler(
        self, choice: MachineChoice
    ) -> T.Optional[AbstractClangCCompiler]:
        """Build a simulated C compiler from the [.c] table, or None."""
        c_info = self.toolchains[choice].get("c")
        if not c_info:
            return None
        return AbstractClangCCompiler(c_info, choice, self.env)

    def create_cpp_compiler(
        self, choice: MachineChoice
    ) -> T.Optional[AbstractClangCppCompiler]:
        """Build a simulated C++ compiler from the [.cpp] table, or None."""
        cpp_info = self.toolchains[choice].get("cpp")
        if not cpp_info:
            return None
        return AbstractClangCppCompiler(cpp_info, choice, self.env)

    def create_rust_compiler(
        self, choice: MachineChoice
    ) -> T.Optional[AbstractRustCompiler]:
        """Build a simulated Rust compiler from the [.rust] table, or None."""
        rs_info = self.toolchains[choice].get("rust")
        if not rs_info:
            return None
        return AbstractRustCompiler(rs_info, choice, self.env)
a/mesonbuild/convert/build_systems/bazel/bzlmod_emitter.py b/mesonbuild/convert/build_systems/bazel/bzlmod_emitter.py new file mode 100644 index 000000000000..5a2c042daee7 --- /dev/null +++ b/mesonbuild/convert/build_systems/bazel/bzlmod_emitter.py @@ -0,0 +1,351 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +import os +import shutil + +from mesonbuild.mesonlib import MachineChoice +from mesonbuild.convert.build_systems.common import ConvertStateTracker +from mesonbuild.convert.build_systems.bazel.state import BazelBackend + +BAZEL_MODULE_TEMPLATE = """\ +module(name = "{project_name}", version = "1.0") + +bazel_dep(name = "rules_cc", version = "0.2.17") +bazel_dep(name = "platforms", version = "1.0.0") +bazel_dep(name = "rules_license", version = "1.0.0") +bazel_dep(name = "rules_python", version = "1.7.0") + +meson_repos = use_extension("//bazel:toolchains.bzl", "meson_repos") + +{extension_usage} + +{use_repos} + +{register_toolchains} + +{python_setup} +""" + +PYTHON_MODULE_SETUP_TEMPLATE = """\ +python = use_extension("@rules_python//python/extensions:python.bzl", "python") +python.toolchain( + python_version = "3.10", + is_default = True, +) + +pip = use_extension("@rules_python//python/extensions:pip.bzl", "pip") +pip.parse( + hub_name = "meson_python_deps", + python_version = "3.10", + requirements_lock = "//bazel:requirements.txt", +) + +use_repo(pip, "meson_python_deps") +""" + +EXTENSION_TOOLCHAIN_TAG_TEMPLATE = """\ +meson_repos.repo( + name = "{name}", + url = "{url}", + sha256 = "{sha256}", + build_file = "{build_file}", + type = "{file_type}", +) +""" + +EXTENSION_SYSROOT_TAG_TEMPLATE = """\ +meson_repos.repo( + name = "{name}", + url = "{url}", + sha256 = "{sha256}", + build_file_content = 'filegroup(name = "all_files", srcs = glob(["**"]), visibility = ["//visibility:public"])', + type = "zip", +) +""" + 
+EXTENSION_GENERAL_TAG_TEMPLATE = """\ +meson_repos.repo( + name = "{name}", + url = "{url}", + sha256 = "{sha256}", +) +""" + +COMPILER_MAPPING_TEMPLATE = """\ +package(default_visibility = ["//visibility:public"]) + +filegroup( + name = "compiler_files", + srcs = glob(["**/*"]), +) + +filegroup( + name = "all_files", + srcs = [":compiler_files"] + {sysroot_files}, +) + +filegroup(name = "empty") + +load("@//bazel:toolchains.bzl", "define_meson_toolchain") + +define_meson_toolchain( + name = "{name}", + cpu = "{cpu}", + target_system_name = "{target_system_name}", + tool_paths = {{ + {tool_paths} + }}, + sysroot_repo = "{sysroot_repo}", + fuchsia_cpu = "{fuchsia_cpu}", +) +""" + +TOOLCHAINS_BUILD_TEMPLATE = """\ +toolchain( + name = "{name}_toolchain", + target_compatible_with = [ + "@platforms//cpu:{cpu}", + "@platforms//os:{os}", + ], + toolchain = "@{name}//:{name}_cc_toolchain", + toolchain_type = "@bazel_tools//tools/cpp:toolchain_type", +) +""" + +PLATFORMS_BUILD_TEMPLATE = """\ +package(default_visibility = ["//visibility:public"]) + +platform( + name = "{name}_platform", + constraint_values = [ + "@platforms//cpu:{cpu}", + "@platforms//os:{os}", + ], +) +""" + + +def _emit_python_requirements(output_dir: str, + state_tracker: ConvertStateTracker) -> bool: # fmt: skip + python_libraries = state_tracker.project_config.dependencies.python_libraries + if not python_libraries: + return False + + bazel_dir = os.path.join(output_dir, "bazel") + os.makedirs(bazel_dir, exist_ok=True) + + content = "" + for dep, version in sorted(python_libraries.items()): + content += f"{dep}=={version}\n" + + with open(os.path.join(bazel_dir, "requirements.txt"), "w", encoding="utf-8") as f: + f.write(content) + + # Ensure bazel/ is a package + with open(os.path.join(bazel_dir, "BUILD.bazel"), "w", encoding="utf-8") as f: + f.write("") + + return True + + +def _emit_toolchains_and_platforms( + output_dir: str, + state_tracker: ConvertStateTracker, + copyright_string: str) -> 
def _emit_toolchains_and_platforms(
    output_dir: str,
    state_tracker: ConvertStateTracker,
    copyright_string: str) -> T.Tuple[T.List[str], T.List[str], T.List[str]]:  # fmt: skip
    """Emit toolchain/platform BUILD files for every wrapped toolchain.

    Returns (extension_usage, register_toolchains, repos) fragments to be
    spliced into MODULE.bazel by the caller.
    """
    backend = T.cast(BazelBackend, state_tracker.backend)
    extension_usage: T.List[str] = []
    register_toolchains: T.List[str] = []
    repos: T.List[str] = []

    # Maps a Bazel tool_paths key to candidate tool names in the wrap TOML,
    # tried in order.  Loop-invariant, so defined once up front.
    # (A stray 'ccc' candidate was removed: no wrap TOML uses that key.)
    tool_mapping = [
        ("gcc", ["gcc", "cc"]),
        ("cpp", ["cpp"]),
        ("ld", ["ld"]),
        ("ar", ["ar"]),
        ("nm", ["nm"]),
        ("objcopy", ["objcopy"]),
        ("objdump", ["objdump"]),
        ("gcov", ["gcov"]),
        ("strip", ["strip"]),
        ("as", ["as"]),
    ]

    for dep in sorted(backend.external_deps, key=lambda x: x.repo):
        extension_usage.append(
            EXTENSION_GENERAL_TAG_TEMPLATE.format(
                name=dep.repo,
                url=dep.source_url,
                sha256=dep.source_hash or "",
            )
        )
        repos.append(f'"{dep.repo}"')

    toolchains_with_wrap = [
        tc for tc in state_tracker.all_toolchains if tc.compilers_wrap
    ]
    if not toolchains_with_wrap:
        # Nothing to register; external-dep fragments may still be non-empty.
        return (extension_usage, register_toolchains, repos)

    toolchains_build_content = copyright_string + "\n"
    toolchains_build_content += 'filegroup(name = "empty")\n\n'
    platforms_build_content = copyright_string + "\n"

    toolchains_dir = os.path.join(output_dir, "bazel", "toolchains")
    platforms_dir = os.path.join(output_dir, "bazel", "platforms")
    os.makedirs(toolchains_dir, exist_ok=True)
    os.makedirs(platforms_dir, exist_ok=True)

    for tc in toolchains_with_wrap:
        compilers_wrap = tc.compilers_wrap
        name = tc.name
        extension_usage.append(
            EXTENSION_TOOLCHAIN_TAG_TEMPLATE.format(
                name=name,
                url=compilers_wrap.url,
                sha256=compilers_wrap.sha256 or "",
                build_file=f"//bazel/toolchains:{name}_compiler.BUILD",
                file_type="zip",
            )
        )
        register_toolchains.append(
            f'register_toolchains("//bazel/toolchains:{name}_toolchain")'
        )
        repos.append(f'"{name}"')

        sysroot_files = "[]"
        if tc.sysroot_wrap:
            sysroot_name = f"{name}_sysroot"
            sysroot_wrap = tc.sysroot_wrap
            extension_usage.append(
                EXTENSION_SYSROOT_TAG_TEMPLATE.format(
                    name=sysroot_name,
                    url=sysroot_wrap.url,
                    sha256=sysroot_wrap.sha256 or "",
                )
            )
            sysroot_files = f'["@{sysroot_name}//:all_files"]'
            repos.append(f'"{sysroot_name}"')

        # Resolve each Bazel tool to the first matching wrap binary.
        binaries = compilers_wrap.binaries
        tool_paths_items = []
        for bazel_name, toml_names in tool_mapping:
            for toml_name in toml_names:
                if toml_name in binaries:
                    tool_paths_items.append(f'"{bazel_name}": "{binaries[toml_name]}"')
                    break

        tool_paths_str = ",\n        ".join(tool_paths_items)

        machine_info = tc.machine_info[MachineChoice.HOST]
        cpu = machine_info.cpu_family
        os_name = machine_info.system

        # Fuchsia uses 'x64' and 'arm64' in its SDK paths
        fuchsia_cpu = cpu
        if cpu == "x86_64":
            fuchsia_cpu = "x64"
        elif cpu == "aarch64":
            fuchsia_cpu = "arm64"

        # Bzlmod Resolution:
        # We use stable names and resolve actual paths in the define_meson_toolchain macro.
        sysroot_repo = f"@{name}_sysroot" if tc.sysroot_wrap else ""

        # Standard Fuchsia Triple
        # NOTE(review): the triple hardcodes '-fuchsia' while os_name comes
        # from the machine info — confirm for non-Fuchsia toolchains.
        target_triple = f"{cpu}-fuchsia"

        with open(
            os.path.join(toolchains_dir, f"{name}_compiler.BUILD"),
            "w",
            encoding="utf-8",
        ) as f:
            f.write(
                COMPILER_MAPPING_TEMPLATE.format(
                    name=name,
                    cpu=cpu,
                    os=os_name,
                    target_system_name=target_triple,
                    tool_paths=tool_paths_str,
                    sysroot_files=sysroot_files,
                    sysroot_repo=sysroot_repo,
                    fuchsia_cpu=fuchsia_cpu,
                )
            )

        toolchains_build_content += TOOLCHAINS_BUILD_TEMPLATE.format(
            name=name,
            cpu=machine_info.cpu_family,
            os=machine_info.system,
        )
        platforms_build_content += PLATFORMS_BUILD_TEMPLATE.format(
            name=name,
            cpu=machine_info.cpu_family,
            os=machine_info.system,
        )

    with open(os.path.join(toolchains_dir, "BUILD.bazel"), "w", encoding="utf-8") as f:
        f.write(toolchains_build_content)

    with open(os.path.join(platforms_dir, "BUILD.bazel"), "w", encoding="utf-8") as f:
        f.write(platforms_build_content)

    return (extension_usage, register_toolchains, repos)
def _emit_bazel_rules(output_dir: str) -> None:
    """Copy the custom Starlark rules file into <output_dir>/bazel/."""
    rules_dir = os.path.join(output_dir, "bazel")
    os.makedirs(rules_dir, exist_ok=True)

    # bazel/ must be a package: create an empty BUILD.bazel if absent.
    marker = os.path.join(rules_dir, "BUILD.bazel")
    if not os.path.exists(marker):
        with open(marker, "w", encoding="utf-8") as handle:
            handle.write("")

    source = os.path.join(os.path.dirname(__file__), "starlark", "meson_rules.bzl")
    shutil.copy(source, os.path.join(rules_dir, "meson_rules.bzl"))


def _emit_toolchain_extension(output_dir: str) -> None:
    """Copy the toolchain module-extension file into <output_dir>/bazel/."""
    rules_dir = os.path.join(output_dir, "bazel")
    os.makedirs(rules_dir, exist_ok=True)
    source = os.path.join(os.path.dirname(__file__), "starlark", "toolchains.bzl")
    shutil.copy(source, os.path.join(rules_dir, "toolchains.bzl"))


def _emit_module_bazel(
    output_dir: str,
    state_tracker: ConvertStateTracker,
    copyright_string: str) -> None:  # fmt: skip
    """Write MODULE.bazel along with its supporting bazel/ files."""
    extension_usage, register_toolchains, repos = _emit_toolchains_and_platforms(
        output_dir, state_tracker, copyright_string
    )
    _emit_bazel_rules(output_dir)
    _emit_toolchain_extension(output_dir)

    # Python toolchain/pip setup is only emitted when there are python deps.
    python_setup = (
        PYTHON_MODULE_SETUP_TEMPLATE
        if _emit_python_requirements(output_dir, state_tracker)
        else ""
    )

    use_repo_lines = [f"use_repo(meson_repos, {repo})" for repo in repos]

    rendered = BAZEL_MODULE_TEMPLATE.format(
        project_name=state_tracker.project_config.project_name or "meson_project",
        extension_usage="\n".join(extension_usage),
        use_repos="\n".join(use_repo_lines),
        register_toolchains="\n".join(register_toolchains),
        python_setup=python_setup,
    )

    module_path = os.path.join(output_dir, "MODULE.bazel")
    with open(module_path, "w", encoding="utf-8") as handle:
        handle.write(copyright_string + "\n" + rendered)
Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +from collections import defaultdict + +from mesonbuild.convert.build_systems.target import ( + ConvertAttr, + ConvertTarget, + ConvertTargetType, + ConvertAttrNode, +) +from mesonbuild.convert.build_systems.emitter import ( + ConvertEmitterBackend, + generic_emit_attribute_values, + COMMON_INDENT, +) +from mesonbuild.convert.build_systems.bazel.bzlmod_emitter import _emit_module_bazel + +if T.TYPE_CHECKING: + from mesonbuild.convert.build_systems.common import ConvertStateTracker + +COPYRIGHT_HEADER_TEMPLATE = """\ +# Copyright (C) 2025-2026 The Magma GPU Project +# SPDX-License-Identifier: Apache-2.0 +# +# Generated via: +# https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert +# +# Submit patches, do not hand-edit.""" + +LICENSE_BLOCK_TEMPLATE = """\ +package( + default_applicable_licenses = ["//:{root_license_name}"], + default_visibility = ["//visibility:public"], +)""" + +ROOT_LICENSE_TEMPLATE = """\ +license( + name = "{license_name}", + license_kinds = [ +{license_kinds} + ], +)""" + +BAZEL_ATTR_MAP = { + ConvertAttr.NAME: "name", + ConvertAttr.SRCS: "srcs", + ConvertAttr.INCLUDES: "export_include_dirs", + ConvertAttr.RUSTFLAGS: "rustc_flags", + ConvertAttr.OUT: "outs", + ConvertAttr.TOOLS: "tools", + ConvertAttr.PYTHON_MAIN: "main", + ConvertAttr.RUST_CRATE_NAME: "crate_name", + ConvertAttr.RUST_EDITION: "edition", + ConvertAttr.LDFLAGS: "linkopts", + ConvertAttr.BAZEL_DEPS: "deps", + ConvertAttr.BAZEL_FLAGS: "flags", + ConvertAttr.BAZEL_HDRS: "hdrs", +} + + +BAZEL_MODULE_MAP = { + ConvertTargetType.FILEGROUP: "filegroup", + ConvertTargetType.PYTHON_TARGET: "py_binary", + ConvertTargetType.CUSTOM_TARGET: "meson_genrule", + ConvertTargetType.INCLUDE_DIRECTORY: "meson_cc_headers", + ConvertTargetType.FLAG: "meson_cc_flags", + ConvertTargetType.RUST_FLAG: "meson_rust_flags", + ConvertTargetType.STATIC_LIBRARY: "meson_cc_library", + 
ConvertTargetType.SHARED_LIBRARY: "meson_cc_library", + ConvertTargetType.RUST_LIBRARY: "rust_library", + ConvertTargetType.RUST_FFI_STATIC: "rust_static_library", + ConvertTargetType.RUST_FFI_SHARED: "rust_shared_library", +} + + +BAZEL_LOAD_MAP = { + ConvertTargetType.PYTHON_TARGET: ( + "@rules_python//python:py_binary.bzl", + "py_binary", + ), + ConvertTargetType.CUSTOM_TARGET: ("//bazel:meson_rules.bzl", "meson_genrule"), + ConvertTargetType.STATIC_LIBRARY: ("//bazel:meson_rules.bzl", "meson_cc_library"), + ConvertTargetType.SHARED_LIBRARY: ("//bazel:meson_rules.bzl", "meson_cc_library"), + ConvertTargetType.RUST_LIBRARY: ("@rules_rust//rust:defs.bzl", "rust_library"), + ConvertTargetType.RUST_FFI_STATIC: ( + "@rules_rust//rust:defs.bzl", + "rust_static_library", + ), + ConvertTargetType.RUST_FFI_SHARED: ( + "@rules_rust//rust:defs.bzl", + "rust_shared_library", + ), + ConvertTargetType.INCLUDE_DIRECTORY: ( + "//bazel:meson_rules.bzl", + "meson_cc_headers", + ), + ConvertTargetType.FLAG: ("//bazel:meson_rules.bzl", "meson_cc_flags"), + ConvertTargetType.RUST_FLAG: ("//bazel:rust_rules.bzl", "meson_rust_flags"), +} + + +def _emit_python_aliases(python_libs: T.Dict[str, str]) -> str: + content = "" + for dep in sorted(list(python_libs)): + content += "alias(\n" + content += f' name = "{dep}",\n' + content += f' actual = "@meson_python_deps//{dep}",\n' + content += ' visibility = ["//visibility:public"],\n' + content += ")\n\n" + return content + + +class BazelEmitterBackend(ConvertEmitterBackend): + def emit_begin(self, output_dir: str, state_tracker: ConvertStateTracker) -> None: + _emit_module_bazel(output_dir, state_tracker, self.get_copyright_header({})) + + def get_attr_map(self) -> T.Dict[ConvertAttr, str]: + return BAZEL_ATTR_MAP + + def get_module_map(self) -> T.Dict[ConvertTargetType, str]: + return BAZEL_MODULE_MAP + + def get_attr_separator(self) -> str: + return " =" + + def get_opening_brace(self) -> str: + return "(" + + def get_closing_brace(self) 
-> str: + return ")" + + def get_build_file_name(self) -> str: + return "BUILD.bazel" + + def get_copyright_header(self, copyright_info: T.Dict[str, T.Any]) -> str: + return COPYRIGHT_HEADER_TEMPLATE + + def get_license_block( + self, copyright_info: T.Dict[str, T.Any], is_root: bool + ) -> str: + if "license_name" in copyright_info: + root_license_name = copyright_info["license_name"] + content = "" + if is_root: + license_kinds = "\n".join( + [ + f' "@rules_license//licenses/spdx:{lic}",' + for lic in copyright_info.get("licenses", []) + ] + ) + content += "\n\n" + ROOT_LICENSE_TEMPLATE.format( + license_name=root_license_name, + license_kinds=license_kinds, + ) + content += "\n\n" + LICENSE_BLOCK_TEMPLATE.format( + root_license_name=root_license_name + ) + return content + return "" + + def emit_extra_root_info(self, state_tracker: ConvertStateTracker) -> str: + content = "" + python_libs = state_tracker.project_config.dependencies.python_libraries + if python_libs: + content += _emit_python_aliases(python_libs) + return content + + def emit_module_load_info( + self, targets: T.List[ConvertTarget], is_root: bool + ) -> str: + file_to_rules = defaultdict(set) + if is_root: + file_to_rules["@rules_license//rules:license.bzl"].add("license") + + for t in targets: + if t.target_type in BAZEL_LOAD_MAP: + load_file, rule = BAZEL_LOAD_MAP[t.target_type] + file_to_rules[load_file].add(rule) + + if not file_to_rules: + return "" + + load_lines = [] + for load_file in sorted(file_to_rules.keys()): + rules = sorted(list(file_to_rules[load_file])) + rules_str = ", ".join([f'"{r}"' for r in rules]) + load_lines.append(f'load("{load_file}", {rules_str})') + + return "\n".join(load_lines) + "\n\n" + + def emit_special_target_info(self, target: ConvertTarget) -> str: + from mesonbuild.convert.build_systems.target import ConvertCustomTarget + + if isinstance(target, ConvertCustomTarget): + return f' cmd = "{getattr(target, "cmd", "")}",\n' + return "" + + def 
format_conditionals(self, indent: int, node: ConvertAttrNode) -> str: + content_str = "" + select_nodes = node.get_select_nodes() + if not select_nodes: + return content_str + + select_node = select_nodes[0] + content_str += "select({\n" + + value_indent = indent + COMMON_INDENT + indent_str = " " * value_indent + for select_values, attribute_values in select_node.select_tuples: + key = ":".join(select_values) + if key == "default": + key = "//conditions:default" + content_str += f'{indent_str}"{key}": {generic_emit_attribute_values(value_indent, attribute_values)},\n' + + content_str += " " * indent + "})" + return content_str diff --git a/mesonbuild/convert/build_systems/bazel/starlark/meson_rules.bzl b/mesonbuild/convert/build_systems/bazel/starlark/meson_rules.bzl new file mode 100644 index 000000000000..e4e1f9ffcfa6 --- /dev/null +++ b/mesonbuild/convert/build_systems/bazel/starlark/meson_rules.bzl @@ -0,0 +1,266 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +load("@rules_cc//cc:defs.bzl", "CcInfo", "cc_common") + +# This is the custom set of native Starlark rules used by the convert tool. +# +# Why meson_cc_flags? +# - The normal cc_library does not propagate copts ('-Werror'), but does +# propagate defines ('-DFOO`). meson's notion that both are strings to +# the compiler is a simpler and accurate model, so this codifies it. +# Why meson_cc_library? +# - So we can use meson_cc_flags. +# Why meson_cc_headers? +# - Meson's include_directories function is relative to current directory. +# cc_library's is relative to the MODULE.bazel workspace root. We need +# to do the remapping here. +# Why meson_genrule? +# - The normal genrule does not export_include_directories in the sandbox +# in which it was run. So it's impossible to #include generated_header in +# non-root directory locations. 
MesonFlagInfo = provider(fields = {
    "defines": "depset of compiler defines (-D)",
    "copts": "depset of compiler options",
    "linkopts": "depset of linker options",
})

def _add_bin_dir(ctx, path):
    """Adds the bazel-out (bin_dir) counterpart to a workspace-relative path."""
    return [path, ctx.bin_dir.path + "/" + path]

def _package_relative_includes(ctx, includes):
    """Processes includes relative to the current package's BUILD file."""
    package_path = ctx.label.package
    rebased = []
    for i in includes:
        if i == ".":
            path = package_path if package_path else "."
        else:
            path = package_path + "/" + i if package_path else i

        rebased.extend(_add_bin_dir(ctx, path))
    return rebased

def _workspace_relative_includes(ctx, includes):
    """Processes includes relative to the workspace root."""
    rebased = []
    for path in includes:
        path = path if path else "."
        rebased.extend(_add_bin_dir(ctx, path))
    return rebased

def _meson_cc_flags_impl(ctx):
    # Partition raw flag strings: -D* are defines, linker-looking flags
    # (-l, -L, -Wl,) are linkopts, everything else is a copt.
    defines, copts, linkopts = [], [], []
    for f in ctx.attr.flags:
        if f.startswith("-D"):
            defines.append(f[2:])
        elif f.startswith("-l") or f.startswith("-L") or f.startswith("-Wl,"):
            linkopts.append(f)
        else:
            copts.append(f)

    # Combine with attributes and transitive dependencies
    return [
        MesonFlagInfo(
            defines = depset(
                defines,
                transitive = [
                    d[MesonFlagInfo].defines for d in ctx.attr.deps if MesonFlagInfo in d
                ],
            ),
            copts = depset(
                copts,
                transitive = [
                    d[MesonFlagInfo].copts for d in ctx.attr.deps if MesonFlagInfo in d
                ],
            ),
            linkopts = depset(
                linkopts + ctx.attr.linkopts,
                transitive = [
                    d[MesonFlagInfo].linkopts for d in ctx.attr.deps if MesonFlagInfo in d
                ],
            ),
        ),
    ]

meson_cc_flags = rule(
    implementation = _meson_cc_flags_impl,
    attrs = {
        "flags": attr.string_list(),
        "linkopts": attr.string_list(),
        "deps": attr.label_list(),
    },
)

def _meson_cc_library_impl(ctx):
    cc_toolchain = ctx.attr._cc_toolchain[cc_common.CcToolchainInfo]
    feature_config = cc_common.configure_features(ctx = ctx, cc_toolchain = cc_toolchain)

    # Collect flags from MesonFlagInfo dependencies.
    all_defines = depset(
        transitive = [
            d[MesonFlagInfo].defines for d in ctx.attr.deps if MesonFlagInfo in d
        ],
    )
    all_copts = depset(
        transitive = [
            d[MesonFlagInfo].copts for d in ctx.attr.deps if MesonFlagInfo in d
        ],
    )
    all_linkopts = depset(
        transitive = [
            d[MesonFlagInfo].linkopts for d in ctx.attr.deps if MesonFlagInfo in d
        ],
    )

    # Meson always adds current directory to include path
    rebased_includes = _package_relative_includes(ctx, ["."])

    compilation_contexts = [d[CcInfo].compilation_context for d in ctx.attr.deps if CcInfo in d]
    local_compilation_context = cc_common.create_compilation_context(defines = all_defines)

    (comp_context, comp_outputs) = cc_common.compile(
        name = ctx.label.name,
        actions = ctx.actions,
        cc_toolchain = cc_toolchain,
        feature_configuration = feature_config,
        srcs = ctx.files.srcs,
        system_includes = rebased_includes,
        user_compile_flags = all_copts.to_list(),
        compilation_contexts = [local_compilation_context] + compilation_contexts,
    )

    # Link
    linking_contexts = [d[CcInfo].linking_context for d in ctx.attr.deps if CcInfo in d]

    # Add linkopts from MesonFlagInfo
    local_linker_input = None
    if all_linkopts:
        local_linker_input = cc_common.create_linker_input(
            owner = ctx.label,
            user_link_flags = all_linkopts,
        )
    local_linking_context = cc_common.create_linking_context(
        linker_inputs = depset([local_linker_input]) if local_linker_input else depset(),
    )

    linking_outputs = cc_common.link(
        name = ctx.label.name,
        actions = ctx.actions,
        cc_toolchain = cc_toolchain,
        feature_configuration = feature_config,
        compilation_outputs = comp_outputs,
        linking_contexts = [local_linking_context] + linking_contexts,
        output_type = "dynamic_library",
    )

    linker_input = None
    if linking_outputs.library_to_link:
        linker_input = cc_common.create_linker_input(
            owner = ctx.label,
            libraries = depset([linking_outputs.library_to_link]),
        )

    linking_context = cc_common.create_linking_context(
        linker_inputs = depset([linker_input]) if linker_input else depset(),
    )

    if linking_contexts:
        linking_context = cc_common.merge_linking_contexts(
            linking_contexts = [linking_context] + linking_contexts,
        )

    outputs = []
    if linking_outputs.library_to_link:
        if linking_outputs.library_to_link.static_library:
            outputs.append(linking_outputs.library_to_link.static_library)
        elif linking_outputs.library_to_link.dynamic_library:
            outputs.append(linking_outputs.library_to_link.dynamic_library)

    return [
        DefaultInfo(files = depset(outputs)),
        CcInfo(
            compilation_context = comp_context,
            linking_context = linking_context,
        ),
        MesonFlagInfo(defines = all_defines, copts = all_copts, linkopts = all_linkopts),
    ]

meson_cc_library = rule(
    implementation = _meson_cc_library_impl,
    attrs = {
        "srcs": attr.label_list(allow_files = True),
        "deps": attr.label_list(),
        "_cc_toolchain": attr.label(default = "@bazel_tools//tools/cpp:current_cc_toolchain"),
    },
    fragments = ["cpp"],
    toolchains = ["@bazel_tools//tools/cpp:toolchain_type"],
)

def _meson_cc_headers_impl(ctx):
    # Collect transitive compilation contexts from dependencies (likely other headers)
    compilation_contexts = [d[CcInfo].compilation_context for d in ctx.attr.deps if CcInfo in d]

    return [
        DefaultInfo(files = depset(ctx.files.hdrs)),
        CcInfo(
            compilation_context = cc_common.merge_compilation_contexts(
                compilation_contexts = [
                    cc_common.create_compilation_context(
                        headers = depset(ctx.files.hdrs),
                        system_includes = depset(
                            _package_relative_includes(ctx, ctx.attr.export_include_dirs),
                        ),
                    ),
                ] + compilation_contexts,
            ),
        ),
    ]

meson_cc_headers = rule(
    implementation = _meson_cc_headers_impl,
    attrs = {
        "hdrs": attr.label_list(allow_files = True),
        "export_include_dirs": attr.string_list(),
        "deps": attr.label_list(),
    },
)

def _meson_genrule_impl(ctx):
    command = ctx.expand_location(ctx.attr.cmd, targets = ctx.attr.srcs + ctx.attr.tools)
    ruledir = ctx.bin_dir.path + "/" + ctx.label.package
    command = command.replace("$(genDir)", ruledir).replace("$(GENDIR)", ruledir)

    ctx.actions.run_shell(
        inputs = ctx.files.srcs,
        tools = [t.files_to_run for t in ctx.attr.tools],
        outputs = ctx.outputs.outs,
        command = command,
        mnemonic = "MesonGenrule",
        progress_message = "Generating %s" % ctx.label.name,
    )

    return [
        DefaultInfo(files = depset(ctx.outputs.outs)),
        CcInfo(
            compilation_context = cc_common.create_compilation_context(
                system_includes = depset(
                    _workspace_relative_includes(ctx, ctx.attr.export_include_dirs),
                ),
                headers = depset(ctx.outputs.outs),
            ),
        ),
    ]

meson_genrule = rule(
    implementation = _meson_genrule_impl,
    attrs = {
        "srcs": attr.label_list(allow_files = True),
        "outs": attr.output_list(mandatory = True),
        "tools": attr.label_list(allow_files = True, cfg = "exec"),
        "cmd": attr.string(mandatory = True),
        "export_include_dirs": attr.string_list(),
    },
)
diff --git a/mesonbuild/convert/build_systems/bazel/starlark/toolchains.bzl b/mesonbuild/convert/build_systems/bazel/starlark/toolchains.bzl
new file mode 100644
index 000000000000..0a8baca4ab02
--- /dev/null
+++ b/mesonbuild/convert/build_systems/bazel/starlark/toolchains.bzl
@@ -0,0 +1,86 @@
# SPDX-License-Identifier: Apache-2.0
# Copyright 2026 The Meson Authors

load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/cpp:unix_cc_toolchain_config.bzl", "cc_toolchain_config")
load("@rules_cc//cc:defs.bzl", "cc_toolchain")

def _meson_repos_impl(ctx):
    for mod in ctx.modules:
        for repo in mod.tags.repo:
            kwargs = {
                "name": repo.name,
                "url": repo.url,
                "sha256": repo.sha256,
            }
            if repo.build_file:
kwargs["build_file"] = repo.build_file + if repo.build_file_content: + kwargs["build_file_content"] = repo.build_file_content + if repo.type: + kwargs["type"] = repo.type + http_archive(**kwargs) + +repo_tag = tag_class( + attrs = { + "name": attr.string(mandatory = True), + "url": attr.string(mandatory = True), + "sha256": attr.string(mandatory = True), + "build_file": attr.string(), + "build_file_content": attr.string(), + "type": attr.string(), + }, +) + +meson_repos = module_extension( + implementation = _meson_repos_impl, + tag_classes = {"repo": repo_tag}, +) + +def define_meson_toolchain(name, cpu, target_system_name, tool_paths, sysroot_repo, fuchsia_cpu): + workspace_name = Label(sysroot_repo + "//:all").workspace_name + sysroot_path = "external/" + workspace_name + "/arch/" + fuchsia_cpu + "/sysroot" + + cc_toolchain_config( + name = name + "_config", + cpu = cpu, + compiler = "gcc", + toolchain_identifier = name, + host_system_name = "local", + target_system_name = target_system_name, + target_libc = "unknown", + abi_version = "unknown", + abi_libc_version = "unknown", + builtin_sysroot = sysroot_path, + cxx_builtin_include_directories = [ + ".", + sysroot_path + "/include", + ], + compile_flags = [ + "-target", + target_system_name, + "-no-canonical-prefixes", + "--sysroot", + sysroot_path, + ], + link_flags = [ + "-target", + target_system_name, + "-no-canonical-prefixes", + "--sysroot", + sysroot_path, + ], + tool_paths = tool_paths, + ) + + cc_toolchain( + name = name + "_cc_toolchain", + all_files = ":all_files", + compiler_files = ":all_files", + dwp_files = ":empty", + linker_files = ":all_files", + objcopy_files = ":all_files", + strip_files = ":all_files", + toolchain_config = ":" + name + "_config", + supports_param_files = 0, + ) diff --git a/mesonbuild/convert/build_systems/bazel/state.py b/mesonbuild/convert/build_systems/bazel/state.py new file mode 100644 index 000000000000..d87f39db0724 --- /dev/null +++ 
b/mesonbuild/convert/build_systems/bazel/state.py @@ -0,0 +1,383 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +import os + +from mesonbuild import mlog +from mesonbuild.mesonlib import MachineChoice +from mesonbuild.convert.abstract.abstract_toolchain import ( + AbstractToolchainInfo, +) + +from mesonbuild.convert.common_defs import ( + SelectInstance, + SelectId, + SelectKind, +) + +from mesonbuild.convert.instance.convert_instance_utils import ( + ConvertDep, + ConvertSrc, + ConvertInstanceFlag, + ConvertInstanceIncludeDirectory, + ConvertInstanceFileGroup, +) +from mesonbuild.convert.build_systems.common import ( + ConvertBackend, + ConvertTreeNode, + ConvertStateTracker, +) +from mesonbuild.convert.build_systems.target import ( + ConvertAttr, + ConvertFileGroup, + ConvertIncludeDirectory, + ConvertPythonTarget, + ConvertFlag, + ConvertBuildTarget, + ConvertStaticLibrary, + ConvertSharedLibrary, + ConvertTarget, + ConvertCustomTarget, +) +from mesonbuild.convert.instance.convert_instance_build_target import ( + ConvertInstanceStaticLibrary, + ConvertInstanceSharedLibrary, +) +from mesonbuild.convert.instance.convert_instance_custom_target import ( + ConvertInstanceCustomTarget, + ConvertInstancePythonTarget, + ConvertCustomTargetCmdPart, + ConvertCustomTargetCmdPartType, +) + +GLOB_HEADERS: str = ( + """glob(["**/*.h", "**/*.hpp", "**/*.inl", "**/*.inc"], allow_empty = True)""" +) + + +class BazelBackend(ConvertBackend): + """Bazel backend for build system conversion.""" + + def __init__(self) -> None: + self.converted_custom_targets: T.Dict[str, T.Tuple[str, str]] = {} + self.external_deps: T.Set[ConvertDep] = set() + + def get_os_info( + self, toolchain: AbstractToolchainInfo, choice: MachineChoice + ) -> SelectInstance: + machine_info = toolchain.machine_info[choice] + os_string = machine_info.system + os_select = 
SelectInstance(SelectId(SelectKind.OS, "", "os"), os_string) + return os_select + + def get_arch_info( + self, toolchain: AbstractToolchainInfo, choice: MachineChoice + ) -> SelectInstance: + machine_info = toolchain.machine_info[choice] + select_id = SelectId(SelectKind.ARCH, "", "arch") + arch_select = SelectInstance(select_id, machine_info.cpu_family) + return arch_select + + def add_python_config( + self, target: ConvertPythonTarget, instance: ConvertInstancePythonTarget + ) -> None: + bazel_main = self._get_bazel_sources([instance.main], target.subdir)[0] + target.single_attributes[ConvertAttr.PYTHON_MAIN] = f'"{bazel_main}"' + target.get_attribute_node(ConvertAttr.SRCS).add_common_values( + self._get_bazel_sources(instance.srcs, target.subdir) + ) + target.get_attribute_node(ConvertAttr.BAZEL_DEPS).add_common_values( + [f"//:{lib}" for lib in instance.libs] + ) + + def add_flag_config( + self, + target: ConvertFlag, + instance: ConvertInstanceFlag, + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance], + ) -> None: + os_select = self.get_os_info(toolchain, MachineChoice.HOST) + arch_select = self.get_arch_info(toolchain, MachineChoice.HOST) + label = {arch_select, os_select} | custom_instances + + target.get_attribute_node(ConvertAttr.BAZEL_FLAGS).add_conditional_values( + label, instance.compile_args + ) + if instance.link_args: + target.get_attribute_node(ConvertAttr.LDFLAGS).add_conditional_values( + label, instance.link_args + ) + + def add_include_dir_config( + self, + target: ConvertIncludeDirectory, + instance: ConvertInstanceIncludeDirectory, + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance], + ) -> None: + os_select = self.get_os_info(toolchain, MachineChoice.HOST) + arch_select = self.get_arch_info(toolchain, MachineChoice.HOST) + label = {arch_select, os_select} | custom_instances + + target.get_attribute_node(ConvertAttr.INCLUDES).add_conditional_values( + label, list(instance.paths) + ) + + 
target.single_attributes[ConvertAttr.BAZEL_HDRS] = GLOB_HEADERS + + def add_file_group_config( + self, target: ConvertFileGroup, instance: ConvertInstanceFileGroup + ) -> None: + target.get_attribute_node(ConvertAttr.SRCS).add_common_values(instance.srcs) + + def _get_custom_target_cmd( + self, convert_instance_cmds: T.List[ConvertCustomTargetCmdPart], subdir: str + ) -> str: + final_cmd = [] + for p in convert_instance_cmds: + if isinstance(p, ConvertCustomTargetCmdPart): + if p.cmd_type == ConvertCustomTargetCmdPartType.TOOL: + bazel_src = self._get_bazel_sources([p.src], subdir)[0] + final_cmd.append(f"$(location {bazel_src})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.PYTHON_BINARY: + bazel_src = self._get_bazel_sources([p.src], subdir)[0] + final_cmd.append(f"$(location {bazel_src})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.INPUT: + bazel_src = self._get_bazel_sources([p.src], subdir)[0] + final_cmd.append(f"$(location {bazel_src})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.OUTPUT: + # Basename needed for export_include_dirs case + cmd = os.path.basename(p.cmd) + final_cmd.append(f"$(location {cmd})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.STRING: + processed_cmd = p.cmd.replace("@@GEN_DIR@@", "$(GENDIR)") + final_cmd.append(processed_cmd) + return " ".join(final_cmd) + + def add_custom_target( + self, state_tracker: ConvertStateTracker, ct: ConvertInstanceCustomTarget + ) -> None: + if ct.name not in state_tracker.targets: + state_tracker.targets[ct.name] = ConvertCustomTarget(ct.name, ct.subdir, ct) + + target = T.cast(ConvertCustomTarget, state_tracker.targets[ct.name]) + if target.instance != ct: + state_tracker.targets.pop(ct.name) + mlog.warning("Dropped custom target that differed across configs") + return + + # Bazel outputs must be relative to the package directory. 
+ # Mostly happens with export_include_dirs workaround + out = [] + for o in ct.generated_headers + ct.generated_sources: + out.append(os.path.basename(o)) + + target.get_attribute_node(ConvertAttr.OUT).add_common_values(out) + target.get_attribute_node(ConvertAttr.SRCS).add_common_values( + self._get_bazel_sources(ct.srcs, ct.subdir) + ) + target.get_attribute_node(ConvertAttr.TOOLS).add_common_values( + self._get_bazel_sources(ct.tools, ct.subdir) + ) + target.get_attribute_node(ConvertAttr.INCLUDES).add_common_values( + ct.export_include_dirs + ) + target.cmd = self._get_custom_target_cmd(ct.convert_instance_cmds, ct.subdir) + + def add_build_target_config( + self, + target: ConvertBuildTarget, + instance: T.Union[ConvertInstanceStaticLibrary, ConvertInstanceSharedLibrary], + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance], + ) -> None: + os_select = self.get_os_info(toolchain, instance.machine_choice) + arch_select = self.get_arch_info(toolchain, instance.machine_choice) + label = {arch_select, os_select} | custom_instances + + bazel_generated_flags: T.List[ConvertDep] = [] + for generated_flag in instance.generated_flags.values(): + bazel_generated_flags.append( + ConvertDep(generated_flag.name, generated_flag.subdir) + ) + + bazel_generated_includes: T.List[ConvertDep] = [] + for generated_dir in instance.generated_include_dirs.values(): + bazel_generated_includes.append( + ConvertDep(generated_dir.name, generated_dir.subdir) + ) + + all_deps = ( + self._get_bazel_targets(bazel_generated_flags) + + self._get_bazel_targets(bazel_generated_includes) + + self._get_bazel_targets(instance.header_libs) + + self._get_bazel_targets(instance.static_libs) + + self._get_bazel_targets(instance.shared_libs) + + self._get_bazel_targets(instance.whole_static_libs) + + self._get_bazel_targets(instance.generated_headers) + + self._get_bazel_targets(instance.generated_sources) + ) + + 
target.get_attribute_node(ConvertAttr.SRCS).add_conditional_values( + label, self._get_bazel_sources(instance.srcs, target.subdir) + ) + target.get_attribute_node(ConvertAttr.BAZEL_DEPS).add_conditional_values( + label, all_deps + ) + + def _process_include_dependencies( + self, + state_tracker: ConvertStateTracker, + node: ConvertTreeNode, + label: T.Set[SelectInstance], + ) -> T.List[ConvertTarget]: + """ + Recursively discovers and links include targets. Returns a list of header providers + for this node's subtree that are active in 'label'. + + This is needed for Bazel because it handles include_directories weirdly. Say I have + a directory structure like this: + + include/ + -- BUILD.bazel + -- clang/ + BUILD.bazel + + If the include/BUILD.bazel has: + meson_include_directories( + name = "inc_include", + hdrs = glob[**/*.h], + include = ["."] + ) + + Build targets that depend on inc_include will not be able to access include/clang. That's because + the include/clang/BUILD.bazel 'blocks' visibility into the package. This is quite annoying. + + We utilize tree structure to add dependencies on child packages. + + meson_include_directories( + name = "inc_include", + hdrs = glob[**/*.h], + include = ["."] + deps = [//include/clang:inc_clang] + ) + + The above works as expected. The below code also considers "labels", since you only want to + depend on child include dirs with the same label as the current. It could be a bit of + over-engineering here. 
+ """ + child_providers: T.List[ConvertTarget] = [] + for child_key in sorted(node.child_nodes.keys()): + child_providers.extend( + self._process_include_dependencies( + state_tracker, node.child_nodes[child_key], label + ) + ) + + # Identify or create local header providers + local_providers = [ + t for t in node.targets if isinstance(t, ConvertIncludeDirectory) + ] + + if not local_providers and node.targets: + abs_subdir = os.path.join(state_tracker.project_dir, node.subdir) + if os.path.isdir(abs_subdir): + has_headers = any( + f.endswith((".h", ".hpp")) for f in os.listdir(abs_subdir) + ) + if has_headers: + name = "inc_" + (node.subdir.replace("/", "_") or "root") + name = state_tracker.project_config.sanitize_target_name(name) + inc = ConvertIncludeDirectory(name, node.subdir) + inc.single_attributes[ConvertAttr.BAZEL_HDRS] = GLOB_HEADERS + node.add_target(inc) + local_providers = [inc] + + # Ensure auto-generated providers are marked active for the current label + for provider in local_providers: + if provider.name.startswith("inc_"): + provider.get_attribute_node( + ConvertAttr.INCLUDES + ).add_conditional_values(label, ["."]) + + # Establish links for this configuration + # We only care about providers that are active for the current label + active_local_providers: T.List[ConvertTarget] = [ + p + for p in local_providers + if p.is_active_for_label(ConvertAttr.INCLUDES, label) + ] + if active_local_providers: + inc_labels = [ + f"//{p.subdir}:{p.name}" if p.subdir else f"//:{p.name}" + for p in active_local_providers + ] + + # All build targets in this node depend on its active header providers + for t in node.targets: + if isinstance(t, (ConvertStaticLibrary, ConvertSharedLibrary)): + t.get_attribute_node(ConvertAttr.BAZEL_DEPS).add_conditional_values( + label, inc_labels + ) + + # Local header providers depend on child header providers (bubbling up) + if child_providers: + child_labels = [ + f"//{p.subdir}:{p.name}" if p.subdir else f"//:{p.name}" + for 
p in child_providers + ] + for p in active_local_providers: + p.get_attribute_node(ConvertAttr.BAZEL_DEPS).add_conditional_values( + label, child_labels + ) + + return active_local_providers + + # No active local provider, propagate children's providers to the parent + return child_providers + + def finish_current_config(self, state_tracker: ConvertStateTracker) -> None: + os_select = self.get_os_info( + state_tracker.current_toolchain, MachineChoice.HOST + ) + arch_select = self.get_arch_info( + state_tracker.current_toolchain, MachineChoice.HOST + ) + label = {arch_select, os_select} | state_tracker.current_custom_select_instances + + self._process_include_dependencies( + state_tracker, state_tracker.targets.root, label + ) + + def _get_bazel_targets(self, convert_deps: T.List[ConvertDep]) -> T.List[str]: + bazel_targets: T.List[str] = [] + for dep in convert_deps: + if dep.repo: + if dep.subdir: + bazel_target = f"@{dep.repo}//{dep.subdir}:{dep.target}" + else: + bazel_target = f"@{dep.repo}//:{dep.target}" + if dep.source_url: + self.external_deps.add(dep) + else: + bazel_target = f"//{dep.subdir}:{dep.target}" + + bazel_targets.append(bazel_target) + + return bazel_targets + + def _get_bazel_sources( + self, convert_srcs: T.List[ConvertSrc], subdir: str + ) -> T.List[str]: + bazel_srcs: T.List[str] = [] + for src in convert_srcs: + if src.target_dep: + bazel_srcs.extend(self._get_bazel_targets([src.target_dep])) + else: + bazel_srcs.append(src.source) + + return bazel_srcs diff --git a/mesonbuild/convert/build_systems/common.py b/mesonbuild/convert/build_systems/common.py new file mode 100644 index 000000000000..250107f9e5f4 --- /dev/null +++ b/mesonbuild/convert/build_systems/common.py @@ -0,0 +1,306 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +from bisect import insort + +from mesonbuild.mesonlib import MachineChoice +from 
mesonbuild.convert.common_defs import ( + SelectInstance, +) +from mesonbuild.convert.convert_project_config import ( + ConvertProjectConfig, +) +from mesonbuild.convert.instance.convert_instance_custom_target import ( + ConvertInstanceCustomTarget, + ConvertInstancePythonTarget, +) + +from mesonbuild.convert.instance.convert_instance_utils import ( + ConvertInstanceFlag, + ConvertInstanceIncludeDirectory, + ConvertInstanceFileGroup, +) +from mesonbuild.convert.abstract.abstract_toolchain import ( + AbstractToolchainInfo, +) +from mesonbuild.convert.instance.convert_instance_build_target import ( + ConvertInstanceStaticLibrary, + ConvertInstanceSharedLibrary, +) + +from mesonbuild.convert.build_systems.target import ( + ConvertTargetType, + ConvertTarget, + ConvertFileGroup, + ConvertPythonTarget, + ConvertFlag, + ConvertIncludeDirectory, + ConvertBuildTarget, + ConvertStaticLibrary, + ConvertSharedLibrary, +) + + +class ConvertBackend: + """Interface for build system backends.""" + + def get_os_info( + self, toolchain: AbstractToolchainInfo, choice: MachineChoice + ) -> SelectInstance: + raise NotImplementedError + + def get_arch_info( + self, toolchain: AbstractToolchainInfo, choice: MachineChoice + ) -> SelectInstance: + raise NotImplementedError + + def add_python_config( + self, target: ConvertPythonTarget, instance: ConvertInstancePythonTarget + ) -> None: + raise NotImplementedError + + def add_flag_config( + self, + target: ConvertFlag, + instance: ConvertInstanceFlag, + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance], + ) -> None: + raise NotImplementedError + + def add_include_dir_config( + self, + target: ConvertIncludeDirectory, + instance: ConvertInstanceIncludeDirectory, + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance], + ) -> None: + raise NotImplementedError + + def add_file_group_config( + self, target: ConvertFileGroup, instance: ConvertInstanceFileGroup + ) -> None: + raise 
NotImplementedError + + def add_custom_target( + self, state_tracker: ConvertStateTracker, instance: ConvertInstanceCustomTarget + ) -> None: + raise NotImplementedError + + def add_build_target_config( + self, + target: ConvertBuildTarget, + instance: T.Union[ConvertInstanceStaticLibrary, ConvertInstanceSharedLibrary], + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance], + ) -> None: + raise NotImplementedError + + def finish_current_config(self, state_tracker: ConvertStateTracker) -> None: + pass + + +class ConvertTreeNode: + """A single node in an ConvertTree. Contains subdir, targets, and children""" + + def __init__(self, subdir: str): + self.subdir: str = subdir + self.targets: T.List[ConvertTarget] = [] + self.target_types: T.Set[ConvertTargetType] = set() + self.child_nodes: T.Dict[str, ConvertTreeNode] = {} + + @property + def is_root(self) -> bool: + return self.subdir == "" + + def find_target(self, target_name: str) -> T.Optional[ConvertTarget]: + for target in self.targets: + if target.name == target_name: + return target + for child in self.child_nodes.values(): + target = child.find_target(target_name) + if target: + return target + return None + + def add_target(self, target: ConvertTarget) -> None: + insort(self.targets, target) + self.target_types.add(target.target_type) + + def walk(self) -> T.Iterable[ConvertTreeNode]: + yield self + for child in sorted(self.child_nodes.keys()): + yield from self.child_nodes[child].walk() + + +class ConvertTree: + """A tree structure where each level tracks the ConvertTargets. There is a BUILD.bazel or + Android.bp associated with each node of the tree. 
+ """ + + def __init__(self) -> None: + self.root = ConvertTreeNode("") + self._targets_dict: T.Dict[str, ConvertTarget] = {} + + def __contains__(self, name: str) -> bool: + return name in self._targets_dict + + def __getitem__(self, name: str) -> ConvertTarget: + return self._targets_dict[name] + + def __setitem__(self, name: str, target: ConvertTarget) -> None: + self._targets_dict[name] = target + self._add_to_tree(target) + + def values(self) -> T.ValuesView[ConvertTarget]: + return self._targets_dict.values() + + def _get_node(self, subdir: str) -> ConvertTreeNode: + if not subdir: + return self.root + parts = subdir.split("/") + current = self.root + path_so_far = [] + for part in parts: + path_so_far.append(part) + if part not in current.child_nodes: + current.child_nodes[part] = ConvertTreeNode("/".join(path_so_far)) + current = current.child_nodes[part] + return current + + def _add_to_tree(self, target: ConvertTarget) -> None: + node = self._get_node(target.subdir) + node.add_target(target) + + def pop(self, name: str) -> ConvertTarget: + target = self._targets_dict.pop(name) + node = self._get_node(target.subdir) + node.targets.remove(target) + return target + + def walk(self) -> T.Iterable[ConvertTreeNode]: + yield from self.root.walk() + + +class ConvertStateTracker: + """Unified state tracker for all build systems.""" + + def __init__(self, project_config: ConvertProjectConfig, backend: ConvertBackend): + self.project_config = project_config + self.current_toolchain: T.Optional[AbstractToolchainInfo] = None + self.current_custom_select_instances: T.Optional[T.Set[SelectInstance]] = None + self.all_toolchains: T.Set[AbstractToolchainInfo] = set() + self.output_dir: str = "" + self.project_dir: str = "" + self.backend = backend + self.targets = ConvertTree() + + def set_current_config( + self, + toolchain_info: AbstractToolchainInfo, + custom_select_instances: T.Set[SelectInstance], + ) -> None: + self.current_toolchain = toolchain_info + 
self.current_custom_select_instances = custom_select_instances + self.all_toolchains.add(toolchain_info) + + def finish_current_config(self) -> None: + self.backend.finish_current_config(self) + + def add_python_target(self, target: ConvertInstancePythonTarget) -> None: + if target.name not in self.targets: + self.targets[target.name] = ConvertPythonTarget(target.name, target.subdir) + + # Ensure all project-defined python libraries are added to the instance + # before passing it to the backend. This ensures mako, etc. are available. + python_libs = self.project_config.dependencies.python_libraries + for lib in python_libs: + if lib not in target.libs: + target.libs.append(lib) + + self.backend.add_python_config( + T.cast(ConvertPythonTarget, self.targets[target.name]), target + ) + + def add_flag(self, flag: ConvertInstanceFlag) -> None: + if flag.name not in self.targets: + self.targets[flag.name] = ConvertFlag(flag.name, flag.subdir, flag.language) + + self.backend.add_flag_config( + T.cast(ConvertFlag, self.targets[flag.name]), + flag, + self.current_toolchain, + self.current_custom_select_instances, + ) + + def add_include_directory(self, inc: ConvertInstanceIncludeDirectory) -> None: + if inc.name not in self.targets: + self.targets[inc.name] = ConvertIncludeDirectory(inc.name, inc.subdir) + + self.backend.add_include_dir_config( + T.cast(ConvertIncludeDirectory, self.targets[inc.name]), + inc, + self.current_toolchain, + self.current_custom_select_instances, + ) + + def add_file_group(self, grp: ConvertInstanceFileGroup) -> None: + if grp.name not in self.targets: + self.targets[grp.name] = ConvertFileGroup(grp.name, grp.subdir) + self.backend.add_file_group_config( + T.cast(ConvertFileGroup, self.targets[grp.name]), grp + ) + + def add_custom_target(self, custom_target: ConvertInstanceCustomTarget) -> None: + self.backend.add_custom_target(self, custom_target) + + def add_static_library(self, lib: ConvertInstanceStaticLibrary) -> None: + if lib.name not in 
self.targets: + self.targets[lib.name] = ConvertStaticLibrary( + lib.name, lib.subdir, lib.rust_abi + ) + self.backend.add_build_target_config( + T.cast(ConvertStaticLibrary, self.targets[lib.name]), + lib, + self.current_toolchain, + self.current_custom_select_instances, + ) + + def add_shared_library(self, lib: ConvertInstanceSharedLibrary) -> None: + if lib.name not in self.targets: + self.targets[lib.name] = ConvertSharedLibrary( + lib.name, lib.subdir, lib.rust_abi + ) + self.backend.add_build_target_config( + T.cast(ConvertSharedLibrary, self.targets[lib.name]), + lib, + self.current_toolchain, + self.current_custom_select_instances, + ) + + def finish(self) -> None: + all_os_selects: T.Set[SelectInstance] = set() + all_arch_selects: T.Set[SelectInstance] = set() + all_select_instances: T.List[T.Set[SelectInstance]] = [] + all_custom_defaults: T.Set[SelectInstance] = set() + + all_custom_selects = self.project_config.get_all_custom_selects() + + for custom_select in all_custom_selects: + all_select_instances.append(custom_select.get_select_instances()) + all_custom_defaults.add(custom_select.get_default_instance()) + + for toolchain in self.all_toolchains: + all_os_selects.add(self.backend.get_os_info(toolchain, MachineChoice.HOST)) + all_arch_selects.add( + self.backend.get_arch_info(toolchain, MachineChoice.HOST) + ) + + all_select_instances.append(all_os_selects) + all_select_instances.append(all_arch_selects) + + for target in self.targets.values(): + target.finish(all_select_instances, all_custom_defaults) diff --git a/mesonbuild/convert/build_systems/emitter.py b/mesonbuild/convert/build_systems/emitter.py new file mode 100644 index 000000000000..3d68ba7e99b0 --- /dev/null +++ b/mesonbuild/convert/build_systems/emitter.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +import os + +from 
mesonbuild.convert.build_systems.target import ( + ConvertAttr, + ConvertTarget, + ConvertTargetType, + ConvertAttrNode, +) + +COMMON_INDENT = 4 +COMMON_MAX_LINE_LENGTH = 70 + +if T.TYPE_CHECKING: + from mesonbuild.convert.build_systems.common import ConvertStateTracker + + +class ConvertEmitterBackend: + """Interface for build system emitter backends.""" + + def emit_begin(self, output_dir: str, state_tracker: ConvertStateTracker) -> None: + pass + + def get_attr_map(self) -> T.Dict[ConvertAttr, str]: + raise NotImplementedError + + def get_module_map(self) -> T.Dict[ConvertTargetType, str]: + raise NotImplementedError + + def get_attr_separator(self) -> str: + raise NotImplementedError + + def get_opening_brace(self) -> str: + raise NotImplementedError + + def get_closing_brace(self) -> str: + raise NotImplementedError + + def get_list_brackets(self) -> T.Tuple[str, str]: + return ("[", "]") + + def get_build_file_name(self) -> str: + raise NotImplementedError + + def get_copyright_header(self, copyright_info: T.Dict[str, T.Any]) -> str: + raise NotImplementedError + + def get_license_block( + self, copyright_info: T.Dict[str, T.Any], is_root: bool + ) -> str: + raise NotImplementedError + + def emit_extra_root_info(self, state_tracker: ConvertStateTracker) -> str: + return "" + + def emit_module_load_info( + self, targets: T.List[ConvertTarget], is_root: bool + ) -> str: + return "" + + def emit_special_target_info(self, target: ConvertTarget) -> str: + return "" + + def format_conditionals(self, indent: int, node: ConvertAttrNode) -> str: + raise NotImplementedError + + +def generic_emit_attribute_values( + current_indent: int, + attribute_values: T.List[str], + brackets: T.Tuple[str, str] = ("[", "]"), + leading_space: bool = False) -> str: # fmt: skip + if not attribute_values: + return (" " if leading_space else "") + f"{brackets[0]}{brackets[1]}" + + default_indent = " " * current_indent + list_indent = " " * (current_indent + COMMON_INDENT) + content_str = 
(" " if leading_space else "") + f"{brackets[0]}\n" + for value in attribute_values: + content_str += f'{list_indent}"{value}",\n' + + content_str += f"{default_indent}{brackets[1]}" + return content_str + + +class CommonModuleEmitter: + """Shared module emitter that delegates backend-specific syntax to a backend.""" + + def __init__(self, target: ConvertTarget, backend: ConvertEmitterBackend): + self.target = target + self.backend = backend + + def emit(self) -> str: + content = "\n\n" + module_type = self.backend.get_module_map().get( + self.target.target_type, "unknown" + ) + content += f"{module_type}{self.backend.get_opening_brace()}\n" + content += self.emit_single_attributes() + content += self.emit_attribute_nodes() + + special_info = self.backend.emit_special_target_info(self.target) + if special_info: + content += special_info + + content += self.backend.get_closing_brace() + return content + + def emit_single_attributes(self) -> str: + content_str = "" + attr_indent = COMMON_INDENT * " " + attr_map = self.backend.get_attr_map() + separator = self.backend.get_attr_separator() + + for attr, value in self.target.single_attributes.items(): + attr_name = attr_map.get(attr) + if attr_name: + content_str += f"{attr_indent}{attr_name}{separator} {value},\n" + return content_str + + def emit_attribute_nodes(self) -> str: + attr_indent = COMMON_INDENT * " " + attr_map = self.backend.get_attr_map() + separator = self.backend.get_attr_separator() + + content_str = "" + for attr, node in self.target.attribute_nodes.items(): + if node.empty(): + continue + + attr_name = attr_map.get(attr) + if not attr_name: + continue + + content_str += f"{attr_indent}{attr_name}{separator}" + common_values = list(node.common_values) + common_values.sort() + + if node.common_values: + content_str += generic_emit_attribute_values( + COMMON_INDENT, + common_values, + self.backend.get_list_brackets(), + leading_space=(separator == ":"), + ) + + if node.select_nodes: + if 
node.common_values: + content_str += " + " + elif separator == ":": + content_str += " " + content_str += self.backend.format_conditionals(COMMON_INDENT, node) + content_str += ",\n" + return content_str + + +class CommonEmitter: + """Base class for all build system emitters, handling high-level emission logic.""" + + def __init__(self, output_dir: str, backend: ConvertEmitterBackend): + self.output_dir = output_dir + self.backend = backend + + def emit(self, state_tracker: ConvertStateTracker) -> None: + self.backend.emit_begin(self.output_dir, state_tracker) + + copyright_info = state_tracker.project_config.copyright.copy() + copyright_header = self.backend.get_copyright_header(copyright_info).strip() + + for node in state_tracker.targets.walk(): + subdir = node.subdir + targets = node.targets + if not targets and not node.is_root: + continue + + is_root = node.is_root + blocks = [] + + load_info = self.backend.emit_module_load_info(targets, is_root).strip() + if load_info: + # Place load info immediately after copyright header + blocks.append(copyright_header + "\n\n" + load_info) + else: + blocks.append(copyright_header) + + license_block = self.backend.get_license_block( + copyright_info, is_root + ).strip() + if license_block: + blocks.append(license_block) + + for target in targets: + module_text = CommonModuleEmitter(target, self.backend).emit().strip() + blocks.append(module_text) + + if is_root: + extra_info = self.backend.emit_extra_root_info(state_tracker).strip() + if extra_info: + blocks.append(extra_info) + + content = "\n\n".join(blocks) + "\n" + + output_path = ( + os.path.join(self.output_dir, subdir) if subdir else self.output_dir + ) + os.makedirs(output_path, exist_ok=True) + with open( + os.path.join(output_path, self.backend.get_build_file_name()), + "w", + encoding="utf-8", + ) as f: + f.write(content) diff --git a/mesonbuild/convert/build_systems/soong/__init__.py b/mesonbuild/convert/build_systems/soong/__init__.py new file mode 100644 
index 000000000000..e69de29bb2d1 diff --git a/mesonbuild/convert/build_systems/soong/emitter.py b/mesonbuild/convert/build_systems/soong/emitter.py new file mode 100644 index 000000000000..9053a77c3e82 --- /dev/null +++ b/mesonbuild/convert/build_systems/soong/emitter.py @@ -0,0 +1,282 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +import os +import textwrap + +from mesonbuild.convert.common_defs import ( + SelectId, + SelectKind, +) +from mesonbuild.convert.build_systems.target import ( + ConvertAttr, + ConvertTarget, + ConvertTargetType, + ConvertAttrNode, + ConvertCustomTarget, + ConvertFlag, + ConvertBuildTarget, +) +from mesonbuild.convert.build_systems.emitter import ( + ConvertEmitterBackend, + generic_emit_attribute_values, + COMMON_INDENT, + COMMON_MAX_LINE_LENGTH, +) + +from mesonbuild.convert.build_systems.common import ConvertStateTracker + +MULTILIB_TARGET_BLOCK = """\ + target: { + host: { + compile_multilib: "64", + }, + }, +""" + +INSTALL_TARGET_BLOCK = """\ + target: { + android: { + relative_install_path: "hw", + }, + }, +""" + +COPYRIGHT_HEADER_TEMPLATE = """\ +/* + * Copyright (C) 2025-2026 The Magma GPU Project + * SPDX-License-Identifier: Apache-2.0 + * + * Generated via: + * https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert + * + * Submit patches, do not hand-edit. 
+ * + */ + +package {{ + // See: http://go/android-license-faq + default_applicable_licenses: ["{license_name}"], +}}""" + +LICENSE_BLOCK_TEMPLATE = """\ +license {{ + name: "{license_name}", + visibility: [":__subpackages__"], + license_kinds: [ +{license_kinds} + ], + license_text: [ +{license_texts} + ], +}}""" + +SOONG_ATTR_MAP = { + ConvertAttr.NAME: "name", + ConvertAttr.SRCS: "srcs", + ConvertAttr.SOONG_DEFAULTS: "defaults", + ConvertAttr.SOONG_STATIC_LIBRARIES: "static_libs", + ConvertAttr.SOONG_SHARED_LIBRARIES: "shared_libs", + ConvertAttr.SOONG_WHOLE_STATIC_LIBRARIES: "whole_static_libs", + ConvertAttr.SOONG_HEADER_LIBS: "header_libs", + ConvertAttr.SOONG_GENERATED_HEADERS: "generated_headers", + ConvertAttr.SOONG_GENERATED_SOURCES: "generated_sources", + ConvertAttr.SOONG_CFLAGS: "cflags", + ConvertAttr.SOONG_CPPFLAGS: "cppflags", + ConvertAttr.RUSTFLAGS: "flags", + ConvertAttr.SOONG_HOST_SUPPORTED: "host_supported", + ConvertAttr.SOONG_VENDOR: "vendor", + ConvertAttr.OUT: "out", + ConvertAttr.TOOLS: "tools", + ConvertAttr.INCLUDES: "export_include_dirs", + ConvertAttr.PYTHON_MAIN: "main", + ConvertAttr.SOONG_PYTHON_LIBS: "libs", + ConvertAttr.RUST_CRATE_NAME: "crate_name", + ConvertAttr.RUST_CRATE_ROOT: "crate_root", + ConvertAttr.SOONG_RUST_LIBS: "rustlibs", + ConvertAttr.RUST_PROC_MACROS: "proc_macros", + ConvertAttr.RUST_EDITION: "edition", + ConvertAttr.SOONG_C_STD: "c_std", + ConvertAttr.SOONG_CPP_STD: "cpp_std", + ConvertAttr.LDFLAGS: "ldflags", + ConvertAttr.SOONG_LINKER_VERSION_SCRIPT: "version_script", +} + + +SOONG_MODULE_MAP = { + ConvertTargetType.FILEGROUP: "filegroup", + ConvertTargetType.PYTHON_TARGET: "python_binary_host", + ConvertTargetType.CUSTOM_TARGET: "genrule", + ConvertTargetType.INCLUDE_DIRECTORY: "cc_library_headers", + ConvertTargetType.FLAG: "cc_defaults", + ConvertTargetType.RUST_FLAG: "rust_defaults", + ConvertTargetType.STATIC_LIBRARY: "cc_library_static", + ConvertTargetType.SHARED_LIBRARY: "cc_library_shared", + 
ConvertTargetType.RUST_LIBRARY: "rust_library", + ConvertTargetType.RUST_FFI_STATIC: "rust_ffi_static", + ConvertTargetType.RUST_FFI_SHARED: "rust_ffi_shared", +} + + +def _format_select_value(value: T.Union[str, bool]) -> str: + if isinstance(value, bool): + return str(value).lower() + if value == "default": + return value + return f'"{value}"' + + +def _format_select_id(select_id: SelectId) -> str: + if select_id.select_kind == SelectKind.ARCH: + return "arch()" + if select_id.select_kind == SelectKind.OS: + return "os()" + if select_id.select_kind == SelectKind.CUSTOM: + return f'soong_config_variable("{select_id.namespace}", "{select_id.variable}")' + return "" + + +def _emit_list_parens(strings: T.List[str]) -> str: + content_str = "" + for i, string in enumerate(strings): + content_str += string + if i != len(strings) - 1: + content_str += ", " + + if len(strings) > 1: + content_str = "(" + content_str + ")" + + return content_str + + +class SoongEmitterBackend(ConvertEmitterBackend): + def get_attr_map(self) -> T.Dict[ConvertAttr, str]: + return SOONG_ATTR_MAP + + def get_module_map(self) -> T.Dict[ConvertTargetType, str]: + return SOONG_MODULE_MAP + + def get_attr_separator(self) -> str: + return ":" + + def get_opening_brace(self) -> str: + return " {" + + def get_closing_brace(self) -> str: + return "}" + + def get_build_file_name(self) -> str: + return "Android.bp" + + def get_copyright_header(self, copyright_info: T.Dict[str, T.Any]) -> str: + return COPYRIGHT_HEADER_TEMPLATE.format( + license_name=copyright_info["license_name"], + ) + + def get_license_block( + self, copyright_info: T.Dict[str, T.Any], is_root: bool + ) -> str: + if is_root: + license_kinds = "\n".join( + [ + f' "SPDX-license-identifier-{lic}",' + for lic in copyright_info.get("licenses", []) + ] + ) + license_texts = "\n".join( + [f' "{txt}",' for txt in copyright_info.get("license_texts", [])] + ) + return "\n\n" + LICENSE_BLOCK_TEMPLATE.format( + 
license_name=copyright_info["license_name"], + license_kinds=license_kinds, + license_texts=license_texts, + ) + return "" + + def emit_extra_root_info(self, state_tracker: ConvertStateTracker) -> str: + content = "" + handwritten_modules = state_tracker.project_config.handwritten_modules + if handwritten_modules: + handwritten_path = os.path.join( + state_tracker.project_config.config_dir, handwritten_modules + ) + if os.path.exists(handwritten_path): + with open(handwritten_path, "r", encoding="utf-8") as f: + content += "\n\n" + f.read() + return content + + def emit_special_target_info(self, target: ConvertTarget) -> str: + content_str = "" + if isinstance(target, ConvertCustomTarget): + cmd = getattr(target, "cmd", "") + wrapper = textwrap.TextWrapper( + width=COMMON_MAX_LINE_LENGTH, break_on_hyphens=False + ) + lines = wrapper.wrap(text=cmd) + content_str += COMMON_INDENT * " " + "cmd: " + subsequent_indent = 2 * COMMON_INDENT + + for i, line in enumerate(lines): + first_line = i == 0 + last_line = i == len(lines) - 1 + + if last_line: + line = f'"{line}",' + else: + if not line.endswith(" "): + line = line + " " + line = f'"{line}" +' + + if not first_line: + content_str += " " * subsequent_indent + + content_str += line + "\n" + elif isinstance(target, ConvertFlag): + if getattr(target, "project_native_args", False) and getattr( + target, "host_supported", False + ): + content_str += MULTILIB_TARGET_BLOCK + elif isinstance(target, ConvertBuildTarget): + if getattr(target, "install", False): + content_str += INSTALL_TARGET_BLOCK + return content_str + + def format_conditionals(self, indent: int, node: ConvertAttrNode) -> str: + content_str = "" + current_indent = " " * indent + select_nodes = node.get_select_nodes() + if not select_nodes: + return content_str + + for i, select_node in enumerate(select_nodes): + if i > 0: + content_str += " + " + + content_str += "select(" + formatted_select_ids: T.List[str] = [] + for select_id in select_node.select_ids: + 
formatted_select_ids.append(_format_select_id(select_id)) + content_str += _emit_list_parens(formatted_select_ids) + content_str += ", {\n" + + value_indent = indent + COMMON_INDENT + indent_str = " " * value_indent + for select_values, attribute_values in select_node.select_tuples: + formatted_values: T.List[str] = [] + for select_value in select_values: + formatted_values.append(_format_select_value(select_value)) + + content_str += indent_str + _emit_list_parens(formatted_values) + ":" + content_str += ( + generic_emit_attribute_values( + value_indent, attribute_values, leading_space=True + ) + + ",\n" + ) + + content_str += f"{current_indent}}})" + + return content_str diff --git a/mesonbuild/convert/build_systems/soong/state.py b/mesonbuild/convert/build_systems/soong/state.py new file mode 100644 index 000000000000..b69abcc0db3a --- /dev/null +++ b/mesonbuild/convert/build_systems/soong/state.py @@ -0,0 +1,379 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +import os +import copy + +from mesonbuild import mlog +from mesonbuild.mesonlib import MachineChoice +from mesonbuild.convert.abstract.abstract_toolchain import ( + AbstractToolchainInfo, +) + +from mesonbuild.convert.common_defs import ( + SelectInstance, + SelectId, + SelectKind, +) + +from mesonbuild.convert.instance.convert_instance_utils import ( + ConvertDep, + ConvertSrc, + ConvertInstanceFlag, + ConvertInstanceIncludeDirectory, + ConvertInstanceFileGroup, +) +from mesonbuild.convert.build_systems.common import ( + ConvertBackend, + ConvertStateTracker, +) +from mesonbuild.convert.build_systems.target import ( + ConvertAttr, + ConvertFileGroup, + ConvertIncludeDirectory, + ConvertPythonTarget, + ConvertFlag, + ConvertBuildTarget, + ConvertCustomTarget, +) +from mesonbuild.convert.instance.convert_instance_build_target import ( + ConvertInstanceStaticLibrary, + 
ConvertInstanceSharedLibrary, + GeneratedFilesType, + RustABI, +) +from mesonbuild.convert.instance.convert_instance_custom_target import ( + ConvertInstanceCustomTarget, + ConvertInstancePythonTarget, + ConvertCustomTargetCmdPart, + ConvertCustomTargetCmdPartType, +) + + +def _get_soong_targets(convert_deps: T.List[ConvertDep]) -> T.List[str]: + soong_targets: T.List[str] = [] + for dep in convert_deps: + soong_targets.append(dep.target) + + return soong_targets + + +def _get_soong_sources(convert_srcs: T.List[ConvertSrc]) -> T.List[str]: + soong_srcs: T.List[str] = [] + for src in convert_srcs: + if src.target_dep: + soong_srcs.append(":" + src.target_dep.target) + else: + soong_srcs.append(src.source) + + return soong_srcs + + +def _custom_target_convert(custom_target: ConvertInstanceCustomTarget, + custom_target_type: GeneratedFilesType) -> ConvertInstanceCustomTarget: # fmt: skip + new_target = copy.deepcopy(custom_target) + filter_target: T.List[str] = [] + if custom_target_type == GeneratedFilesType.HEADERS: + new_target.name = f"{custom_target.name}_headers" + new_target.generated_sources = [] + filter_target = custom_target.generated_sources + elif custom_target_type == GeneratedFilesType.IMPL: + new_target.name = f"{custom_target.name}_impl" + new_target.generated_headers = [] + new_target.export_include_dirs = [] + filter_target = custom_target.generated_headers + + filtered_cmds: T.List[ConvertCustomTargetCmdPart] = [] + for cmd_part in custom_target.convert_instance_cmds: + if ( + cmd_part.cmd_type == ConvertCustomTargetCmdPartType.OUTPUT + and cmd_part.cmd in filter_target + ): + filtered_cmds.append( + ConvertCustomTargetCmdPart( + f"@@GEN_DIR@@/{os.path.basename(cmd_part.cmd)}", + ConvertCustomTargetCmdPartType.STRING, + ) + ) + else: + filtered_cmds.append(cmd_part) + + new_target.convert_instance_cmds = filtered_cmds + return new_target + + +class SoongBackend(ConvertBackend): + """Soong backend for build system conversion.""" + + def 
__init__(self) -> None: + self.converted_custom_targets: T.Dict[str, T.Tuple[str, str]] = {} + + def get_os_info( + self, toolchain: AbstractToolchainInfo, choice: MachineChoice + ) -> SelectInstance: + machine_info = toolchain.machine_info[choice] + toolchain_str = toolchain.toolchains[choice] + + os_string: str + if machine_info.system == "linux": + if toolchain_str.startswith("linux_glibc"): + os_string = "linux_glibc" + else: + os_string = "linux_musl" + else: + os_string = machine_info.system + + os_select = SelectInstance(SelectId(SelectKind.OS, "", "os"), os_string) + return os_select + + def get_arch_info( + self, toolchain: AbstractToolchainInfo, choice: MachineChoice + ) -> SelectInstance: + machine_info = toolchain.machine_info[choice] + select_id = SelectId(SelectKind.ARCH, "", "arch") + arch = machine_info.cpu_family + + # meson CPU families' don't seem to accept "arm64" + if arch == "aarch64": + arch = "arm64" + + arch_select = SelectInstance(select_id, arch) + return arch_select + + def add_python_config( + self, target: ConvertPythonTarget, instance: ConvertInstancePythonTarget + ) -> None: + target.single_attributes[ConvertAttr.PYTHON_MAIN] = ( + f'"{instance.main.target_only()}"' + ) + target.get_attribute_node(ConvertAttr.SRCS).add_common_values( + _get_soong_sources(instance.srcs) + ) + target.get_attribute_node(ConvertAttr.SOONG_PYTHON_LIBS).add_common_values( + instance.libs + ) + + def add_flag_config( + self, + target: ConvertFlag, + instance: ConvertInstanceFlag, + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance]) -> None: # fmt: skip + if not hasattr(target, "project_native_args"): + target.project_native_args = False + if not hasattr(target, "host_supported"): + target.host_supported = False + + os_select = self.get_os_info(toolchain, MachineChoice.HOST) + arch_select = self.get_arch_info(toolchain, MachineChoice.HOST) + + label = {arch_select} | {os_select} | custom_instances + 
target.single_attributes[ConvertAttr.SOONG_VENDOR] = "true" + if toolchain.host_supported(): + target.single_attributes[ConvertAttr.SOONG_HOST_SUPPORTED] = "true" + target.host_supported = True + + if instance.project_native_args: + target.project_native_args = True + + if instance.language == "c": + target.get_attribute_node(ConvertAttr.SOONG_CFLAGS).add_conditional_values( + label, instance.compile_args + ) + elif instance.language == "cpp": + target.get_attribute_node( + ConvertAttr.SOONG_CPPFLAGS + ).add_conditional_values(label, instance.compile_args) + elif instance.language == "rust": + target.get_attribute_node(ConvertAttr.RUSTFLAGS).add_conditional_values( + label, instance.compile_args + ) + + if instance.link_args: + target.get_attribute_node(ConvertAttr.LDFLAGS).add_conditional_values( + label, instance.link_args + ) + + def add_include_dir_config( + self, + target: ConvertIncludeDirectory, + instance: ConvertInstanceIncludeDirectory, + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance]) -> None: # fmt: skip + os_select = self.get_os_info(toolchain, MachineChoice.HOST) + arch_select = self.get_arch_info(toolchain, MachineChoice.HOST) + + label = {arch_select} | {os_select} | custom_instances + target.single_attributes[ConvertAttr.SOONG_VENDOR] = "true" + if toolchain.host_supported(): + target.single_attributes[ConvertAttr.SOONG_HOST_SUPPORTED] = "true" + + target.get_attribute_node(ConvertAttr.INCLUDES).add_conditional_values( + label, list(instance.paths) + ) + + def add_file_group_config( + self, target: ConvertFileGroup, instance: ConvertInstanceFileGroup + ) -> None: + target.get_attribute_node(ConvertAttr.SRCS).add_common_values(instance.srcs) + + def _get_custom_target_cmd( + self, convert_instance_cmds: T.List[ConvertCustomTargetCmdPart] + ) -> str: + final_cmd = [] + for p in convert_instance_cmds: + if isinstance(p, ConvertCustomTargetCmdPart): + if p.cmd_type == ConvertCustomTargetCmdPartType.TOOL: + 
final_cmd.append(f"$(location {p.src.target_only()})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.PYTHON_BINARY: + final_cmd.append(f"$(location {p.src.target_only()})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.INPUT: + soong_src = _get_soong_sources([p.src])[0] + final_cmd.append(f"$(location {soong_src})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.OUTPUT: + final_cmd.append(f"$(location {p.cmd})") + elif p.cmd_type == ConvertCustomTargetCmdPartType.STRING: + processed_cmd = p.cmd.replace("@@GEN_DIR@@", "$(genDir)") + final_cmd.append(processed_cmd) + return " ".join(final_cmd) + + def add_custom_target( + self, state_tracker: ConvertStateTracker, instance: ConvertInstanceCustomTarget + ) -> None: + modified_targets: T.List[ConvertInstanceCustomTarget] = [] + if instance.generated_headers and instance.generated_sources: + modified_targets.append( + _custom_target_convert(instance, GeneratedFilesType.HEADERS) + ) + modified_targets.append( + _custom_target_convert(instance, GeneratedFilesType.IMPL) + ) + self.converted_custom_targets[instance.name] = ( + modified_targets[0].name, + modified_targets[1].name, + ) + else: + modified_targets.append(instance) + + for ct in modified_targets: + if ct.name not in state_tracker.targets: + state_tracker.targets[ct.name] = ConvertCustomTarget( + ct.name, ct.subdir, instance + ) + + target = T.cast(ConvertCustomTarget, state_tracker.targets[ct.name]) + if target.instance != instance: + state_tracker.targets.pop(ct.name) + mlog.warning("Dropped custom target that differed across configs") + return + + out = ct.generated_headers + ct.generated_sources + target.get_attribute_node(ConvertAttr.OUT).add_common_values(out) + target.get_attribute_node(ConvertAttr.SRCS).add_common_values( + _get_soong_sources(ct.srcs) + ) + target.get_attribute_node(ConvertAttr.TOOLS).add_common_values( + [t.target_only() for t in ct.tools] + ) + target.get_attribute_node(ConvertAttr.INCLUDES).add_common_values( + 
ct.export_include_dirs + ) + target.cmd = self._get_custom_target_cmd(ct.convert_instance_cmds) + + def add_build_target_config( + self, + target: ConvertBuildTarget, + instance: T.Union[ConvertInstanceStaticLibrary, ConvertInstanceSharedLibrary], + toolchain: AbstractToolchainInfo, + custom_instances: T.Set[SelectInstance]) -> None: # fmt: skip + if not hasattr(target, "install"): + target.install = False + + os_select = self.get_os_info(toolchain, instance.machine_choice) + arch_select = self.get_arch_info(toolchain, instance.machine_choice) + label = {arch_select} | {os_select} | custom_instances + + target.install |= instance.install + header_libs = list(instance.generated_include_dirs) + _get_soong_targets( + instance.header_libs + ) + + target.single_attributes[ConvertAttr.SOONG_VENDOR] = "true" + if toolchain.host_supported(): + target.single_attributes[ConvertAttr.SOONG_HOST_SUPPORTED] = "true" + + target.get_attribute_node(ConvertAttr.SOONG_DEFAULTS).add_common_values( + list(instance.generated_flags) + ) + + if target.rust_abi == RustABI.NONE: + if instance.c_std: + target.single_attributes[ConvertAttr.SOONG_C_STD] = ( + f'"{instance.c_std}"' + ) + if instance.cpp_std: + target.single_attributes[ConvertAttr.SOONG_CPP_STD] = ( + f'"{instance.cpp_std}"' + ) + target.get_attribute_node( + ConvertAttr.SOONG_HEADER_LIBS + ).add_conditional_values(label, header_libs) + + modified_gen_headers: T.List[str] = [] + modified_gen_sources: T.List[str] = [] + for header in instance.generated_headers: + if header.target in self.converted_custom_targets: + modified_gen_headers.append( + self.converted_custom_targets[header.target][0] + ) + else: + modified_gen_headers.append(header.target) + + for source in instance.generated_sources: + if source.target in self.converted_custom_targets: + modified_gen_sources.append( + self.converted_custom_targets[source.target][1] + ) + else: + modified_gen_sources.append(source.target) + + target.get_attribute_node( + 
ConvertAttr.SOONG_GENERATED_HEADERS + ).add_conditional_values(label, modified_gen_headers) + target.get_attribute_node( + ConvertAttr.SOONG_GENERATED_SOURCES + ).add_conditional_values(label, modified_gen_sources) + target.get_attribute_node(ConvertAttr.SRCS).add_conditional_values( + label, _get_soong_sources(instance.srcs) + ) + target.get_attribute_node( + ConvertAttr.SOONG_STATIC_LIBRARIES + ).add_conditional_values(label, _get_soong_targets(instance.static_libs)) + target.get_attribute_node( + ConvertAttr.SOONG_SHARED_LIBRARIES + ).add_conditional_values(label, _get_soong_targets(instance.shared_libs)) + target.get_attribute_node( + ConvertAttr.SOONG_WHOLE_STATIC_LIBRARIES + ).add_conditional_values( + label, _get_soong_targets(instance.whole_static_libs) + ) + else: + if instance.rust_edition: + target.single_attributes[ConvertAttr.RUST_EDITION] = ( + f'"{instance.rust_edition}"' + ) + target.single_attributes[ConvertAttr.RUST_CRATE_ROOT] = ( + f'"{instance.crate_root}"' + ) + target.single_attributes[ConvertAttr.RUST_CRATE_NAME] = ( + f'"{instance.crate_name}"' + ) + target.get_attribute_node(ConvertAttr.SOONG_RUST_LIBS).add_common_values( + _get_soong_targets(instance.static_libs) + ) + target.get_attribute_node(ConvertAttr.RUST_PROC_MACROS).add_common_values( + _get_soong_targets(instance.proc_macros) + ) diff --git a/mesonbuild/convert/build_systems/target.py b/mesonbuild/convert/build_systems/target.py new file mode 100644 index 000000000000..ab0844abd14a --- /dev/null +++ b/mesonbuild/convert/build_systems/target.py @@ -0,0 +1,359 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +from dataclasses import dataclass +from enum import Enum, IntEnum +from collections import defaultdict + +from mesonbuild.convert.common_defs import ( + SelectInstance, + SelectId, + SelectKind, +) +from 
mesonbuild.convert.instance.convert_instance_custom_target import ( + ConvertInstanceCustomTarget, +) +from mesonbuild.convert.instance.convert_instance_build_target import ( + RustABI, +) + + +class ConvertAttr(Enum): + # Common attributes (1-100) + NAME = 1 + SRCS = 2 + INCLUDES = 3 + LDFLAGS = 4 # LinkOpts in Bazel + PYTHON_MAIN = 5 + RUST_CRATE_NAME = 6 + RUST_CRATE_ROOT = 7 + RUST_EDITION = 8 + RUST_PROC_MACROS = 9 + RUSTFLAGS = 10 + TOOLS = 11 + OUT = 12 + # Soong-specific attributes (101-200) + SOONG_VENDOR = 101 + SOONG_HOST_SUPPORTED = 102 + SOONG_DEFAULTS = 103 + SOONG_GENERATED_HEADERS = 104 + SOONG_GENERATED_SOURCES = 105 + SOONG_HEADER_LIBS = 106 + SOONG_STATIC_LIBRARIES = 108 + SOONG_SHARED_LIBRARIES = 109 + SOONG_WHOLE_STATIC_LIBRARIES = 110 + SOONG_CFLAGS = 111 + SOONG_CPPFLAGS = 112 + SOONG_C_STD = 113 + SOONG_CPP_STD = 114 + SOONG_RUST_LIBS = 115 + SOONG_LINKER_VERSION_SCRIPT = 116 + SOONG_PYTHON_LIBS = 117 + # Bazel-specific attributes (200-300) + BAZEL_DEPS = 201 + BAZEL_FLAGS = 202 + BAZEL_HDRS = 203 + + +@dataclass +class SelectNode: + select_ids: T.List[SelectId] + select_tuples: T.List[T.Tuple[T.List[str], T.List[str]]] + + +class ConvertAttrNode: + """Representation of target attributes. + For example, + inc = include_directories('common') + srcs = files('common.c'); + if host_machine.system() == 'linux': + inc += include_directories('linux') + srcs += files('linux.c') + elif host_machine.system == 'windows' + inc += include_directories('windows') + srcs += files('windows.c') + If a build target uses both 'inc' and 'srcs', then it would + have the 'inc'/'srcs' attributes. These attributes would + have a set of common values and conditional values. + + The conditionality is provided the toolchain-generated SelectInstances + as well as the user-provided SelectInstances. 
+ """ + + def __init__(self, attribute: ConvertAttr): + self.attribute = attribute + self.common_values: T.Set[str] = set() + self.grouped_select_instances: T.Dict[str, T.List[T.Set[SelectInstance]]] = ( + defaultdict(list) + ) + self.all_select_instances: T.Dict[str, T.Set[SelectInstance]] = defaultdict(set) + self.common_custom_instances: T.Optional[T.Set[SelectInstance]] = None + self.select_nodes: T.List[SelectNode] = [] + + def add_common_values(self, values: T.List[str]) -> None: + if not values: + return + + self.common_values.update(values) + + def add_conditional_values( + self, label: T.Set[SelectInstance], values: T.List[str] + ) -> None: + for select_instance in label: + if select_instance.select_id.select_kind == SelectKind.CUSTOM: + if self.common_custom_instances is None: + self.common_custom_instances = {select_instance} + else: + self.common_custom_instances &= {select_instance} + + if not values: + return + + for value in values: + self.grouped_select_instances[value].append(label) + for select_instance in label: + self.all_select_instances[value].add(select_instance) + + def consolidate_conditionals( + self, + select_instance_groups: T.List[T.Set[SelectInstance]], + all_custom_defaults: T.Set[SelectInstance], + ) -> None: + """ + Simplifies and organizes conditional attribute values after all project + configurations have been processed. This method iterates through each attribute + value and its associated conditional labels (`SelectInstance` objects). It + intelligently determines if a value is truly conditional or if it's common + to all configurations by checking against groups of conditions (e.g., all + possible OSes, all architectures). + + For example, if a source file 'linux.c' is present in configurations for + `{os:linux, arch:x86_64}` and `{os:linux, arch:aarch64}`, and the project + is only configured for these two architectures, this method will deduce + that 'linux.c' is only conditional on the OS. 
The redundant architecture + conditions are removed. The final simplified condition is then stored in + a `SelectNode`. If a value is found to be present in all possible + configurations (i.e., it has no remaining unique conditions), it's moved + from being conditional to the `common_values` set. This process ensures + that the final build file output is as clean and minimal as possible. + """ + for value, labels_list in self.grouped_select_instances.items(): + processed_labels: T.List[T.Set[SelectInstance]] = [] + for label in labels_list: + current_label = label.copy() + # Remove any full group matches from the label + for group in select_instance_groups: + if group.issubset(self.all_select_instances[value]): + current_label -= group + + # Remove common custom instances + if self.common_custom_instances is not None: + for instance in self.common_custom_instances: + if instance in all_custom_defaults: + current_label -= {instance} + + if current_label: + processed_labels.append(current_label) + + if not processed_labels: + self.common_values.add(value) + else: + self.grouped_select_instances[value] = processed_labels + for new_label in processed_labels: + select_ids: T.List[SelectId] = [] + for select_instance in new_label: + select_ids.append(select_instance.select_id) + + select_ids.sort() + current_select_values = [] + for select_id in select_ids: + for select_instance in new_label: + if select_id == select_instance.select_id: + current_select_values.append(select_instance.value) + + found = False + for select_node in self.select_nodes: + if select_ids == select_node.select_ids: + for ( + existing_values, + attribute_values, + ) in select_node.select_tuples: + if existing_values == current_select_values: + if value not in attribute_values: + attribute_values.append(value) + found = True + break + if not found: + select_node.select_tuples.append( + (current_select_values, [value]) + ) + found = True + break + + if not found: + self.select_nodes.append( + 
SelectNode(select_ids, [(current_select_values, [value])]) + ) + + def get_select_nodes(self) -> T.List[SelectNode]: + if not self.common_values and not self.select_nodes: + return [] + + for select_node in self.select_nodes: + default_strings: T.List[str] = [] + for select_id in select_node.select_ids: + default_strings.append("default") + + select_node.select_tuples.append((default_strings, [])) + + for values, attribute_values in select_node.select_tuples: + attribute_values.sort() + + return self.select_nodes + + def empty(self) -> bool: + return bool(not self.common_values and not self.select_nodes) + + +# Determines the order in which ConvertTargets are emitted +class ConvertTargetType(IntEnum): + FILEGROUP = 0 + PYTHON_TARGET = 1 + CUSTOM_TARGET = 2 + INCLUDE_DIRECTORY = 3 + FLAG = 4 + RUST_FLAG = 5 + STATIC_LIBRARY = 6 + SHARED_LIBRARY = 7 + RUST_LIBRARY = 8 + RUST_FFI_STATIC = 9 + RUST_FFI_SHARED = 10 + UNKNOWN = 11 + + +class ConvertTarget: + """Base class for all converted build targets""" + + def __init__(self, name: str, subdir: str): + self.name = name + self.subdir = subdir + self.single_attributes: T.Dict[ConvertAttr, str] = {} + self.attribute_nodes: T.Dict[ConvertAttr, ConvertAttrNode] = {} + self.single_attributes[ConvertAttr.NAME] = f'"{name}"' + self.target_type = ConvertTargetType.UNKNOWN + + def get_attribute_node(self, attribute: ConvertAttr) -> ConvertAttrNode: + if attribute not in self.attribute_nodes: + self.attribute_nodes[attribute] = ConvertAttrNode(attribute) + + return self.attribute_nodes[attribute] + + def is_active_for_label( + self, attribute: ConvertAttr, label: T.Set[SelectInstance] + ) -> bool: + """Checks if a target has any values defined for the specific label.""" + attr_node = self.attribute_nodes.get(attribute) + if not attr_node: + return False + + # If it has common values, it's active for all labels + if attr_node.common_values: + return True + + # Check if the exact current label was used to add values + for 
labels_list in attr_node.grouped_select_instances.values(): + if label in labels_list: + return True + return False + + def finish( + self, + all_select_instance_groups: T.List[T.Set[SelectInstance]], + all_custom_defaults: T.Set[SelectInstance], + ) -> None: + for node in self.attribute_nodes.values(): + node.consolidate_conditionals( + all_select_instance_groups, all_custom_defaults + ) + + def __lt__(self, other: ConvertTarget) -> bool: + if self.target_type != other.target_type: + return self.target_type < other.target_type + return self.name < other.name + + def emit(self) -> str: + return "" + + +class ConvertFileGroup(ConvertTarget): + def __init__(self, name: str, subdir: str): + super().__init__(name, subdir) + self.target_type = ConvertTargetType.FILEGROUP + + +class ConvertPythonTarget(ConvertTarget): + def __init__(self, name: str, subdir: str): + super().__init__(name, subdir) + self.target_type = ConvertTargetType.PYTHON_TARGET + + +class ConvertCustomTarget(ConvertTarget): + def __init__( + self, + name: str, + subdir: str, + custom_target_instance: ConvertInstanceCustomTarget, + ): + super().__init__(name, subdir) + self.cmd: str = "" + self.instance = custom_target_instance + self.target_type = ConvertTargetType.CUSTOM_TARGET + + +class ConvertFlag(ConvertTarget): + def __init__(self, name: str, subdir: str, language: str): + super().__init__(name, subdir) + self.language = language + self.host_supported: bool = False + self.project_native_args: bool = False + if language == "rust": + self.target_type = ConvertTargetType.RUST_FLAG + else: + self.target_type = ConvertTargetType.FLAG + + +class ConvertIncludeDirectory(ConvertTarget): + def __init__(self, name: str, subdir: str): + super().__init__(name, subdir) + self.target_type = ConvertTargetType.INCLUDE_DIRECTORY + + +class ConvertBuildTarget(ConvertTarget): + def __init__(self, name: str, subdir: str, rust_abi: RustABI): + super().__init__(name, subdir) + self.rust_abi = rust_abi + 
class SelectKind(IntEnum):
    """Kinds of conditional axes a converted project can select on."""

    ARCH = 1
    OS = 2
    CUSTOM = 3
    TOOLCHAIN = 4


@dataclass(frozen=True, eq=True, order=True)
class SelectId:
    """Identity of one selectable axis: its kind plus a namespaced variable name.

    Ordering (kind, namespace, variable) gives a deterministic sort for emitters.
    """

    select_kind: SelectKind
    namespace: str
    variable: str


@dataclass(frozen=True, eq=True)
class SelectInstance:
    """A concrete value bound to a SelectId, e.g. ``project:opt=val``."""

    select_id: SelectId
    value: str

    @staticmethod
    def parse_from_string(custom_select: str) -> "SelectInstance":
        """Parse ``[namespace:]variable=value`` into a CUSTOM SelectInstance.

        The namespace is optional; a missing namespace yields "".

        Raises:
            MesonException: when no ``=`` separator is present.
        """
        head, colon, tail = custom_select.partition(":")
        # Only the first ':' separates the namespace; the rest stays intact.
        namespace, remainder = (head, tail) if colon else ("", head)
        variable, equals, value = remainder.partition("=")
        if not equals:
            raise MesonException(f"Invalid custom variable format: {custom_select}")
        return SelectInstance(SelectId(SelectKind.CUSTOM, namespace, variable), value)

    def __repr__(self) -> str:
        return f"{self.select_id.namespace}:{self.select_id.variable}={self.value}"


@dataclass(frozen=True, eq=True)
class CustomSelect:
    """A user-declared CUSTOM axis with its allowed values and default."""

    select_id: SelectId
    possible_values: T.List[str]
    default_value: str

    def __hash__(self) -> int:
        # Hash on the axis identity only; the value list is unhashable and
        # a coarser hash than __eq__ is still consistent with equality.
        return hash(self.select_id)

    def get_select_instances(self) -> T.Set[SelectInstance]:
        """Return one SelectInstance per allowed value of this axis."""
        return {SelectInstance(self.select_id, value) for value in self.possible_values}

    def get_default_instance(self) -> SelectInstance:
        """Return the SelectInstance carrying this axis's default value."""
        return SelectInstance(self.select_id, self.default_value)


@dataclass
class MesonOptionInstance:
    """One Meson option value tied to the SelectInstance that enables it."""

    meson_option: str
    meson_value: str
    select_instance: SelectInstance


@dataclass
class ProjectOptionsInstance:
    """A fully resolved set of Meson options plus their selecting instances."""

    meson_options: T.Dict[str, T.Any]
    select_instances: T.List[SelectInstance]

    def __repr__(self) -> str:
        return f"'{self.select_instances}'"
class MachineHolder(InterpreterObject):
    """A holder for machine information to make it available in the interpreter.

    Wraps a machine-info object (presumably ``environment.MachineInfo`` — the
    held object exposes ``system``/``cpu_family``/``cpu``/``endian`` attributes;
    TODO confirm against the caller) and re-exposes its fields as interpreter
    methods so ``meson.build`` expressions like ``host_machine.system()`` work
    inside the convert interpreter.
    """

    def __init__(self, machine_info: T.Any) -> None:
        super().__init__()
        # The wrapped machine-info object; attribute reads below delegate to it.
        self.holder = machine_info

    @InterpreterObject.method("system")
    @noKwargs
    def system_method(self, args: T.List[T.Any], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the OS name of the held machine (``system()`` in meson.build)."""
        return T.cast(str, self.holder.system)

    @InterpreterObject.method("cpu_family")
    @noKwargs
    def cpu_family_method(self, args: T.List[T.Any], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the CPU family (``cpu_family()`` in meson.build)."""
        return T.cast(str, self.holder.cpu_family)

    @InterpreterObject.method("cpu")
    @noKwargs
    def cpu_method(self, args: T.List[T.Any], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the exact CPU name (``cpu()`` in meson.build)."""
        return T.cast(str, self.holder.cpu)

    @InterpreterObject.method("endian")
    @noKwargs
    def endian_method(self, args: T.List[T.Any], kwargs: T.Dict[str, T.Any]) -> str:
        """Return the endianness string (``endian()`` in meson.build)."""
        return T.cast(str, self.holder.endian)
+ """ + + def __init__( + self, + build_info: Build, + project_instance: ConvertProjectInstance, + project_config: ConvertProjectConfig, + **kwargs: T.Any, + ): + super().__init__(build_info, **kwargs) + self.project_instance = project_instance + self.project_config = project_config + + prefix = self.environment.get_prefix() + libdir = self.environment.get_libdir() + install_dir = libdir + if not os.path.isabs(libdir): + install_dir = os.path.join(prefix, libdir) + + self.project_instance.set_directories( + self.environment.get_source_dir(), + self.environment.get_build_dir(), + install_dir, + ) + self.variables["host_machine"] = MachineHolder( + self.build.environment.machines.host + ) + self.variables["build_machine"] = MachineHolder( + self.build.environment.machines.build + ) + self.variables["target_machine"] = MachineHolder( + self.build.environment.machines.target + ) + self.funcs["find_program"] = self.func_find_program + + @noArgsFlattening + def func_find_program( + self, + node: mparser.BaseNode, + args: T.List[T.Any], + kwargs: T.Dict[str, T.Any], + ) -> programs.ExternalProgram: + prog_names = args[0] + if not isinstance(prog_names, list): + prog_names = [prog_names] + + for prog_name in prog_names: + # Check if the program is a script in the source tree + script_path = os.path.join( + self.environment.source_dir, self.subdir, prog_name + ) + is_file = os.path.isfile(script_path) + if is_file: + # If it's a python script, prepend the python executable + if prog_name.endswith(".py"): + return programs.ExternalProgram( + prog_name, command=[sys.executable, script_path] + ) + return programs.ExternalProgram(prog_name, command=[script_path]) + + prog_info = self.project_config.dependencies.programs.get(prog_name) + if prog_info: + prog = programs.ExternalProgram(prog_name, command=[prog_name]) + prog.found = lambda: True # type: ignore[method-assign] + version = prog_info.get("version") + if version: + prog.version = version # type: ignore[attr-defined] + 
return prog + + return programs.NonExistingExternalProgram(prog_names[0]) + + def run_command_impl( + self, + args: T.Tuple[ + T.Union[ + Executable, + programs.Program, + Compiler, + File, + str, + T.List[T.Any], + ], + T.List[ + T.Union[ + Executable, + programs.Program, + Compiler, + File, + str, + ] + ], + ], + kwargs: "kwargs.RunCommand", + in_builddir: bool = False, + ) -> RunProcess: + emulated_process = RunProcess.__new__(RunProcess) + emulated_process.returncode = 0 + emulated_process.stdout = "" + emulated_process.stderr = "" + + cmd, raw_args = args + + cmd_args: T.List[ + T.Union[Executable, programs.Program, Compiler, File, str] + ] = [] + for arg in raw_args: + if isinstance(arg, list): + cmd_args.extend(arg) + else: + cmd_args.append(arg) + + cmd_name = "" + if isinstance(cmd, programs.ExternalProgram): + cmd_name = cmd.get_name() + elif isinstance(cmd, str): + cmd_name = cmd + elif isinstance(cmd, list): + cmd_name = cmd[0] + cmd_args = cmd[1:] + cmd_args + else: + cmd_name = T.cast(str, cmd) + + prog_info = self.project_config.dependencies.programs.get(cmd_name) + if "--version" in cmd_args: + if prog_info and "version" in prog_info: + emulated_process.stdout = str(prog_info["version"]) + emulated_process.stderr = "" + + emulated_process.subproject = self.subproject + return emulated_process + + def _redetect_machines(self) -> None: + pass + + def print_extra_warnings(self) -> None: + pass + + def _print_summary(self) -> None: + pass + + def track_assignment(self, name: str) -> None: + variable = self.variables.get(name) + if not variable: + return + + if isinstance(variable, ObjectHolder): + variable = variable.held_object + + if isinstance(variable, list): + self.project_instance.interpreter_info.assign(name, self.subdir, variable) + for obj in variable: + self.project_instance.interpreter_info.assign(name, self.subdir, obj) + else: + self.project_instance.interpreter_info.assign(name, self.subdir, variable) + + def assignment(self, node: 
mparser.AssignmentNode) -> None: + name = node.var_name.value + super().assignment(node) + self.track_assignment(name) + + def evaluate_plusassign(self, node: mparser.PlusAssignmentNode) -> None: + name = node.var_name.value + super().evaluate_plusassign(node) + self.track_assignment(name) + + def func_dependency( + self, + node: mparser.BaseNode, + args: T.List[T.Any], + kwargs: T.Dict[str, T.Any], + ) -> dependency_base.Dependency: + desired_dep_name = args[0] if args else kwargs.get("name", "unnamed") + + if T.TYPE_CHECKING: + typed_kwargs = T.cast(dependency_base.DependencyObjectKWs, kwargs) + else: + typed_kwargs = kwargs + + if desired_dep_name == "threads": + kwargs["native"] = MachineChoice.HOST + return ThreadDependency( + "threads", + self.environment, + typed_kwargs, + ) + + dep_info = ( + self.project_config.dependencies.shared_libraries.get(desired_dep_name) + or self.project_config.dependencies.static_libraries.get(desired_dep_name) + or self.project_config.dependencies.header_libraries.get(desired_dep_name) + ) + + if dep_info: + kwargs["native"] = MachineChoice.HOST + dep = dependency_base.ExternalDependency( + "system", + self.environment, + typed_kwargs, + ) + dep.type_name = T.cast(DependencyTypeName, "library") + dep.is_found = True + dep.version = ( + dep_info[0].get("version", "convert_instance") + if dep_info + else "convert_instance" + ) + + configtool_checks = dep_info[0].get("configtool", {}) if dep_info else {} + pkgconfig_checks = dep_info[0].get("pkgconfig", {}) if dep_info else {} + + def get_variable(*args: T.Any, **kwargs: T.Any) -> T.Optional[str]: + ct_var = kwargs.get("configtool") + if ct_var and ct_var in configtool_checks: + return T.cast(T.Optional[str], configtool_checks.get(ct_var)) + pc_var = kwargs.get("pkgconfig") + if pc_var and pc_var in pkgconfig_checks: + return T.cast(T.Optional[str], pkgconfig_checks.get(pc_var)) + return None + + dep.get_variable = get_variable # type: ignore[method-assign] + dep.name = 
desired_dep_name + + if self.project_config.dependencies.static_libraries.get(desired_dep_name): + dep.static = True + + return dep + + required = kwargs.get("required", True) + if isinstance(required, options.UserFeatureOption): + required = bool(required.value == "enabled") + + if required: + raise MesonException( + f"Dependency '{desired_dep_name}' not found and is required." + ) + + return dependency_base.NotFoundDependency(desired_dep_name, self.environment) diff --git a/mesonbuild/convert/convert_interpreter_info.py b/mesonbuild/convert/convert_interpreter_info.py new file mode 100644 index 000000000000..8f65d1cc9181 --- /dev/null +++ b/mesonbuild/convert/convert_interpreter_info.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T + +from mesonbuild import build +from mesonbuild.mesonlib import File + + +def determine_key(obj: T.Any) -> T.Optional[str]: + canonical_string: T.Optional[str] = None + if isinstance(obj, build.IncludeDirs): + canonical_string = "|".join( + [ + obj.curdir, + ",".join(sorted(obj.incdirs)), + str(obj.is_system), + ",".join(sorted(obj.extra_build_dirs)), + ] + ) + elif isinstance(obj, str): + canonical_string = obj + elif isinstance(obj, File): + canonical_string = obj.subdir + obj.fname + + return canonical_string + + +class ConvertInterpreterInfo: + """Holds assignment data gathered by the `ConvertInterpreter`. + + For example, for an assignment like `inc_data = include_directories('data')`, + this class tracks the name 'inc_data' and associates it with the + corresponding Meson `IncludeDirs` object. 
+ """ + + def __init__(self) -> None: + self.assignments: T.Dict[str, T.Tuple[str, str]] = {} + + def assign(self, name: str, subdir: str, obj: T.Any) -> None: + key = determine_key(obj) + if key not in self.assignments: + self.assignments[key] = (name, subdir) + + def lookup_assignment(self, obj: T.Any) -> T.Optional[str]: + key = determine_key(obj) + if key in self.assignments: + return self.assignments[key][0] + + return None + + def lookup_full_assignment(self, obj: T.Any) -> T.Optional[T.Tuple[str, str]]: + key = determine_key(obj) + if key in self.assignments: + return self.assignments[key] + + return None diff --git a/mesonbuild/convert/convert_project_config.py b/mesonbuild/convert/convert_project_config.py new file mode 100644 index 000000000000..849c24082a4d --- /dev/null +++ b/mesonbuild/convert/convert_project_config.py @@ -0,0 +1,211 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +import itertools + +from mesonbuild.convert.common_defs import ( + SelectKind, + SelectId, + SelectInstance, + CustomSelect, + MesonOptionInstance, + ProjectOptionsInstance, +) +from mesonbuild.convert.abstract.abstract_dependencies import ( + AbstractDependencies, +) +from mesonbuild.convert.convert_project_instance import ConvertProjectInstance + + +class ConvertProjectConfig: + """Holds data that remains static across meson convert invocations (dependencies, + user-project details)""" + + def __init__(self, + config_data: T.Dict[str, T.Any], + dependencies: AbstractDependencies, + config_dir: str = ""): # fmt: skip + self._toml_data = config_data + self.dependencies = dependencies + self.config_dir = config_dir + + @property + def build_system(self) -> str: + return T.cast(str, self._toml_data.get("project", {}).get("build_system", "")) + + @property + def project_name(self) -> str: + return T.cast(str, self._toml_data.get("project", 
{}).get("project_name", "")) + + @property + def handwritten_modules(self) -> T.Optional[str]: + return T.cast( + T.Optional[str], + self._toml_data.get("project", {}).get("handwritten_modules"), + ) + + @property + def copyright(self) -> T.Dict[str, T.Any]: + return T.cast(T.Dict[str, T.Any], self._toml_data.get("copyright", {})) + + @property + def custom_target(self) -> T.Dict[str, T.Any]: + return T.cast(T.Dict[str, T.Any], self._toml_data.get("custom_target", {})) + + @property + def target_renames(self) -> T.Dict[str, str]: + return T.cast(T.Dict[str, str], self._toml_data.get("target_renames", {})) + + def _get_option_combinations(self, + static_options: T.Dict[str, T.Any], + variable_options: T.Dict[str, T.List[T.Any]]) -> T.List[ProjectOptionsInstance]: # fmt: skip + """ + Computes the Cartesian product of all variable Meson options. + + This function takes the `variable_options` defined in the TOML configuration and generates + all possible unique combinations of them. Each combination results in a distinct + `ProjectOptionsInstance`. + + A `ProjectInstance` is a fully resolved set of Meson options and the corresponding + `SelectInstance`s that define that specific configuration variant. This allows the + tool to analyze each permutation of the build configuration independently. + """ + options: T.List[ProjectOptionsInstance] = [] + custom_selects = self.get_all_custom_selects() + if not custom_selects: + options.append(ProjectOptionsInstance(static_options, [])) + return options + + if not variable_options: + meson_options = static_options.copy() + select_instances: T.List[SelectInstance] = [] + for select in custom_selects: + select_instances.append( + SelectInstance(select.select_id, select.default_value) + ) + + options.append(ProjectOptionsInstance(meson_options, select_instances)) + return options + + # Get all instances of the Meson project. 
+ # + # Input TOML: + # - [config.variable_options] + # meson_opt_x = [ + # { true, "project:opt_a=val1" } + # { false, "project:opt_a=val2" } + # ] + # meson_opt_y = [ + # { true, "project:opt_b=val3" } + # { false, "project:opt_b=val4" } + # ] + # + # Output: + # - ProjectOptionsInstance: + # meson_options: + # {meson_opt_x = true, meson_opt_y = true } + # select_instances: + # {project_opt_a = val1, project_opt_b = val3 } + # - ProjectOptionsInstance: + # meson_options: + # {meson_opt_x = true, meson_opt_b = false } + # select_instances: + # {project_opt_a = val1, project_opt_b = val4 } + # + # (...) + all_instances: T.List[T.List[MesonOptionInstance]] = [] + for option_name, variants in variable_options.items(): + opt_instances: T.List[MesonOptionInstance] = [] + for variant in variants: + meson_value = variant["value"] + select_string = variant["select"] + opt_instances.append( + MesonOptionInstance( + option_name, + meson_value, + SelectInstance.parse_from_string(select_string), + ) + ) + + all_instances.append(opt_instances) + + custom_select_ids: T.Dict[SelectId, CustomSelect] = {} + for select in custom_selects: + custom_select_ids[select.select_id] = select + + all_instances_product = list(itertools.product(*all_instances)) + for instance_set in all_instances_product: + meson_options = static_options.copy() + missing_select_ids = custom_select_ids.copy() + select_instances_loop: T.List[SelectInstance] = [] + for opt_instance in instance_set: + meson_options[opt_instance.meson_option] = opt_instance.meson_value + select_instances_loop.append(opt_instance.select_instance) + if opt_instance.select_instance.select_id in missing_select_ids: + missing_select_ids.pop(opt_instance.select_instance.select_id) + + for select in missing_select_ids.values(): + select_instances_loop.append( + SelectInstance(select.select_id, select.default_value) + ) + + options.append(ProjectOptionsInstance(meson_options, select_instances_loop)) + + return options + + def 
get_project_instances(self) -> T.List[ConvertProjectInstance]: + configs = self._toml_data.get("config", []) + instances = [] + + for config in configs: + config_name = config.get("config_name", "") + toolchains_data = config.get("toolchains", {}) + host_toolchains = toolchains_data.get("host_toolchains", []) + build_toolchains = toolchains_data.get("build_toolchains", []) + static_options = config.get("static_options", {}).copy() + variable_options = config.get("variable_options", {}) + + combinations = self._get_option_combinations( + static_options, variable_options + ) + + for host_toolchain in host_toolchains: + for build_toolchain in build_toolchains: + for combo in combinations: + instances.append( + ConvertProjectInstance( + name=config_name, + host_toolchain=host_toolchain, + build_toolchain=build_toolchain, + option_instance=combo, + ) + ) + + return instances + + def get_all_custom_selects(self) -> T.List[CustomSelect]: + custom_vars_data = self._toml_data.get("custom_variable", []) + return [ + CustomSelect( + select_id=SelectId( + select_kind=SelectKind.CUSTOM, + namespace=select_data.get("namespace", ""), + variable=select_data.get("name", ""), + ), + possible_values=select_data.get("possible_values", []), + default_value=select_data.get("default_value", ""), + ) + for select_data in custom_vars_data + ] + + def is_dependency_necessary(self, dep_name: str) -> bool: + # Common libraries usually apart of the libc implementation + return dep_name not in ["threads", "m", "dl", "c", "rt"] + + def sanitize_target_name(self, target_name: str) -> str: + if target_name in self.target_renames: + return self.target_renames[target_name] + return target_name.translate(str.maketrans("", "", "[]")) diff --git a/mesonbuild/convert/convert_project_instance.py b/mesonbuild/convert/convert_project_instance.py new file mode 100644 index 000000000000..36757159cdfe --- /dev/null +++ b/mesonbuild/convert/convert_project_instance.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python3 
class ConvertProjectInstance:
    """A single meson project configuration. A meson convert invocation can have multiple
    meson configurations"""

    def __init__(
        self,
        name: str,
        host_toolchain: str,
        build_toolchain: str,
        option_instance: "ProjectOptionsInstance",
    ):
        self.name = name
        self.host_toolchain = host_toolchain
        self.build_toolchain = build_toolchain
        self.option_instance = option_instance

        # Filled in later via set_directories() once the interpreter knows them.
        self.project_dir = ""
        self.install_dir = ""
        self.build_dir = ""

        # Assignment tracking populated while the interpreter runs.
        self.interpreter_info = ConvertInterpreterInfo()

    def set_directories(
        self, project_dir: str, build_dir: str, install_dir: str
    ) -> None:
        """Record the source, build, and install directories for this instance."""
        self.project_dir = project_dir
        self.build_dir = build_dir
        self.install_dir = install_dir

    def emit(self) -> None:
        """Log a human-readable summary of this configuration."""
        mlog.set_verbose()
        mlog.log(f"Processing config: {self.name}")
        mlog.log(f""" Processing -- host toolchain: {self.host_toolchain}, build toolchain:
            {self.build_toolchain}, custom variables {self.option_instance.select_instances}""")
        mlog.set_quiet()

    def normalize_path(self, path: str, current_subdir: str) -> str:
        """Resolve *path* (relative to *current_subdir*) to a project-relative path.

        Exits the process when the resolved path does not exist on disk.
        """
        candidate = os.path.normpath(
            str(Path(self.project_dir) / current_subdir / path)
        )
        if not os.path.exists(candidate):
            sys.exit(f"Unknown path: {candidate}")
        return os.path.relpath(candidate, self.project_dir)

    def normalize_file_path(self, file_path: str, current_subdir: str) -> str:
        """Like normalize_path() but return only the containing directory."""
        return os.path.dirname(self.normalize_path(file_path, current_subdir))

    def normalize_string(
        self, input_string: str, current_subdir: str
    ) -> T.Optional[str]:
        """Replace the first matching known directory prefix with a placeholder token.

        Checked in order: the per-subdir generated dir, then the install,
        build, and project dirs; only the first match is substituted.
        """
        substitutions = (
            (self.build_dir + "/" + current_subdir, "@@GEN_DIR@@"),
            (self.install_dir, "@@INSTALL_DIR@@"),
            (self.build_dir, "@@BUILD_DIR@@"),
            (self.project_dir, "@@PROJECT_DIR@@"),
        )
        for needle, token in substitutions:
            if needle in input_string:
                return input_string.replace(needle, token)
        return input_string
def generate(
    project_instance: ConvertProjectInstance,
    options: argparse.Namespace,
    toolchain: AbstractToolchain,
    env: environment.Environment,
    state_tracker: ConvertStateTracker,
    custom_select_instances: T.Set[SelectInstance]) -> None:  # fmt: skip
    """
    Sets up and runs the Meson interpreter for a single project configuration.

    This function configures the environment with the abstract toolchain, runs the
    custom `ConvertInterpreter` to analyze the `meson.build` files, and then
    iterates through the results.

    It processes all discovered targets (custom, build, etc.), converting them into
    build-system-agnostic representations and adding them to the state tracker for
    later emission.
    """
    env.machines.host = toolchain.toolchain_info.machine_info[MachineChoice.HOST]
    env.machines.build = toolchain.toolchain_info.machine_info[MachineChoice.BUILD]
    state_tracker.set_current_config(toolchain.toolchain_info, custom_select_instances)

    # Register every compiler the abstract toolchain can provide, for the host
    # machine first and then the build machine (c, cpp, rust each).  This
    # replaces six near-identical if-blocks while keeping the same order.
    for machine in (MachineChoice.HOST, MachineChoice.BUILD):
        for language, factory in (
            ("c", toolchain.create_c_compiler),
            ("cpp", toolchain.create_cpp_compiler),
            ("rust", toolchain.create_rust_compiler),
        ):
            compiler = factory(machine)
            if compiler:
                env.coredata.compilers[machine][language] = compiler

    build_info = build.Build(env)
    user_defined_options = options
    # Project-level meson_options first; explicit command-line options layered
    # on top so they win on key collisions.
    d: T.Dict[OptionKey, T.Any] = {
        OptionKey.from_string(k): v
        for k, v in project_instance.option_instance.meson_options.items()
    }
    if hasattr(user_defined_options, "cmd_line_options"):
        d.update(
            {
                OptionKey.from_string(k): v
                for k, v in user_defined_options.cmd_line_options.items()
            }
        )
    user_defined_options.cmd_line_options = d

    intr = ConvertInterpreter(
        build_info,
        project_instance,
        state_tracker.project_config,
        user_defined_options=user_defined_options,
    )
    # Previously wrapped in `try: ... except Exception as e: raise e`, which is
    # a no-op re-raise that only destroys nothing and adds noise; run directly.
    intr.run()

    # De-duplication guards: the same python binary / filegroup / include dir /
    # flag may be produced by several targets but must be registered once.
    processed_python_targets: T.Set[str] = set()
    processed_filegroups: T.Set[str] = set()
    processed_include_dirs: T.Set[str] = set()
    processed_flags: T.Set[str] = set()

    for custom_target in build_info.get_custom_targets().values():
        convert_ct = ConvertInstanceCustomTarget(
            custom_target, project_instance, state_tracker.project_config
        )
        if convert_ct.skip_custom_target:
            continue
        state_tracker.add_custom_target(convert_ct)
        python_target = convert_ct.get_python_target()
        if python_target:
            if python_target.name not in processed_python_targets:
                state_tracker.add_python_target(python_target)
                processed_python_targets.add(python_target.name)
        for filegroup in convert_ct.get_generated_filegroups():
            if filegroup.name and filegroup.name not in processed_filegroups:
                state_tracker.add_file_group(filegroup)
                processed_filegroups.add(filegroup.name)

    for target in build_info.get_build_targets().values():
        build_target: T.Optional[
            T.Union[ConvertInstanceStaticLibrary, ConvertInstanceSharedLibrary]
        ] = None
        if isinstance(target, build.StaticLibrary):
            build_target = ConvertInstanceStaticLibrary(
                build_info,
                target,
                project_instance,
                state_tracker.project_config,
            )
            state_tracker.add_static_library(build_target)
        elif isinstance(target, build.SharedLibrary):
            build_target = ConvertInstanceSharedLibrary(
                build_info,
                target,
                project_instance,
                state_tracker.project_config,
            )
            state_tracker.add_shared_library(build_target)
        if not build_target:
            # Executables and other target kinds are not converted here.
            continue
        for flag in build_target.generated_flags.values():
            if flag.name not in processed_flags:
                processed_flags.add(flag.name)
                state_tracker.add_flag(flag)
        for include_dir in build_target.generated_include_dirs.values():
            if include_dir.name and include_dir.name not in processed_include_dirs:
                processed_include_dirs.add(include_dir.name)
                state_tracker.add_include_directory(include_dir)
        for filegroup in build_target.generated_filegroups.values():
            if filegroup.name and filegroup.name not in processed_filegroups:
                state_tracker.add_file_group(filegroup)
                processed_filegroups.add(filegroup.name)
def create_default_options(args: argparse.Namespace) -> argparse.Namespace:
    """Populate *args* with the defaults a convert run needs.

    NOTE: mutates *args* in place and returns the same object.  The source dir
    is the project dir, the build dir defaults to <project_dir>/convert-build,
    and the 'none' backend is forced so no real build files are generated.
    """
    args.sourcedir = args.project_dir
    args.builddir = os.path.join(args.project_dir, "convert-build")
    args.cross_file = []
    args.backend = "none"
    args.projectoptions = []
    args.native_file = []
    args.cmd_line_options = {}
    return args


def choose_build_system(project_config: ConvertProjectConfig,
                        output_dir: str) -> T.Tuple[ConvertStateTracker, CommonEmitter]:  # fmt: skip
    """Return the (state tracker, emitter) pair for the configured build system.

    Supports 'soong' and 'bazel'; exits the process for anything else.
    """
    if project_config.build_system == "soong":
        state_tracker: ConvertStateTracker = ConvertStateTracker(
            project_config, SoongBackend()
        )
        emitter: CommonEmitter = CommonEmitter(output_dir, SoongEmitterBackend())
    elif project_config.build_system == "bazel":
        state_tracker = ConvertStateTracker(project_config, BazelBackend())
        emitter = CommonEmitter(output_dir, BazelEmitterBackend())
    else:
        sys.exit(f"Build system {project_config.build_system} not supported.")

    return (state_tracker, emitter)


def convert_build_system(
    config_toml: T.Dict[str, T.Any],
    toolchain_toml: T.Dict[str, T.Any],
    dependencies_toml: T.Dict[str, T.Any],
    options: argparse.Namespace) -> int:  # fmt: skip
    """
    Converts a Meson project to a different build system based on the provided configuration.

    This tool operates by parsing three main TOML configuration files:
    - project.toml: a project's structure and build options,
    - toolchain.toml: toolchains + sysroots for a hermetic project
    - dependency.toml: mappings to external dependencies needed by the project

    The main logic iterates through each build configuration specified in the project's TOML file.

    For each configuration, it simulates a Meson build environment using an "abstract" toolchain
    that mimics a real one without actually invoking any compilers.

    A custom version of the Meson interpreter runs on the project's `meson.build` files.
    Instead of generating build commands, this interpreter gathers detailed information about all
    targets, sources, dependencies, and compiler flags.

    This information is collected into a build-system-agnostic state tracker. After processing all
    configurations, this tracker consolidates the data, resolving any conditional logic. Finally, a
    build system-specific "emitter" (e.g., for Soong or Bazel) takes this consolidated state and
    generates the corresponding build files (e.g., `BUILD.bazel` or `Android.bp`).

    Returns 0 on success (errors exit the process via sys.exit in the helpers).
    """
    dependencies = AbstractDependencies(dependencies_toml)
    # Index the [[toolchain]] tables by their "name" key for quick lookup.
    toolchain_configs = {
        tc.get("name"): tc for tc in toolchain_toml.get("toolchain", [])
    }
    options.project_dir = os.path.abspath(options.project_dir)
    output_dir = (
        os.path.abspath(options.output_dir)
        if options.output_dir
        else options.project_dir
    )

    project_config = ConvertProjectConfig(
        config_toml, dependencies, os.path.dirname(options.config)
    )
    state_tracker, emitter = choose_build_system(project_config, output_dir)
    state_tracker.project_dir = options.project_dir

    mlog.set_quiet()
    # Iterate over ConvertProjectInstance objects
    for project_instance in project_config.get_project_instances():
        # NOTE(review): create_default_options mutates the single shared
        # `options` namespace on every iteration, so per-iteration state (e.g.
        # cmd_line_options rewritten inside generate()) is reset here.
        default_options = create_default_options(options)
        default_options.cmd_line_options = (
            project_instance.option_instance.meson_options
        )
        custom_select_instances = set(project_instance.option_instance.select_instances)
        project_instance.emit()
        # The interpreter needs an on-disk build dir; a throwaway temp dir is
        # enough since the 'none' backend writes no build rules.
        with tempfile.TemporaryDirectory() as build_dir:
            default_options.builddir = build_dir
            env = environment.Environment(
                default_options.sourcedir,
                default_options.builddir,
                default_options,
            )
            toolchain = AbstractToolchain(
                env,
                project_instance.host_toolchain,
                project_instance.build_toolchain,
                toolchain_configs,
                toolchain_toml,
            )

            # NOTE(review): set_current_config() is called here AND again with
            # the same arguments inside generate() — confirm the double call is
            # intentional / idempotent.
            state_tracker.set_current_config(
                toolchain.toolchain_info, custom_select_instances
            )
            generate(
                project_instance,
                default_options,
                toolchain,
                env,
                state_tracker,
                custom_select_instances,
            )
        state_tracker.finish_current_config()

    state_tracker.finish()
    emitter.emit(state_tracker)
    return 0
def _determine_name(original_name: str,
                    project_config: ConvertProjectConfig,
                    rust_abi: RustABI) -> str:  # fmt: skip
    # Soong has a separation of module name and crate_name. If the crate_name is 'serde', then
    # the module_name must be 'libserde'. Binaries are ignored for now by this tool.
    if rust_abi != RustABI.NONE:
        if not original_name.startswith("lib"):
            return "lib" + original_name
        return original_name
    else:
        return project_config.sanitize_target_name(original_name)


def _determine_rust_abi(build_target: build.BuildTarget) -> RustABI:
    # The rust_abi kwarg is optional; absence (or a falsy value) means "not rust".
    rust_abi = build_target.original_kwargs.get("rust_abi")
    if rust_abi:
        return RustABI(rust_abi)

    return RustABI.NONE


def _determine_files_type(outputs: T.List[str]) -> GeneratedFilesType:
    # Classify generated outputs as headers, implementation files, or both
    # (GeneratedFilesType is an IntFlag, so the bits accumulate).
    files_type: GeneratedFilesType = GeneratedFilesType.UNKNOWN
    for output in outputs:
        if output.endswith(".h") or output.endswith(".hpp"):
            files_type |= GeneratedFilesType.HEADERS
        else:
            files_type |= GeneratedFilesType.IMPL

    return files_type


class ConvertInstanceBuildTarget:
    """A representation of build.BuildTarget, but optimized for the convert tool.

    Construction immediately parses the given Meson build target into
    build-system-agnostic collections (srcs, dependency lists, filegroups,
    include dirs, flags) that the emitters later consume.
    """

    def __init__(
        self,
        build_info: build.Build,
        build_target: build.BuildTarget,
        project_instance: ConvertProjectInstance,
        project_config: ConvertProjectConfig):  # fmt: skip
        # Sanitized output module name; filled in by _handle_naming.
        self.name: str = ""

        # Rust-specific bookkeeping (crate name/root, edition, proc macros).
        self.rust_abi: RustABI = RustABI.NONE
        self.crate_root: str = ""
        self.crate_name: str = ""
        self.src_subdirs: T.Set[str] = set()
        self.rust_edition: T.Optional[str] = None
        self.proc_macros: T.List[ConvertDep] = []

        # Translated sources and dependency buckets.
        self.srcs: T.List[ConvertSrc] = []
        self.static_libs: T.List[ConvertDep] = []
        self.header_libs: T.List[ConvertDep] = []
        self.shared_libs: T.List[ConvertDep] = []
        self.whole_static_libs: T.List[ConvertDep] = []
        self.generated_headers: T.List[ConvertDep] = []
        self.generated_sources: T.List[ConvertDep] = []
        self.c_std: T.Optional[str] = None
        self.cpp_std: T.Optional[str] = None

        # Args contributed by InternalDependency objects (see
        # _handle_external_dependencies).
        self.compile_args_deps: T.List[str] = []
        self.link_args_deps: T.List[str] = []
        self.linker_version_script_name: T.Optional[str] = None

        # Named artifacts generated while parsing, keyed by name.
        self.generated_filegroups: T.Dict[str, ConvertInstanceFileGroup] = {}
        self.generated_include_dirs: T.Dict[str, ConvertInstanceIncludeDirectory] = {}
        self.generated_flags: T.Dict[str, ConvertInstanceFlag] = {}
        self.generated_linker_flags: T.Dict[str, ConvertInstanceFlag] = {}

        self.subdir = build_target.subdir
        self.machine_choice = build_target.for_machine
        self.install = build_target.install
        self._parse_build_target(
            build_info, build_target, project_instance, project_config
        )

    def _parse_build_target(
        self,
        build_info: build.Build,
        build_target: build.BuildTarget,
        project_instance: ConvertProjectInstance,
        project_config: ConvertProjectConfig) -> None:  # fmt: skip
        """
        Main entry point for processing a `build.BuildTarget`.

        This method serves as a dispatcher that orchestrates the parsing of a raw
        `build.BuildTarget` from Meson.

        It calls various specialized `_handle_*` methods to translate all relevant
        properties of the target—such as its name, sources, dependencies, etc.
        """
        self._handle_naming(build_target, project_config)
        self._handle_sources(build_target, project_instance)
        self._handle_generated_sources(build_target, project_config)
        self._handle_external_dependencies(build_target, project_config)
        self._handle_internal_dependencies(build_target, project_config)
        self._handle_include_dirs(build_target, project_instance)
        self._handle_compile_args(
            build_info, build_target, project_instance, project_config
        )
        self._handle_linker_args(
            build_info, build_target, project_instance, project_config
        )
        self._handle_language_standards(build_target)

    def _handle_naming(self,
                       build_target: build.BuildTarget,
                       project_config: ConvertProjectConfig) -> None:  # fmt: skip
        # For rust targets the basename becomes the crate name while the
        # module name gets the soong-style 'lib' prefix (see _determine_name).
        self.rust_abi = _determine_rust_abi(build_target)
        if self.rust_abi != RustABI.NONE:
            self.crate_name = build_target.get_basename()
        self.name = _determine_name(
            build_target.get_basename(), project_config, self.rust_abi
        )

    def _handle_sources(self,
                        build_target: build.BuildTarget,
                        project_instance: ConvertProjectInstance) -> None:  # fmt: skip
        """Translate the target's source Files into srcs / filegroups / include dirs.

        Files living outside the target's own subdir (or carrying a path
        component) cannot be referenced directly and are wrapped in a filegroup
        (impl files) or an include-directory artifact (headers).
        """
        for file in build_target.sources:
            if not isinstance(file, File):
                continue

            needs_filegroup = (
                file.subdir != self.subdir or file.fname != os.path.basename(file.fname)
            )

            if needs_filegroup:
                fg_name = project_instance.interpreter_info.lookup_assignment(file)
                # A path component overrides any interpreter-assigned name.
                if file.fname != os.path.basename(file.fname):
                    fg_name = determine_filegroup_name(file.fname)

                if file.fname.endswith(".h") or file.fname.endswith(".hpp"):
                    fg_name = fg_name + "_headers"
                    if fg_name in self.generated_include_dirs:
                        self.generated_include_dirs[fg_name].add_header_file(
                            file, project_instance
                        )
                    else:
                        directory = ConvertInstanceIncludeDirectory(fg_name)
                        directory.add_header_file(file, project_instance)
                        self.generated_include_dirs[directory.name] = directory
                else:
                    fg_name = fg_name + "_impl"
                    if fg_name in self.generated_filegroups:
                        self.generated_filegroups[fg_name].add_source_file(
                            file, project_instance
                        )
                    else:
                        filegroup = ConvertInstanceFileGroup(name=fg_name)
                        filegroup.add_source_file(file, project_instance)
                        self.generated_filegroups[fg_name] = filegroup
                        # NOTE(review): the filegroup reference is appended to
                        # srcs only on creation — confirm this matches the
                        # original placement of the append.
                        self.srcs.append(
                            ConvertSrc.from_target(filegroup.name, filegroup.subdir)
                        )
            else:
                if not file.fname.endswith(".h") and not file.fname.endswith(".hpp"):
                    self.srcs.append(ConvertSrc(file.fname))

        if self.rust_abi != RustABI.NONE:
            # Prefer lib.rs as the crate root; otherwise fall back to the
            # first source.
            for s in self.srcs:
                if os.path.basename(s.source) == "lib.rs":
                    self.crate_root = s.source
                    break
            if not self.crate_root and self.srcs:
                self.crate_root = self.srcs[0].source

        for s in self.srcs:
            self.src_subdirs.add(os.path.dirname(s.source))

    def _handle_external_dependencies(self,
                                      build_target: build.BuildTarget,
                                      project_config: ConvertProjectConfig) -> None:  # fmt: skip
        """Map Meson external dependencies to ConvertDep entries via dependency.toml."""
        for d in build_target.external_deps:
            if d.found() and isinstance(d, dependency_base.ExternalDependency):
                # NOTE(review): dep_info is None when d.name appears in none of
                # the three tables; the dep_info[0] accesses below would then
                # raise — presumably is_dependency_necessary guards this, verify.
                dep_info = (
                    project_config.dependencies.shared_libraries.get(d.name)
                    or project_config.dependencies.static_libraries.get(d.name)
                    or project_config.dependencies.header_libraries.get(d.name)
                )

                if not project_config.is_dependency_necessary(d.name):
                    continue

                repo_name = dep_info[0].get("repo_name", "")
                subdir = dep_info[0].get("subdir", "")
                target_name = dep_info[0].get("target_name")
                source_url = dep_info[0].get("source_url", "")
                source_filename = dep_info[0].get("source_filename", "")
                source_hash = dep_info[0].get("source_hash")
                is_proc_macro = dep_info[0].get("proc_macro", False)

                dep = ConvertDep(
                    target_name,
                    subdir,
                    repo_name,
                    source_url,
                    source_filename,
                    source_hash,
                )
                if d.name in project_config.dependencies.header_libraries:
                    self.header_libs.append(dep)
                elif d.name in project_config.dependencies.static_libraries:
                    if is_proc_macro:
                        self.proc_macros.append(dep)
                    else:
                        self.static_libs.append(dep)
                else:
                    if project_config.is_dependency_necessary(target_name):
                        self.shared_libs.append(dep)
            elif isinstance(d, dependency_base.InternalDependency):
                # meson likes to put link + compile args as internal dependencies
                # for some reason
                self.compile_args_deps.extend(d.get_compile_args())
                self.link_args_deps.extend(d.get_link_args())

    def _handle_include_dirs(self, build_target: build.BuildTarget,
                             project_instance: ConvertProjectInstance) -> None:  # fmt: skip
        """Group the target's include dirs under interpreter-assigned names when available."""
        for include_dir in build_target.include_dirs:
            directory_name = project_instance.interpreter_info.lookup_assignment(
                include_dir
            )
            if directory_name is not None:
                if directory_name in self.generated_include_dirs:
                    self.generated_include_dirs[directory_name].add_include_dir(
                        include_dir, project_instance
                    )
                else:
                    directory = ConvertInstanceIncludeDirectory(directory_name)
                    directory.add_include_dir(include_dir, project_instance)
                    self.generated_include_dirs[directory.name] = directory
            else:
                # No assigned name: let the artifact pick its own name, then
                # merge with any same-named artifact already collected.
                directory = ConvertInstanceIncludeDirectory()
                directory.add_include_dir(include_dir, project_instance)
                if directory.name not in self.generated_include_dirs:
                    self.generated_include_dirs[directory.name] = directory
                else:
                    self.generated_include_dirs[directory.name].add_include_dir(
                        include_dir, project_instance
                    )

    def _handle_generated_sources(self,
                                  build_target: build.BuildTarget,
                                  project_config: ConvertProjectConfig) -> None:  # fmt: skip
        """Record custom targets that generate this target's headers/sources."""
        targets_to_process: T.List[T.Tuple[build.Target, T.List[str]]] = []
        target_mapping: T.Dict[str, CustomTargetInfo] = defaultdict(CustomTargetInfo)
        for obj in build_target.get_generated_sources():
            if isinstance(obj, build.CustomTarget):
                targets_to_process.append((obj, T.cast(T.List[str], obj.outputs)))
                # Extra dependencies that are themselves custom targets count too.
                for extra_dep in obj.extra_depends:
                    if isinstance(extra_dep, build.CustomTarget):
                        targets_to_process.append(
                            (extra_dep, T.cast(T.List[str], extra_dep.outputs))
                        )
            elif isinstance(obj, build.CustomTargetIndex):
                targets_to_process.append((obj.target, [obj.output]))

        # Merge per-target info; the files_type bits accumulate across entries.
        for target, outputs in targets_to_process:
            info = target_mapping[target.name]
            info.subdir = target.subdir
            info.files_type |= _determine_files_type(outputs)

        for name, info in target_mapping.items():
            sanitized_name = _determine_name(name, project_config, self.rust_abi)
            dep = ConvertDep(sanitized_name, info.subdir)

            if info.files_type & GeneratedFilesType.HEADERS:
                self.generated_headers.append(dep)
            if info.files_type & GeneratedFilesType.IMPL:
                self.generated_sources.append(dep)

    def _handle_internal_dependencies(self,
                                      build_target: build.BuildTarget,
                                      project_config: ConvertProjectConfig) -> None:  # fmt: skip
        """Translate link_whole / linked targets into static/shared lib deps."""
        for target in build_target.link_whole_targets:
            if not isinstance(target, build.BuildTarget):
                continue
            target_rust_abi = _determine_rust_abi(target)
            # Non-rust targets never link whole rust-ABI crates.
            if self.rust_abi == RustABI.NONE and target_rust_abi == RustABI.RUST:
                continue

            sanitized_name = _determine_name(
                target.name, project_config, target_rust_abi
            )
            if isinstance(target, build.BuildTarget):
                self.whole_static_libs.append(ConvertDep(sanitized_name, target.subdir))

        for linked_target in build_target.get_all_linked_targets():
            if not isinstance(linked_target, build.BuildTarget):
                continue
            target_rust_abi = _determine_rust_abi(linked_target)
            if self.rust_abi == RustABI.NONE and target_rust_abi == RustABI.RUST:
                continue

            sanitized_name = _determine_name(
                linked_target.name, project_config, target_rust_abi
            )
            dep = ConvertDep(sanitized_name, linked_target.subdir)
            if isinstance(linked_target, build.StaticLibrary):
                # NOTE(review): parses as `(dep not in static_libs) or
                # (whole_static_libs non-empty)`, so duplicates are appended
                # whenever whole_static_libs has entries — confirm intended.
                if dep not in self.static_libs or self.whole_static_libs:
                    self.static_libs.append(dep)
            elif isinstance(linked_target, build.SharedLibrary):
                if dep not in self.shared_libs:
                    self.shared_libs.append(dep)

    def _handle_linker_args(
        self,
        build_info: build.Build,
        build_target: build.BuildTarget,
        project_instance: ConvertProjectInstance,
        project_config: ConvertProjectConfig) -> None:  # fmt: skip
        """Collect project-wide and per-target link args; extract version scripts."""
        for language in build_target.compilers.keys():
            compiler = build_target.compilers[language]
            project_link_args = build_info.get_project_link_args(compiler, build_target)

            if project_link_args:
                name = f"{project_config.project_name}_{language}_link_args"
                flag = ConvertInstanceFlag(name, "", language)
                for arg in project_link_args:
                    flag.add_link_arg(arg)
                self.generated_flags[name] = flag

            filtered_link_args = []
            for arg in build_target.link_args:
                # Meson should really support version scripts to avoid special cases, such as here.
                # https://github.com/mesonbuild/meson/issues/3047
                if arg == "-Wl,--version-script":
                    continue

                normalized_string = project_instance.normalize_string(arg, self.subdir)
                if normalized_string != arg:
                    # Version script detected
                    if normalized_string is not None and normalized_string.startswith(
                        "@@PROJECT_DIR@@/"
                    ):
                        normalized_string = normalized_string.replace(
                            "@@PROJECT_DIR@@/", ""
                        )
                    elif normalized_string is not None:
                        normalized_string = normalized_string.replace(
                            "@@PROJECT_DIR@@", ""
                        )
                    else:
                        continue

                    subdir, filename = os.path.split(normalized_string)
                    file = File.from_source_file(
                        project_instance.project_dir, subdir, filename
                    )

                    # Expose the version script through a filegroup reference.
                    filegroup = ConvertInstanceFileGroup()
                    filegroup.add_source_file(file, project_instance)
                    self.generated_filegroups[filegroup.name] = filegroup
                    self.linker_version_script_name = ":" + filegroup.name
                    continue

                filtered_link_args.append(arg)

            for arg in filtered_link_args:
                # Skip args already covered by the project-wide link flag.
                if arg in project_link_args:
                    continue

                assignment = project_instance.interpreter_info.lookup_full_assignment(
                    arg
                )
                if assignment is not None:
                    name, subdir = assignment
                    if name in self.generated_flags:
                        self.generated_flags[name].add_link_arg(arg)
                    else:
                        flag = ConvertInstanceFlag(name, subdir, language)
                        flag.add_link_arg(arg)
                        self.generated_flags[name] = flag

    def _handle_compile_args(
        self,
        build_info: build.Build,
        build_target: build.BuildTarget,
        project_instance: ConvertProjectInstance,
        project_config: ConvertProjectConfig) -> None:  # fmt: skip
        """Collect project-wide and per-target compile args into named flag artifacts."""
        for language in build_target.compilers.keys():
            compiler = build_target.compilers[language]
            project_args = build_info.get_project_args(compiler, build_target)

            if project_args:
                name = f"{project_config.project_name}_{language}_project_args"
                flag = ConvertInstanceFlag(name, "", language)
                if language in {"c", "cpp"}:
                    flag.project_native_args = True

                for arg in project_args:
                    flag.add_compile_arg(arg)

                self.generated_flags[name] = flag

            # Per-target extra args plus args contributed by internal deps.
            extra_args = build_target.get_extra_args(language) + self.compile_args_deps
            if extra_args:
                for arg in extra_args:
                    if arg in project_args:
                        continue

                    assignment = (
                        project_instance.interpreter_info.lookup_full_assignment(arg)
                    )
                    if assignment is not None:
                        name, subdir = assignment
                    else:
                        # Fall back to a per-target flag bucket.
                        name = f"{build_target.name}_{language}_flags"
                        subdir = self.subdir

                    if name in self.generated_flags:
                        self.generated_flags[name].add_compile_arg(arg)
                    else:
                        flag = ConvertInstanceFlag(name, subdir, language)
                        flag.add_compile_arg(arg)
                        self.generated_flags[name] = flag

    def _handle_language_standards(self, build_target: build.BuildTarget) -> None:
        """Record effective c_std / cpp_std / rust edition for this target."""
        if "c" in build_target.compilers:
            c_std = build_target.environment.coredata.get_option_for_target(
                build_target, OptionKey("c_std")
            )
            if c_std and c_std != "none":
                self.c_std = str(c_std)

        if "cpp" in build_target.compilers:
            cpp_std = build_target.environment.coredata.get_option_for_target(
                build_target, OptionKey("cpp_std")
            )
            if cpp_std and cpp_std != "none":
                self.cpp_std = str(cpp_std)

        if self.rust_abi != RustABI.NONE:
            edition = build_target.environment.coredata.get_option_for_target(
                build_target, OptionKey("rust_std")
            )
            if edition:
                self.rust_edition = str(edition)


class ConvertInstanceStaticLibrary(ConvertInstanceBuildTarget):
    def __str__(self) -> str:
        return f"@StaticLibrary({self.name})"


class ConvertInstanceSharedLibrary(ConvertInstanceBuildTarget):
    def __str__(self) -> str:
        return f"@SharedLibrary({self.name})"


class ConvertInstanceExecutable(ConvertInstanceBuildTarget):
    def __str__(self) -> str:
        return f"@Executable({self.name})"
class ConvertCustomTargetCmdPartType(Enum):
    """Classification of a single token in a custom-target command line."""
    TOOL = 1
    PYTHON_BINARY = 2
    INPUT = 3
    OUTPUT = 4
    STRING = 5


@dataclass(eq=True, unsafe_hash=True)
class ConvertCustomTargetCmdPart:
    """One token of a translated custom-target command."""
    cmd: str
    cmd_type: ConvertCustomTargetCmdPartType
    src: T.Optional[ConvertSrc] = None

    @staticmethod
    def from_convert_src(source: ConvertSrc) -> ConvertCustomTargetCmdPart:
        """Wrap *source* as an INPUT command part with an empty cmd string."""
        return ConvertCustomTargetCmdPart(
            "", ConvertCustomTargetCmdPartType.INPUT, source
        )


def get_component_dirs(subdir: str) -> T.List[str]:
    """Return every ancestor path of *subdir*, shortest first.

    e.g. "a/b/c" -> ["a", "a/b", "a/b/c"].
    """
    segments = subdir.split("/")
    return ["/".join(segments[: depth + 1]) for depth in range(len(segments))]


def index_from_string(input_str: str) -> int:
    """Extract the numeric index from an @INPUTn@/@OUTPUTn@ placeholder.

    Returns 0 for un-indexed placeholders and for non-placeholder strings.
    """
    if not input_str.startswith(("@INPUT", "@OUTPUT")):
        return 0
    digits = input_str.replace("@INPUT", "").replace("@OUTPUT", "").rstrip("@")
    return int(digits) if digits else 0


def is_python_script(input_str: str) -> bool:
    """True when the name looks like a Python script (.py or _py suffix)."""
    return input_str.endswith((".py", "_py"))


def python_script_to_binary(input_str: str) -> str:
    """Derive a *_py_binary module name from a python script path.

    The suffix table mirrors the original cascade: 'gen.py'/'gen_py' drop the
    whole 6-character suffix before appending, plain '.py'/'_py' drop 3.
    """
    name = os.path.basename(input_str)
    for suffix, replacement in (
        ("gen.py", "py_binary"),
        ("gen_py", "py_binary"),
        (".py", "_py_binary"),
        ("_py", "_py_binary"),
    ):
        if name.endswith(suffix):
            return name[: -len(suffix)] + replacement
    return name + "_py_binary"


@dataclass
class ConvertInstancePythonTarget:
    """A python tool invoked by a custom target, emitted as its own module."""
    main: ConvertSrc
    subdir: str = ""
    name: str = ""
    srcs: T.List[ConvertSrc] = field(default_factory=list)
    libs: T.List[str] = field(default_factory=list)
    def get_python_target(self) -> T.Optional[ConvertInstancePythonTarget]:
        """Return the python tool behind this custom target, or None.

        Requires both a detected python script and a tool whose name ends in
        '_py_binary'; dependencies for the python interpreter are taken from
        the project's program mappings ('python3' preferred over 'python').
        """
        if self.python_script is None:
            return None

        python_binary_name = ""
        for tool in self.tools:
            if tool.target_only().endswith("_py_binary"):
                python_binary_name = tool.target_only()
                break

        if not python_binary_name:
            return None

        python_target = ConvertInstancePythonTarget(
            self.python_script,
            self.tool_subdir,
            python_binary_name,
            self.python_depend_files.get_sources(),
        )
        if self.project_config.dependencies.programs:
            tool_config = (
                self.project_config.dependencies.programs.get("python3")
                or self.project_config.dependencies.programs.get("python")
                or {}
            )
            python_target.libs.extend(tool_config.get("dependencies", []))
        return python_target

    def get_generated_filegroups(self) -> T.List[ConvertInstanceFileGroup]:
        """Return all filegroups created while parsing this custom target."""
        return list(self.generated_filegroups.values())

    def _handle_environment(self, custom_target: build.CustomTarget) -> None:
        """Prepend KEY=value command parts for the target's environment variables."""
        if custom_target.env:
            for key, val in custom_target.env.get_env({}).items():
                # Values may embed absolute build/install/project paths;
                # normalize them to @@PLACEHOLDER@@ form.
                sanitized_val = self.project_instance.normalize_string(val, self.subdir)
                assert sanitized_val is not None
                env_cmd = f"{key}={sanitized_val}"
                self.convert_instance_cmds.append(
                    ConvertCustomTargetCmdPart(
                        env_cmd, ConvertCustomTargetCmdPartType.STRING
                    )
                )

    def _parse_custom_target(self, custom_target: build.CustomTarget) -> None:
        """
        Main entry point for processing a `build.CustomTarget`.

        This method orchestrates the parsing of a raw `build.CustomTarget` from
        Meson. It delegates the analysis of the target's command, outputs, and
        dependencies to various `_handle_*` methods.

        These methods are responsible for translating the different parts of the
        custom target into a build-system-agnostic representation.
        """
        self._handle_environment(custom_target)

        for command in custom_target.command:
            if isinstance(command, File):
                self._handle_file(command, custom_target)
            elif isinstance(command, programs.ExternalProgram):
                self._handle_program(command)
            elif isinstance(command, str):
                self._handle_string(command, custom_target)

        for output in custom_target.outputs:
            output_str = T.cast(str, output)
            # this only works for one output, but that's the case we see in practice
            if custom_target.capture:
                self.convert_instance_cmds.append(
                    ConvertCustomTargetCmdPart(
                        ">", ConvertCustomTargetCmdPartType.STRING
                    )
                )
            self.convert_instance_cmds.append(
                ConvertCustomTargetCmdPart(
                    output_str, ConvertCustomTargetCmdPartType.OUTPUT
                )
            )

            if output_str.endswith(".h"):
                self.generated_headers.append(output_str)
            else:
                self.generated_sources.append(output_str)

        for file in custom_target.depend_files:
            if isinstance(file, File):
                self._handle_file(file, custom_target)

        self._apply_workarounds(custom_target)

    def _handle_input(self, src: T.Any, custom_target: build.CustomTarget) -> None:
        """Translate one custom-target input into command parts and srcs.

        NOTE(review): only str inputs raise; any other unrecognized type is
        silently ignored — confirm that is intentional.
        """
        if isinstance(src, File):
            self._handle_file(src, custom_target)
        elif isinstance(src, (build.CustomTarget, build.CustomTargetIndex)):
            # Reference the generating target through its first output.
            output = src.get_outputs()[0]
            self.convert_instance_cmds.append(
                ConvertCustomTargetCmdPart(output, ConvertCustomTargetCmdPartType.INPUT)
            )
            self.srcs.append(ConvertSrc(output))
        elif isinstance(src, str):
            raise MesonException(f"Type: {type(src)} not handled, exiting...")
filegroup = ConvertInstanceFileGroup(name=fg_name) + filegroup.add_source_file(file, self.project_instance) + self.generated_filegroups[fg_name] = filegroup + else: + fg_name = determine_filegroup_name(file.fname) + filegroup = ConvertInstanceFileGroup() + filegroup.add_source_file(file, self.project_instance) + fg_name = filegroup.name + self.generated_filegroups[fg_name] = filegroup + + subdir = self.generated_filegroups[fg_name].subdir + src = ConvertSrc.from_target(fg_name, subdir) + else: + src = ConvertSrc(file.fname) + subdir = file.subdir + + if is_python_script(file.fname) and self.python_script is None: + self.python_script = ConvertSrc.from_target( + os.path.basename(file.fname), subdir + ) + python_binary = python_script_to_binary(file.fname) + tool_src = ConvertSrc.from_target(python_binary, subdir) + if tool_src in self.tools: + return + + self.tool_subdir = subdir + self.python_depend_files.add(src) + + self.tools.append(tool_src) + self.convert_instance_cmds.append( + ConvertCustomTargetCmdPart( + python_binary, + ConvertCustomTargetCmdPartType.PYTHON_BINARY, + tool_src, + ) + ) + elif src not in self.srcs: + if file not in custom_target.depend_files: + self.convert_instance_cmds.append( + ConvertCustomTargetCmdPart.from_convert_src(src) + ) + self.srcs.append(src) + elif is_python_script(file.fname): + self.python_depend_files.add(src) + else: + self.srcs.append(src) + + def _handle_program(self, program: programs.ExternalProgram) -> None: + prog_name = program.get_name() + if prog_name in {"python", "python3"}: + return + + if is_python_script(prog_name): + self.python_script = ConvertSrc(prog_name) + self.tool_subdir = self.subdir + + python_binary = python_script_to_binary(prog_name) + tool_src = ConvertSrc.from_target(python_binary, self.tool_subdir) + self.tools.append(tool_src) + self.convert_instance_cmds.append( + ConvertCustomTargetCmdPart( + python_binary, + ConvertCustomTargetCmdPartType.PYTHON_BINARY, + tool_src, + ) + ) + else: + if 
self.project_config.dependencies.programs: + prog_config = self.project_config.dependencies.programs.get(prog_name) + if prog_config is None: + raise MesonException( + f"Type: {type(prog_name)} not present, exiting..." + ) + + tool_name = prog_name + if prog_config and "path" in prog_config: + tool_name = prog_config["path"] + + tool_src = ConvertSrc.from_target(tool_name, "") + self.tools.append(tool_src) + self.convert_instance_cmds.append( + ConvertCustomTargetCmdPart( + tool_name, ConvertCustomTargetCmdPartType.TOOL, tool_src + ) + ) + + def _handle_string( + self, command_string: str, custom_target: build.CustomTarget + ) -> None: + if command_string == "@INPUT@": + for j, src in enumerate(custom_target.sources): + self._handle_input(src, custom_target) + elif command_string.startswith("@INPUT"): + idx = index_from_string(command_string) + assert idx is not None + src = custom_target.sources[idx] + self._handle_input(src, custom_target) + elif command_string.startswith("@OUTPUT"): + value = index_from_string(command_string) + assert value is not None + output = custom_target.outputs[value] + output_str = T.cast(str, output) + self.convert_instance_cmds.append( + ConvertCustomTargetCmdPart( + output_str, ConvertCustomTargetCmdPartType.OUTPUT + ) + ) + else: + normalized_string = self.project_instance.normalize_string( + command_string, self.subdir + ) + assert normalized_string is not None + processed_string = self._handle_normalized_string( + normalized_string, custom_target + ) + self.convert_instance_cmds.append( + ConvertCustomTargetCmdPart( + processed_string, ConvertCustomTargetCmdPartType.STRING + ) + ) + + def _handle_normalized_string( + self, normalized_str: str, custom_target: build.CustomTarget + ) -> str: + sanitized_parts = [] + string_parts = normalized_str.split(" ") + + for part in string_parts: + if part.startswith("-I"): + # Ignore for now, maybe do something if desired sources not specified + # as depend_files or depends + continue + elif 
part.startswith("@@PROJECT_DIR@@"): + sanitized = part.replace("@@PROJECT_DIR@@", "") + sanitized_parts.append(sanitized) + elif part.startswith("@@INSTALL_DIR@@"): + # Install dir undefined for hermetic builds for now + self.skip_custom_target = True + elif "@DEPFILE@" in part: + depfile = custom_target.get_dep_outname(self.srcs[0].target_only()) # type: ignore + sanitized_parts.append(part.replace("@DEPFILE@", depfile)) + else: + sanitized_parts.append(part) + + if len(sanitized_parts) > 1: + joined_string = " ".join(sanitized_parts) + return f"'{joined_string}'" + else: + return " ".join(sanitized_parts) + + def _apply_workarounds(self, custom_target: build.CustomTarget) -> None: + workarounds = self.project_config.custom_target.get("workarounds", {}) + if not workarounds: + return + + # Only one workaround now: export_include_dirs + export_includes = workarounds.get("export_include_dirs", []) + if not export_includes: + return + + if custom_target.name not in export_includes: + return + + prefixed_headers = [] + for header in self.generated_headers: + prefixed_headers.append(os.path.join(self.subdir, header)) + + for cmd_part in self.convert_instance_cmds: + if header == cmd_part.cmd: + cmd_part.cmd = os.path.join(self.subdir, cmd_part.cmd) + + self.generated_headers = prefixed_headers + self.export_include_dirs = get_component_dirs(self.subdir) diff --git a/mesonbuild/convert/instance/convert_instance_utils.py b/mesonbuild/convert/instance/convert_instance_utils.py new file mode 100644 index 000000000000..1c4f74ac2553 --- /dev/null +++ b/mesonbuild/convert/instance/convert_instance_utils.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +from __future__ import annotations +import typing as T +from dataclasses import dataclass +import os + +from mesonbuild.mesonlib import File +from mesonbuild import build +from mesonbuild.convert.convert_project_instance import 
# NOTE(review): this chunk continues convert_instance_utils.py; the module
# header and its imports sit at the end of the previous chunk line.


def _add_file_to_group(
    new_file: File,
    project_instance: ConvertProjectInstance,
    current_subdir: T.Optional[str],
    current_paths: T.List[str],
) -> T.Tuple[str, T.List[str]]:
    """Fold *new_file* into a file group.

    Returns the group's new common subdir and its source paths, all
    re-expressed relative to that subdir.
    """
    file_dir = project_instance.normalize_file_path(new_file.fname, new_file.subdir)
    group_dir = (
        os.path.commonpath([current_subdir, file_dir])
        if current_subdir is not None
        else file_dir
    )

    paths = list(current_paths)
    if current_subdir is not None and current_subdir != group_dir:
        # The common root moved up: re-express every known path relative to it.
        paths = [
            os.path.relpath(os.path.join(current_subdir, p), group_dir)
            for p in paths
        ]

    file_path = os.path.join(file_dir, os.path.basename(new_file.fname))
    paths.append(os.path.relpath(file_path, group_dir))
    return group_dir, paths


def determine_filegroup_name(source: str) -> str:
    """Derive a filegroup name from a source path, e.g. ``a/b.py`` -> ``b_py``."""
    root, ext = os.path.splitext(source)
    return f"{os.path.basename(root)}_{ext[1:]}"


@dataclass
class ConvertDep:
    """A dependency edge; identity (hash/equality) is the target name alone."""

    target: str
    subdir: str = ""
    repo: str = ""
    source_url: str = ""
    source_filename: str = ""
    source_hash: str = ""

    def __hash__(self) -> int:
        return hash(self.target)

    def __eq__(self, other: object) -> bool:
        return self.target == other.target if isinstance(other, ConvertDep) else False


@dataclass
class ConvertSrc:
    """Either a plain source path or a reference to another target."""

    source: str
    target_dep: T.Optional[ConvertDep] = None

    @staticmethod
    def from_target(target_name: str, target_subdir: str) -> ConvertSrc:
        """Build a ConvertSrc that refers to a target instead of a file."""
        return ConvertSrc("", ConvertDep(target_name, target_subdir))

    def target_only(self) -> str:
        return self.target_dep.target if self.target_dep else self.source

    def __hash__(self) -> int:
        return hash(self.target_dep) if self.target_dep else hash(self.source)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, ConvertSrc):
            return False
        return self.source == other.source and self.target_dep == other.target_dep


class ConvertSrcList:
    """Helper to deduplicate ConvertSrc and normalize paths"""

    def __init__(self) -> None:
        self.srcs: T.Dict[str, ConvertSrc] = {}

    def add(self, src: ConvertSrc) -> None:
        """Insert *src*, merging subdirs when the same target is seen again."""
        key = src.target_dep.target if src.target_dep else src.source

        existing = self.srcs.get(key)
        if src.target_dep and existing is not None:
            # Same target seen from two subdirs: keep their common root.
            # NOTE(review): assumes the existing entry also carries a
            # target_dep — confirm keys never collide across the two kinds.
            existing.target_dep.subdir = os.path.commonpath(
                [existing.target_dep.subdir, src.target_dep.subdir]
            )
        else:
            self.srcs[key] = src

    def get_sources(self) -> T.List[ConvertSrc]:
        return list(self.srcs.values())


class ConvertInstanceFlag:
    """Holds things like c_flags, cpp_flags, link_flags that can be applied to a target"""

    def __init__(self, name: str, subdir: str, language: str):
        self.name = name
        self.subdir = subdir
        self.language = language
        self.project_native_args = False
        self.compile_args: T.List[str] = []
        self.link_args: T.List[str] = []

    @staticmethod
    def _escaped(arg: str) -> str:
        # Double quotes must survive re-emission into the generated build file.
        return arg.replace('"', '\\"')

    def add_compile_arg(self, arg: str) -> None:
        self.compile_args.append(self._escaped(arg))
        self.compile_args.sort()

    def add_link_arg(self, arg: str) -> None:
        self.link_args.append(self._escaped(arg))
        self.link_args.sort()


class ConvertInstanceIncludeDirectory:
    """Representation of build.IncludeDirs, optimized for the convert operation"""

    def __init__(self, name: T.Optional[str] = None) -> None:
        self.subdir: T.Optional[str] = None
        self.paths: T.Set[str] = set()
        self.name = name

    def add_include_dir(
        self,
        include_dir: build.IncludeDirs,
        project_instance: ConvertProjectInstance,
    ) -> None:
        """Merge *include_dir* into this set, re-rooting at the common path."""
        self.subdir = include_dir.curdir
        absolute = [
            project_instance.normalize_path(directory, self.subdir)
            for directory in include_dir.incdirs
        ]

        for prev in self.paths:
            # NOTE(review): plain string concatenation (no os.path.join), and
            # previously-added relative entries remain in self.paths after the
            # re-rooting below — confirm both are intended.
            absolute.append(self.subdir + ("" if prev == "." else prev))

        self.subdir = os.path.commonpath(absolute)
        for abs_path in absolute:
            self.paths.add(os.path.relpath(abs_path, self.subdir))

        if self.name is None:
            self.name = "inc_" + self.subdir.replace("/", "_")

    def add_header_file(
        self, file: File, project_instance: ConvertProjectInstance
    ) -> None:
        """Add a header file; headers contribute their directory, not the file."""
        self.subdir, rel_paths = _add_file_to_group(
            file, project_instance, self.subdir, list(self.paths)
        )
        self.paths = set()
        for rel in rel_paths:
            if rel.endswith((".h", ".hpp")):
                self.paths.add(os.path.dirname(rel) or ".")
            else:
                self.paths.add(rel)


class ConvertInstanceFileGroup:
    """A set of files with associated metadata. Translates to Soong/Bazel filegroup module"""

    def __init__(self, name: T.Optional[str] = None) -> None:
        self.name = name
        self.subdir: T.Optional[str] = None
        self.srcs: T.List[str] = []

    def add_source_file(
        self, file: File, project_instance: ConvertProjectInstance
    ) -> None:
        """Add a source file, then name the group after its first source."""
        self.subdir, self.srcs = _add_file_to_group(
            file, project_instance, self.subdir, self.srcs
        )
        if self.name is None:
            self.name = determine_filegroup_name(self.srcs[0])


# --- new file: mesonbuild/convert/reference/android/aosp_mesa3d.toml ---
# Copyright 2026 The Meson Development Team
# SPDX-License-Identifier: Apache-2.0
[project]
project_name = 'aosp_mesa3d'
build_system = 'soong'
# Modules defined here must be relative to the root
handwritten_modules = 'gfxstream_workaround.bp'

[copyright]
license_name = "external_mesa3d_license"
licenses = ["MIT", "Apache-2.0", "GPL-1.0-or-later", "GPL-2.0-only"]
license_texts = ["LICENSE"]

[target_renames]
vulkan_lvp = "vulkan.lvp"
vulkan_gfxstream
= "vulkan.ranchu" + +[custom_target] +[custom_target.workarounds] +# Maybe we should always do this. +export_include_dirs = [ + "u_format_gen.h", + "spirv_info", + "leaf.spv.h", + "morton.spv.h", + "lbvh_main.spv.h", + "lbvh_generate_ir.spv.h", + "ploc_internal.spv.h", + "init.comp.spv.h", + "fill.comp.spv.h", + "histogram.comp.spv.h", + "prefix.comp.spv.h", + "scatter_0_even.comp.spv.h", + "scatter_0_odd.comp.spv.h", + "scatter_1_even.comp.spv.h", + "scatter_1_odd.comp.spv.h", + "hploc_internal.spv.h", +] + +[[custom_variable]] +namespace = "aosp_mesa3d" +name = "perfetto" +possible_values = ["true", "default"] +default_value = "default" + +[[config]] +config_name = 'aosp_reference_drivers' + +[config.toolchains] +host_toolchains = [ + "android_arm64", + "android_x86_64", + "android_x86" +] +build_toolchains = ["linux_glibc_x86_64"] + +[config.static_options] +glx = "disabled" +zlib = "disabled" +platforms = "android" +android-strict = false +vulkan-drivers = "gfxstream,swrast" +gallium-drivers = "" +virtgpu_kumquat = true +opengl = false +egl = "disabled" + +[config.variable_options] +perfetto = [ + { value = true, select = "aosp_mesa3d:perfetto=true" }, + { value = false, select = "aosp_mesa3d:perfetto=default" } +] + +[[config]] +config_name = 'aosp_emulated_android_gfxstream' + +[config.toolchains] +host_toolchains = ["linux_glibc_x86_64", "linux_musl_x86_64", "linux_musl_aarch64"] +build_toolchains = ["linux_glibc_x86_64"] + +[config.static_options] +glx = "disabled" +zlib = "disabled" +platforms = "android" +android-stub = true +vulkan-drivers = "gfxstream" +gallium-drivers = "" +virtgpu_kumquat = true +opengl = false +egl = "disabled" + +[[config]] +config_name = 'aosp_host_lavapipe' + +[config.toolchains] +host_toolchains = ["linux_glibc_x86_64", "linux_musl_x86_64", "linux_musl_aarch64"] +build_toolchains = ["linux_glibc_x86_64"] + +[config.static_options] +glx = "disabled" +zlib = "disabled" +platforms = "" +vulkan-drivers = "swrast" +gallium-drivers = 
"" +opengl = false +egl = "disabled" diff --git a/mesonbuild/convert/reference/android/dependencies.toml b/mesonbuild/convert/reference/android/dependencies.toml new file mode 100644 index 000000000000..b0ac09b94c86 --- /dev/null +++ b/mesonbuild/convert/reference/android/dependencies.toml @@ -0,0 +1,115 @@ +# Copyright 2026 The Meson Development Team +# SPDX-License-Identifier: Apache-2.0 + +[shared_libraries] +android-base = [ + { target_name = "libbase" } +] +libdrm = [ + { target_name = "libdrm" } +] +libzstd = [ + { target_name = "libzstd" } +] +hardware = [ + { target_name = "libhardware" } +] +cutils = [ + { target_name = "libcutils" } +] +log = [ + { target_name = "liblog" } +] +nativewindow = [ + { target_name = "libnativewindow" } +] +libclc = [ + { target_name = "libclc_aosp_mesa", pkgconfig = {'libexecdir' = 'external/mesa3d/llvm-fake/clc-dir'}} +] +android-utils = [ + { target_name = "libutils" } +] +sync = [ + { target_name = "libsync" } +] +android-aemu-gl-codec = [ + { target_name = "libOpenglCodecCommon" } +] + +[static_libraries] +llvm = [ + { target_name = "libLLVM16_swiftshader", version = "16.0", configtool = { "has-rtti" = "YES", "libdir" = "/tmp/" } } +] +rustix = [ + { target_name = "librustix" } +] +cfg-if = [ + { target_name = "libcfg_if" } +] +remain = [ + { target_name = "libremain", proc_macro = true } +] +zerocopy-derive = [ + { target_name = "libzerocopy_derive", proc_macro = true } +] +bitflags = [ + { target_name = "libbitflags" } +] +errno = [ + { target_name = "liberrno" } +] +rust-libc = [ + { target_name = "liblibc" } +] +rust-log = [ + { target_name = "liblog_rust" } +] +thiserror = [ + { target_name = "libthiserror" } +] +zerocopy = [ + { target_name = "libzerocopy" } +] +android-aemu-ringbuffer = [ + { target_name = "libringbuffer" } +] +android-aemu-qemupipe = [ + { target_name = "libqemupipe.ranchu" } +] +android-aemu-rc-encoder = [ + { target_name = "libgfxstream_rcEnc_static" } +] +android-aemu-base = [ + { target_name = 
"libgfxstream_androidemu_static" } +] +android-arect = [ + { target_name = "libarect" } +] +perfetto = [ + { target_name = "libperfetto_client_experimental" } +] + +[header_libraries] +android-hwvulkan-headers = [ + { target_name = "hwvulkan_headers" } +] +android-aemu-gralloc-headers = [ + { target_name = "libgralloc_cb.ranchu" } +] +android-minigbm-headers = [ + { target_name = "minigbm_headers" } +] +android-nativebase-headers = [ + { target_name = "libnativebase_headers" } +] + +[python_libraries] +mako = "1.3" +pyyaml = "6.0" + +[programs] +python3 = { version = "3.9" } +glslangValidator = { version = "Glslang Version: 12:44.4.0" } +bindgen = { version = "1.71" } +bison = { version = "3.8.2" } +flex = { version = "2.6.4" } diff --git a/mesonbuild/convert/reference/android/gfxstream_workaround.bp b/mesonbuild/convert/reference/android/gfxstream_workaround.bp new file mode 100644 index 000000000000..a91ea30c2dab --- /dev/null +++ b/mesonbuild/convert/reference/android/gfxstream_workaround.bp @@ -0,0 +1,184 @@ +/* + * This is a special handwritten template for gfxstream compatibility. + * Parts of the Android tree, notably: + * + * - hardware/google/gfxstream + * - device/generic/goldfish + * + * have a dependency on this code. They **shouldn't**, but they do. 
+ * This dependency may be removed when:
+ *
+ * - gfxstream does its testing on GitHub, rather than Android
+ * - gfxstream GLES is deleted
+ * - out-of-tree goldfish drivers are removed in favor of virtio
+ *
+ */
+
+cc_library_headers {
+    name: "mesa_common_headers",
+    vendor_available: true,
+    host_supported: true,
+    export_include_dirs: [
+        "src",
+        "include",
+    ],
+}
+
+cc_library_headers {
+    name: "mesa_src_headers",
+    vendor_available: true,
+    host_supported: true,
+    export_include_dirs: [
+        "src",
+    ],
+}
+
+cc_library_headers {
+    name: "mesa_gfxstream_virtgpu",
+    vendor: true,
+    host_supported: true,
+    export_include_dirs: [
+        "src/gfxstream/guest/platform/include",
+    ],
+}
+
+cc_library_headers {
+    name: "virtgpu_kumquat_ffi_headers_mesa3d",
+    vendor: true,
+    host_supported: true,
+    export_include_dirs: [
+        "src/virtio/virtgpu_kumquat_ffi/include",
+    ],
+}
+
+cc_library_headers {
+    name: "mesa_gfxstream_guest_android_headers",
+    vendor: true,
+    host_supported: true,
+    export_include_dirs: [
+        "src/gfxstream/guest/android",
+    ],
+}
+
+/*
+ * This is such a hack, when you consider hardware/google/aemu.
+ */ +cc_library_headers { + name: "mesa_gfxstream_aemu_headers", + vendor: true, + host_supported: true, + export_include_dirs: [ + "src/gfxstream/aemu/include", + ], +} + +cc_defaults { + name: "mesa_platform_virtgpu_defaults", + vendor: true, + header_libs: [ + "inc_platform_virtgpu", + "inc_src", + "mesa_gfxstream_virtgpu", + "mesa_src_headers", + ], + whole_static_libs: [ + "mesa_platform_virtgpu", + "platform_virtgpu_drm", + "libvirtgpu_kumquat_ffi", + "gfxstream_vulkan_mapper", + ], + shared_libs: [ + "libdrm", + ], + export_shared_lib_headers: [ + "libdrm", + ], + target: { + android: { + shared_libs: [ + "libsync", + ], + }, + }, +} + +cc_library_headers { + name: "mesa_gfxstream_guest_iostream", + vendor: true, + host_supported: true, + shared_libs: [ + "liblog", + ], + export_shared_lib_headers: [ + "liblog", + ], + export_include_dirs: [ + "src/gfxstream/guest/iostream/include", + ], +} + +cc_library_static { + name: "mesa_platform_virtgpu", + vendor: true, + host_supported: true, + whole_static_libs: [ + "platform_virtgpu", + "vulkan_util", + "platform_virtgpu_kumquat", + ], + shared_libs: [ + "libdrm", + ], + export_shared_lib_headers: [ + "libdrm", + ], + target: { + host: { + compile_multilib: "64", + }, + android: { + shared_libs: [ + "libsync", + ], + }, + }, +} + +cc_library_static { + name: "mesa_gfxstream_connection_manager", + vendor: true, + host_supported: true, + whole_static_libs: ["connection_manager"], + export_include_dirs: ["src/gfxstream/guest/connection-manager"], + target: { + host: { + compile_multilib: "64", + }, + }, +} + +cc_library_static { + name: "mesa_goldfish_address_space", + vendor: true, + host_supported: true, + whole_static_libs: ["goldfish_address_space"], + export_include_dirs: ["src/gfxstream/guest/GoldfishAddressSpace/include"], + target: { + host: { + compile_multilib: "64", + }, + }, +} + +cc_library_static { + name: "mesa_gfxstream_guest_android", + vendor: true, + host_supported: true, + whole_static_libs: 
["gfxstream_android"], + export_include_dirs: ["src/gfxstream/guest/android/include"], + target: { + host: { + compile_multilib: "64", + }, + }, +} diff --git a/mesonbuild/convert/reference/android/toolchain.toml b/mesonbuild/convert/reference/android/toolchain.toml new file mode 100644 index 000000000000..7a4c843fe7f4 --- /dev/null +++ b/mesonbuild/convert/reference/android/toolchain.toml @@ -0,0 +1,328 @@ +# Copyright 2026 The Meson Development Team +# SPDX-License-Identifier-Apache-2.0 + +# Generated via meson check-toolchain --android-ndk-version r29 -o android_r29.toml + +[compiler_binaries.wrap] +source_url = "https://dl.google.com/android/repository/android-ndk-r29-linux.zip" +source_filename = "android-ndk-r29-linux.zip" +source_hash = "4abbbcdc842f3d4879206e9695d52709603e52dd68d3c1fff04b3b5e7a308ecf" + +[compiler_binaries.binary_paths] +cc = "toolchains/llvm/prebuilt/linux-x86_64/bin/clang" +cpp = "toolchains/llvm/prebuilt/linux-x86_64/bin/clang++" +ar = "toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-ar" +strip = "toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip" +toolchain_id = "clang-android" + +[[toolchain]] +name = "android_arm64" + +[toolchain.host_machine] +cpu_family = "aarch64" +cpu = "aarch64" +system = "android" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.links.fails] +"GCC 64bit atomics" = true + +[toolchain.c.check_header.fails] +"pthread_np.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +qsort_s = true +secure_getenv = true +pthread_setaffinity_np = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true + 
+[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] + +[[toolchain]] +name = "android_x86" + +[toolchain.host_machine] +cpu_family = "x86" +cpu = "i686" +system = "android" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.compiles.fails] +"__uint128_t" = true + +[toolchain.c.links.fails] +"GCC 64bit atomics" = true + +[toolchain.c.check_header.fails] +"pthread_np.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +qsort_s = true +secure_getenv = true +pthread_setaffinity_np = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true + +[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] + +[[toolchain]] +name = "android_x86_64" + +[toolchain.host_machine] +cpu_family = "x86_64" +cpu = "x86_64" +system = "android" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.links.fails] +"GCC 64bit atomics" = true + +[toolchain.c.check_header.fails] +"pthread_np.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +qsort_s = true +secure_getenv = true +pthread_setaffinity_np = true 
+__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true + +[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] + +[[toolchain]] +name = "linux_glibc_x86_64" + +[toolchain.host_machine] +cpu_family = "x86_64" +cpu = "x86_64" +system = "linux" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.check_header.fails] +"pthread_np.h" = true +"linux/udmabuf.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +memfd_create = true +qsort_s = true +pthread_setaffinity_np = true +thrd_create = true +getrandom = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true +reallocarray = true + +[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] + +[[toolchain]] +name = "linux_musl_x86_64" + +[toolchain.host_machine] +cpu_family = "x86_64" +cpu = "x86_64" +system = "linux" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.check_header.fails] +"pthread_np.h" = true +"linux/udmabuf.h" = true +"xlocale.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +memfd_create = true +qsort_s = true 
+pthread_setaffinity_np = true +thrd_create = true +getrandom = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true + +[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] + +[[toolchain]] +name = "linux_musl_aarch64" + +[toolchain.host_machine] +cpu_family = "aarch64" +cpu = "aarch64" +system = "linux" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.check_header.fails] +"pthread_np.h" = true +"linux/udmabuf.h" = true +"xlocale.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +memfd_create = true +qsort_s = true +pthread_setaffinity_np = true +thrd_create = true +getrandom = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true + +[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] diff --git a/mesonbuild/convert/reference/fuchsia/dependencies.toml b/mesonbuild/convert/reference/fuchsia/dependencies.toml new file mode 100644 index 000000000000..9312f47a5156 --- /dev/null +++ b/mesonbuild/convert/reference/fuchsia/dependencies.toml @@ -0,0 +1,13 @@ +# Copyright 2026 The Meson Development Team +# SPDX-License-Identifier: Apache-2.0 + +[python_libraries] +mako = "1.3" +markupsafe = "2.0.0" +pyyaml = "6.0" + +[programs] +python3 = { version = "3.9" } +bindgen = { version = "1.71" } +bison = { version = "3.8.2" } +flex = { version = "2.6.4" } diff --git a/mesonbuild/convert/reference/fuchsia/mesa3d.toml 
b/mesonbuild/convert/reference/fuchsia/mesa3d.toml
new file mode 100644
index 000000000000..6f79ed7caa8c
--- /dev/null
+++ b/mesonbuild/convert/reference/fuchsia/mesa3d.toml
@@ -0,0 +1,49 @@
+# Copyright 2026 The Meson Development Team
+# SPDX-License-Identifier: Apache-2.0
+[project]
+project_name = 'fuchsia_mesa3d'
+build_system = 'bazel'
+
+[copyright]
+license_name = "fuchsia_license"
+licenses = ["BSD-2-Clause"]
+license_texts = ["LICENSE"]
+
+[custom_target]
+[custom_target.workarounds]
+# Maybe we should always do this.
+export_include_dirs = [
+    "u_format_gen.h",
+    "spirv_info",
+    "leaf.spv.h",
+    "morton.spv.h",
+    "lbvh_main.spv.h",
+    "lbvh_generate_ir.spv.h",
+    "ploc_internal.spv.h",
+    "init.comp.spv.h",
+    "fill.comp.spv.h",
+    "histogram.comp.spv.h",
+    "prefix.comp.spv.h",
+    "scatter_0_even.comp.spv.h",
+    "scatter_0_odd.comp.spv.h",
+    "scatter_1_even.comp.spv.h",
+    "scatter_1_odd.comp.spv.h",
+    "hploc_internal.spv.h",
+]
+
+[[config]]
+config_name = 'fuchsia_mesa3d_drivers'
+
+[config.toolchains]
+host_toolchains = ["fuchsia_x86_64"]
+build_toolchains = ["fuchsia_x86_64"]
+
+[config.static_options]
+glx = "disabled"
+platforms = ""
+gallium-drivers = ""
+vulkan-drivers = 'gfxstream'
+shader-cache = 'disabled'
+opengl = false
+egl = "disabled"
+zlib = "disabled"
diff --git a/mesonbuild/convert/reference/fuchsia/toolchain.toml b/mesonbuild/convert/reference/fuchsia/toolchain.toml
new file mode 100644
index 000000000000..1c0b4a0c1a33
--- /dev/null
+++ b/mesonbuild/convert/reference/fuchsia/toolchain.toml
@@ -0,0 +1,219 @@
+# Copyright 2026 The Meson Development Team
+# SPDX-License-Identifier: Apache-2.0
+
+# Generated via meson check-toolchain --fuchsia-clang-instance-id eElzEu-CHpuu6lW7IF3kpIH08sihIBJybp-rwP3yevUC --fuchsia-core-sdk-instance-id sVJe9XVJyCecoWH2SKQtCXQEDjgtV9xx3qpUW_x9USkC -o fuchsia_check_qsort_v3.toml
+
+[compiler_binaries.wrap]
+source_url =
"https://chrome-infra-packages.appspot.com/dl/fuchsia/third_party/clang/linux-amd64/+/eElzEu-CHpuu6lW7IF3kpIH08sihIBJybp-rwP3yevUC" +source_filename = "fuchsia-clang-linux-amd64.zip" +source_hash = "78497312ef821e9baeea55bb205de4a481f4f2c8a12012726e9fabc0fdf27af5" + +[compiler_binaries.binary_paths] +cc = "bin/clang" +cpp = "bin/clang++" +ld = "bin/ld.lld" +ar = "bin/llvm-ar" +nm = "bin/llvm-nm" +objcopy = "bin/llvm-objcopy" +objdump = "bin/llvm-objdump" +gcov = "bin/llvm-cov" +strip = "bin/llvm-strip" +as = "bin/llvm-as" +toolchain_id = "clang-fuchsia" + +[sysroot.wrap] +source_url = "https://chrome-infra-packages.appspot.com/dl/fuchsia/sdk/core/linux-amd64/+/sVJe9XVJyCecoWH2SKQtCXQEDjgtV9xx3qpUW_x9USkC" +source_filename = "fuchsia-sdk-core-linux-amd64.zip" +source_hash = "b1525ef57549c8279ca161f648a42d0974040e382d57dc71deaa545bfc7d5129" + +[[toolchain]] +name = "fuchsia_x86_64" + +[toolchain.host_machine] +cpu_family = "x86_64" +cpu = "x86_64" +system = "fuchsia" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "23.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "23.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.links.fails] +"GCC 64bit atomics" = true +"xlocale" = true + +[toolchain.c.check_header.fails] +"xlocale.h" = true +"linux/futex.h" = true +"linux/udmabuf.h" = true +"pthread_np.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/sysmacros.h" = { major = true, minor = true, makedev = true } +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +ffs = true +ffsll = true +getrandom = true +qsort_s = true +secure_getenv = true +pthread_setaffinity_np = true +reallocarray = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true +memfd_create = true + +[toolchain.c.supported_arguments.fails] +args = [ + 
"-Wno-nonnull-compare", +] + +[toolchain.cpp.links.fails] +"qsort_r" = true + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] + +[[toolchain]] +name = "fuchsia_aarch64" + +[toolchain.host_machine] +cpu_family = "aarch64" +cpu = "aarch64" +system = "fuchsia" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "23.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "23.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.links.fails] +"GCC 64bit atomics" = true +"xlocale" = true + +[toolchain.c.check_header.fails] +"xlocale.h" = true +"linux/futex.h" = true +"linux/udmabuf.h" = true +"pthread_np.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/sysmacros.h" = { major = true, minor = true, makedev = true } +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +ffs = true +ffsll = true +getrandom = true +qsort_s = true +secure_getenv = true +pthread_setaffinity_np = true +reallocarray = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true +memfd_create = true + +[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.links.fails] +"qsort_r" = true + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] + +[[toolchain]] +name = "fuchsia_riscv" + +[toolchain.host_machine] +cpu_family = "riscv64" +cpu = "riscv64" +system = "fuchsia" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "23.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "23.0.0" + +[toolchain.rust] +compiler_id = "rustc" +linker_id = "ld.lld" +version = "1.90.0" + +[toolchain.c.links.fails] +"GCC 64bit atomics" = true +"xlocale" = true + +[toolchain.c.check_header.fails] +"xlocale.h" = true +"linux/futex.h" 
= true +"linux/udmabuf.h" = true +"pthread_np.h" = true + +[toolchain.c.has_header_symbol.fails] +"sys/sysmacros.h" = { major = true, minor = true, makedev = true } +"sys/mkdev.h" = { major = true, minor = true, makedev = true } +"errno.h" = { program_invocation_name = true } + +[toolchain.c.has_function.fails] +ffs = true +ffsll = true +getrandom = true +qsort_s = true +secure_getenv = true +pthread_setaffinity_np = true +reallocarray = true +__builtin_add_overflow_p = true +__builtin_sub_overflow_p = true +memfd_create = true + +[toolchain.c.supported_arguments.fails] +args = [ + "-Wno-nonnull-compare", +] + +[toolchain.cpp.links.fails] +"qsort_r" = true + +[toolchain.cpp.supported_arguments.fails] +args = [ + "-flifetime-dse=1", +] \ No newline at end of file diff --git a/mesonbuild/convert/reference/test/basic_bazel.toml b/mesonbuild/convert/reference/test/basic_bazel.toml new file mode 100644 index 000000000000..bd6be8d1cd9b --- /dev/null +++ b/mesonbuild/convert/reference/test/basic_bazel.toml @@ -0,0 +1,19 @@ +# Copyright 2026 The Meson Development Team +# SPDX-License-Identifier: Apache-2.0 +[project] +project_name = 'basic_convert' +build_system = 'bazel' + +[copyright] +license_name = "test_license" +licenses = ["Apache-2.0"] +license_texts = ["LICENSE"] + +[[config]] +config_name = 'test_config' + +[config.toolchains] +host_toolchains = ["test_toolchain"] +build_toolchains = ["test_toolchain"] + +[config.static_options] diff --git a/mesonbuild/convert/reference/test/basic_soong.toml b/mesonbuild/convert/reference/test/basic_soong.toml new file mode 100644 index 000000000000..2f60676415ca --- /dev/null +++ b/mesonbuild/convert/reference/test/basic_soong.toml @@ -0,0 +1,19 @@ +# Copyright 2026 The Meson Development Team +# SPDX-License-Identifier: Apache-2.0 +[project] +project_name = 'basic_convert' +build_system = 'soong' + +[copyright] +license_name = "test_license" +licenses = ["Apache-2.0"] +license_texts = ["LICENSE"] + +[[config]] +config_name = 
'test_config' + +[config.toolchains] +host_toolchains = ["test_toolchain"] +build_toolchains = ["test_toolchain"] + +[config.static_options] diff --git a/mesonbuild/convert/reference/test/dependencies.toml b/mesonbuild/convert/reference/test/dependencies.toml new file mode 100644 index 000000000000..09dcfeea4f92 --- /dev/null +++ b/mesonbuild/convert/reference/test/dependencies.toml @@ -0,0 +1,12 @@ +# Copyright 2026 The Meson Development Team +# SPDX-License-Identifier: Apache-2.0 + +[shared_libraries] + +[static_libraries] + +[header_libraries] + +[programs] +python3 = { version = "3.11" } +cp = { version = "1.0" } diff --git a/mesonbuild/convert/reference/test/toolchain.toml b/mesonbuild/convert/reference/test/toolchain.toml new file mode 100644 index 000000000000..17d47b07285c --- /dev/null +++ b/mesonbuild/convert/reference/test/toolchain.toml @@ -0,0 +1,21 @@ +# Copyright 2026 The Meson Development Team +# SPDX-License-Identifier-Apache-2.0 + +[[toolchain]] +name = "test_toolchain" + +[toolchain.host_machine] +cpu_family = "x86_64" +cpu = "x86_64" +system = "linux" +endian = "little" + +[toolchain.c] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" + +[toolchain.cpp] +compiler_id = "clang" +linker_id = "ld.lld" +version = "21.0.0" diff --git a/mesonbuild/mchecktoolchain.py b/mesonbuild/mchecktoolchain.py new file mode 100644 index 000000000000..0c9d60d9db9c --- /dev/null +++ b/mesonbuild/mchecktoolchain.py @@ -0,0 +1,72 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Authors + +from __future__ import annotations +import argparse +import os +import typing as T + +from . 
import mlog +from .check_toolchain.android import generate_android_toolchains +from .check_toolchain.fuchsia import generate_fuchsia_toolchains +from .check_toolchain.defs import Toolchain +from .check_toolchain.emitter import ToolchainEmitter +from .check_toolchain.checker import run_compiler_checks + + +def add_arguments(parser: argparse.ArgumentParser) -> None: + parser.add_argument("--name", default=None) + parser.add_argument("-o", "--output", default=None, help="Output file name.") + group = parser.add_mutually_exclusive_group() + group.add_argument("--cross-file") + group.add_argument("--android-ndk-version") + group.add_argument("--fuchsia-clang-instance-id") + parser.add_argument("--fuchsia-core-sdk-instance-id") + parser.add_argument("--android-ndk-platform", default="linux") + + +def run(options: argparse.Namespace) -> int: + toolchains: T.List[Toolchain] = [] + output_filename = options.output + + if options.cross_file: + if not options.name: + mlog.error("Must specify --name when using --cross-file.") + return 1 + toolchains.append(run_compiler_checks(options.cross_file, options.name, [], [])) + if output_filename is None: + output_filename = "check-toolchain-output.toml" + elif options.android_ndk_version: + if output_filename is None: + output_filename = "aosp.toolchain.toml" + toolchains = generate_android_toolchains( + run_compiler_checks, + ndk_version=options.android_ndk_version, + ndk_platform=options.android_ndk_platform, + ) + elif options.fuchsia_clang_instance_id: + if not options.fuchsia_core_sdk_instance_id: + mlog.error( + "Must specify --fuchsia-core-sdk-instance-id when using --fuchsia-clang-instance-id." 
+ ) + return 1 + if output_filename is None: + output_filename = "fuchsia.toolchain.toml" + toolchains = generate_fuchsia_toolchains( + options.fuchsia_clang_instance_id, + options.fuchsia_core_sdk_instance_id, + run_compiler_checks, + ) + else: + name = options.name or "native" + toolchains.append(run_compiler_checks(None, name, [], [])) + if output_filename is None: + output_filename = "check-toolchain-output.toml" + + emitter = ToolchainEmitter(toolchains, options) + emitter.emit(output_filename) + + mlog.log( + "Toolchain information written to", mlog.bold(os.path.abspath(output_filename)) + ) + return 0 diff --git a/mesonbuild/mconvert.py b/mesonbuild/mconvert.py new file mode 100644 index 000000000000..3689a69e1d22 --- /dev/null +++ b/mesonbuild/mconvert.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python3 +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson Development Team + +import argparse +import os +import sys +import typing as T + +if sys.version_info >= (3, 11): + import tomllib +else: + tomllib: T.Optional[T.Any] = None + +from mesonbuild.convert.convertmain import convert_build_system + + +def add_arguments(parser: argparse.ArgumentParser) -> None: + parser.add_argument( + "hermetic_project", + nargs="?", + default=None, + help="The hermetic project to convert (e.g., android, fuchsia).", + ) + parser.add_argument( + "git_project", + nargs="?", + default=None, + help="The git project to convert (e.g., aosp_mesa3d, mesa3d).", + ) + parser.add_argument( + "--config", help="The path to a valid project config file (toml)." + ) + parser.add_argument( + "--toolchain", help="The path to a valid toolchain config file (toml)." + ) + parser.add_argument( + "--dependencies", help="The path to a valid dependencies file (toml)." + ) + parser.add_argument( + "--project-dir", default=os.getcwd(), help="The path to the project directory." + ) + parser.add_argument( + "--output-dir", + help="The path to the output directory for generated files. 
Defaults to the project directory.", + ) + + +def run(options: argparse.Namespace) -> int: + if tomllib is None: + sys.exit("The convert feature requires Python 3.11 or newer.") + + if options.hermetic_project and options.git_project: + base_path = os.path.join( + os.path.dirname(__file__), "convert", "reference", options.hermetic_project + ) + options.config = os.path.join(base_path, f"{options.git_project}.toml") + options.toolchain = os.path.join(base_path, "toolchain.toml") + options.dependencies = os.path.join(base_path, "dependencies.toml") + elif not all([options.config, options.toolchain]): + sys.exit( + "Error: You must specify either a hermetic project and git project, or --config and --toolchain paths." + ) + + # Load all toml files + try: + with open(options.config, "rb") as f: + config_toml = tomllib.load(f) + with open(options.toolchain, "rb") as f: + toolchain_toml = tomllib.load(f) + dependencies_toml = {} + if options.dependencies and os.path.exists(options.dependencies): + with open(options.dependencies, "rb") as f: + dependencies_toml = tomllib.load(f) + except Exception as e: + sys.exit(f"Error trying to open config file: {e}") + + return convert_build_system(config_toml, toolchain_toml, dependencies_toml, options) diff --git a/mesonbuild/mesonmain.py b/mesonbuild/mesonmain.py index 1ded0b155264..5bb476689e21 100644 --- a/mesonbuild/mesonmain.py +++ b/mesonbuild/mesonmain.py @@ -64,7 +64,7 @@ def errorhandler(e: Exception, command: str) -> int: class CommandLineParser: def __init__(self) -> None: # only import these once we do full argparse processing - from . import mconf, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata, mcompile, mdevenv, mformat + from . 
import mconf, mchecktoolchain, mconvert, mdist, minit, minstall, mintro, msetup, mtest, rewriter, msubprojects, munstable_coredata, mcompile, mdevenv, mformat from .scripts import env2mfile, reprotest from .wrap import wraptool import shutil @@ -81,6 +81,10 @@ def __init__(self) -> None: help_msg='Configure the project') self.add_command('configure', mconf.add_arguments, mconf.run, help_msg='Change project options',) + self.add_command('check-toolchain', mchecktoolchain.add_arguments, mchecktoolchain.run, + help_msg='Check toolchain properties') + self.add_command('convert', mconvert.add_arguments, mconvert.run, + help_msg='Convert a Meson project to another build system',) self.add_command('dist', mdist.add_arguments, mdist.run, help_msg='Generate release archive',) self.add_command('install', minstall.add_arguments, minstall.run, diff --git a/run_mypy.py b/run_mypy.py index 6c0ddaaa4752..01f3648ac047 100755 --- a/run_mypy.py +++ b/run_mypy.py @@ -16,7 +16,9 @@ # fully typed submodules 'mesonbuild/ast/', 'mesonbuild/cargo/', + 'mesonbuild/check_toolchain/', 'mesonbuild/cmake/', + 'mesonbuild/convert/', 'mesonbuild/compilers/', 'mesonbuild/dependencies/', 'mesonbuild/interpreter/primitives/', @@ -43,7 +45,9 @@ 'mesonbuild/interpreter/type_checking.py', 'mesonbuild/machinefile.py', 'mesonbuild/mesondata.py', + 'mesonbuild/mchecktoolchain.py', 'mesonbuild/mcompile.py', + 'mesonbuild/mconvert.py', 'mesonbuild/mdevenv.py', 'mesonbuild/mconf.py', 'mesonbuild/mdist.py', diff --git a/test cases/convert/1 basic/Android.bp b/test cases/convert/1 basic/Android.bp new file mode 100644 index 000000000000..c54f90afeb98 --- /dev/null +++ b/test cases/convert/1 basic/Android.bp @@ -0,0 +1,53 @@ +/* + * Copyright (C) 2025-2026 The Magma GPU Project + * SPDX-License-Identifier: Apache-2.0 + * + * Generated via: + * https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert + * + * Submit patches, do not hand-edit. 
+ * + */ + +package { + // See: http://go/android-license-faq + default_applicable_licenses: ["test_license"], +} + +license { + name: "test_license", + visibility: [":__subpackages__"], + license_kinds: [ + "SPDX-license-identifier-Apache-2.0", + ], + license_text: [ + "LICENSE", + ], +} + +cc_library_static { + name: "commonlib", + vendor: true, + host_supported: true, + header_libs: [ + "inc", + ], + srcs: [ + "common.c", + ], +} + +cc_library_shared { + name: "simplelib", + vendor: true, + host_supported: true, + header_libs: [ + "inc", + ], + srcs: [ + "main.c", + ], + static_libs: [ + "commonlib", + ], +} diff --git a/test cases/convert/1 basic/BUILD.bazel b/test cases/convert/1 basic/BUILD.bazel new file mode 100644 index 000000000000..84888daf1b18 --- /dev/null +++ b/test cases/convert/1 basic/BUILD.bazel @@ -0,0 +1,43 @@ +# Copyright (C) 2025-2026 The Magma GPU Project +# SPDX-License-Identifier: Apache-2.0 +# +# Generated via: +# https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert +# +# Submit patches, do not hand-edit. 
+ +load("//bazel:meson_rules.bzl", "meson_cc_library") +load("@rules_license//rules:license.bzl", "license") + +license( + name = "test_license", + license_kinds = [ + "@rules_license//licenses/spdx:Apache-2.0", + ], +) + +package( + default_applicable_licenses = ["//:test_license"], + default_visibility = ["//visibility:public"], +) + +meson_cc_library( + name = "commonlib", + srcs =[ + "common.c", + ], + deps =[ + "//include:inc", + ], +) + +meson_cc_library( + name = "simplelib", + srcs =[ + "main.c", + ], + deps =[ + "//:commonlib", + "//include:inc", + ], +) diff --git a/test cases/convert/1 basic/LICENSE b/test cases/convert/1 basic/LICENSE new file mode 100644 index 000000000000..3c320c205c67 --- /dev/null +++ b/test cases/convert/1 basic/LICENSE @@ -0,0 +1 @@ +Dummy License Content diff --git a/test cases/convert/1 basic/MODULE.bazel b/test cases/convert/1 basic/MODULE.bazel new file mode 100644 index 000000000000..0ae317e6efec --- /dev/null +++ b/test cases/convert/1 basic/MODULE.bazel @@ -0,0 +1,23 @@ +# Copyright (C) 2025-2026 The Magma GPU Project +# SPDX-License-Identifier: Apache-2.0 +# +# Generated via: +# https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert +# +# Submit patches, do not hand-edit. 
+module(name = "basic_convert", version = "1.0") + +bazel_dep(name = "rules_cc", version = "0.2.17") +bazel_dep(name = "platforms", version = "1.0.0") +bazel_dep(name = "rules_license", version = "1.0.0") +bazel_dep(name = "rules_python", version = "1.7.0") + +meson_repos = use_extension("//bazel:toolchains.bzl", "meson_repos") + + + + + + + + diff --git a/test cases/convert/1 basic/bazel/BUILD.bazel b/test cases/convert/1 basic/bazel/BUILD.bazel new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test cases/convert/1 basic/common.c b/test cases/convert/1 basic/common.c new file mode 100644 index 000000000000..27bd829c4803 --- /dev/null +++ b/test cases/convert/1 basic/common.c @@ -0,0 +1,4 @@ +#include "common.h" +#include +void common_func(void) { printf("common +"); } diff --git a/test cases/convert/1 basic/include/Android.bp b/test cases/convert/1 basic/include/Android.bp new file mode 100644 index 000000000000..bf741210c270 --- /dev/null +++ b/test cases/convert/1 basic/include/Android.bp @@ -0,0 +1,24 @@ +/* + * Copyright (C) 2025-2026 The Magma GPU Project + * SPDX-License-Identifier: Apache-2.0 + * + * Generated via: + * https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert + * + * Submit patches, do not hand-edit. + * + */ + +package { + // See: http://go/android-license-faq + default_applicable_licenses: ["test_license"], +} + +cc_library_headers { + name: "inc", + vendor: true, + host_supported: true, + export_include_dirs: [ + ".", + ], +} diff --git a/test cases/convert/1 basic/include/BUILD.bazel b/test cases/convert/1 basic/include/BUILD.bazel new file mode 100644 index 000000000000..f05739f3a2f4 --- /dev/null +++ b/test cases/convert/1 basic/include/BUILD.bazel @@ -0,0 +1,22 @@ +# Copyright (C) 2025-2026 The Magma GPU Project +# SPDX-License-Identifier: Apache-2.0 +# +# Generated via: +# https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert +# +# Submit patches, do not hand-edit. 
+ +load("//bazel:meson_rules.bzl", "meson_cc_headers") + +package( + default_applicable_licenses = ["//:test_license"], + default_visibility = ["//visibility:public"], +) + +meson_cc_headers( + name = "inc", + hdrs = glob(["**/*.h", "**/*.hpp", "**/*.inl", "**/*.inc"], allow_empty = True), + export_include_dirs =[ + ".", + ], +) diff --git a/test cases/convert/1 basic/include/common.h b/test cases/convert/1 basic/include/common.h new file mode 100644 index 000000000000..6f1ab190dbc3 --- /dev/null +++ b/test cases/convert/1 basic/include/common.h @@ -0,0 +1,4 @@ +#ifndef COMMON_H +#define COMMON_H +void common_func(void); +#endif diff --git a/test cases/convert/1 basic/main.c b/test cases/convert/1 basic/main.c new file mode 100644 index 000000000000..a01a1ec41c95 --- /dev/null +++ b/test cases/convert/1 basic/main.c @@ -0,0 +1,6 @@ +#include + +int main(void) { + printf("Hello from basic convert test!"); + return 0; +} diff --git a/test cases/convert/1 basic/meson.build b/test cases/convert/1 basic/meson.build new file mode 100644 index 000000000000..c3dc4d317af3 --- /dev/null +++ b/test cases/convert/1 basic/meson.build @@ -0,0 +1,14 @@ +project('basic-convert', 'c') + +inc = include_directories('include') + +commonlib = static_library('commonlib', 'common.c', include_directories: inc) + +shared_library('simplelib', 'main.c', + include_directories: inc, + link_with: commonlib +) + +subdir('subdir2') +subdir('subdir1') +subdir('subdir3') diff --git a/test cases/convert/1 basic/subdir1/Android.bp b/test cases/convert/1 basic/subdir1/Android.bp new file mode 100644 index 000000000000..73a75f5731e4 --- /dev/null +++ b/test cases/convert/1 basic/subdir1/Android.bp @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2025-2026 The Magma GPU Project + * SPDX-License-Identifier: Apache-2.0 + * + * Generated via: + * https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert + * + * Submit patches, do not hand-edit. 
+ * + */ + +package { + // See: http://go/android-license-faq + default_applicable_licenses: ["test_license"], +} + +cc_library_shared { + name: "lib1", + vendor: true, + host_supported: true, + srcs: [ + "lib1.c", + ], + static_libs: [ + "lib2", + ], +} diff --git a/test cases/convert/1 basic/subdir1/BUILD.bazel b/test cases/convert/1 basic/subdir1/BUILD.bazel new file mode 100644 index 000000000000..8176d52ee563 --- /dev/null +++ b/test cases/convert/1 basic/subdir1/BUILD.bazel @@ -0,0 +1,24 @@ +# Copyright (C) 2025-2026 The Magma GPU Project +# SPDX-License-Identifier: Apache-2.0 +# +# Generated via: +# https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert +# +# Submit patches, do not hand-edit. + +load("//bazel:meson_rules.bzl", "meson_cc_library") + +package( + default_applicable_licenses = ["//:test_license"], + default_visibility = ["//visibility:public"], +) + +meson_cc_library( + name = "lib1", + srcs =[ + "lib1.c", + ], + deps =[ + "//subdir2:lib2", + ], +) diff --git a/test cases/convert/1 basic/subdir1/lib1.c b/test cases/convert/1 basic/subdir1/lib1.c new file mode 100644 index 000000000000..9a4fad8f823f --- /dev/null +++ b/test cases/convert/1 basic/subdir1/lib1.c @@ -0,0 +1,3 @@ +#include +void lib1_func(void) { printf("lib1 +"); } diff --git a/test cases/convert/1 basic/subdir1/meson.build b/test cases/convert/1 basic/subdir1/meson.build new file mode 100644 index 000000000000..d171adb5b81c --- /dev/null +++ b/test cases/convert/1 basic/subdir1/meson.build @@ -0,0 +1 @@ +shared_library('lib1', 'lib1.c', link_with: lib2) diff --git a/test cases/convert/1 basic/subdir2/Android.bp b/test cases/convert/1 basic/subdir2/Android.bp new file mode 100644 index 000000000000..e7ee21da8213 --- /dev/null +++ b/test cases/convert/1 basic/subdir2/Android.bp @@ -0,0 +1,27 @@ +/* + * Copyright (C) 2025-2026 The Magma GPU Project + * SPDX-License-Identifier: Apache-2.0 + * + * Generated via: + * 
https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert + * + * Submit patches, do not hand-edit. + * + */ + +package { + // See: http://go/android-license-faq + default_applicable_licenses: ["test_license"], +} + +cc_library_static { + name: "lib2", + vendor: true, + host_supported: true, + header_libs: [ + "inc", + ], + srcs: [ + "lib2.c", + ], +} diff --git a/test cases/convert/1 basic/subdir2/BUILD.bazel b/test cases/convert/1 basic/subdir2/BUILD.bazel new file mode 100644 index 000000000000..15bc4b5b561f --- /dev/null +++ b/test cases/convert/1 basic/subdir2/BUILD.bazel @@ -0,0 +1,24 @@ +# Copyright (C) 2025-2026 The Magma GPU Project +# SPDX-License-Identifier: Apache-2.0 +# +# Generated via: +# https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert +# +# Submit patches, do not hand-edit. + +load("//bazel:meson_rules.bzl", "meson_cc_library") + +package( + default_applicable_licenses = ["//:test_license"], + default_visibility = ["//visibility:public"], +) + +meson_cc_library( + name = "lib2", + srcs =[ + "lib2.c", + ], + deps =[ + "//include:inc", + ], +) diff --git a/test cases/convert/1 basic/subdir2/lib2.c b/test cases/convert/1 basic/subdir2/lib2.c new file mode 100644 index 000000000000..aab646447469 --- /dev/null +++ b/test cases/convert/1 basic/subdir2/lib2.c @@ -0,0 +1 @@ +int lib2_func(void) { return 42; } diff --git a/test cases/convert/1 basic/subdir2/meson.build b/test cases/convert/1 basic/subdir2/meson.build new file mode 100644 index 000000000000..cfa38a20334a --- /dev/null +++ b/test cases/convert/1 basic/subdir2/meson.build @@ -0,0 +1 @@ +lib2 = static_library('lib2', 'lib2.c', include_directories: inc) diff --git a/test cases/convert/1 basic/subdir3/Android.bp b/test cases/convert/1 basic/subdir3/Android.bp new file mode 100644 index 000000000000..5667f0059cda --- /dev/null +++ b/test cases/convert/1 basic/subdir3/Android.bp @@ -0,0 +1,46 @@ +/* + * Copyright (C) 2025-2026 The Magma GPU Project + * 
SPDX-License-Identifier: Apache-2.0 + * + * Generated via: + * https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert + * + * Submit patches, do not hand-edit. + * + */ + +package { + // See: http://go/android-license-faq + default_applicable_licenses: ["test_license"], +} + +python_binary_host { + name: "script_py_binary", + main: "script.py", + srcs: [ + "script.py", + ], +} + +genrule { + name: "copy-script", + out: [ + "script_copy.py", + ], + tools: [ + "cp", + "script_py_binary", + ], + cmd: "$(location cp) $(location script_py_binary) $(location script_copy.py)", +} + +genrule { + name: "gen-hdr", + out: [ + "generated.h", + ], + tools: [ + "script_py_binary", + ], + cmd: "$(location script_py_binary) $(location generated.h)", +} diff --git a/test cases/convert/1 basic/subdir3/BUILD.bazel b/test cases/convert/1 basic/subdir3/BUILD.bazel new file mode 100644 index 000000000000..283630b4b503 --- /dev/null +++ b/test cases/convert/1 basic/subdir3/BUILD.bazel @@ -0,0 +1,46 @@ +# Copyright (C) 2025-2026 The Magma GPU Project +# SPDX-License-Identifier: Apache-2.0 +# +# Generated via: +# https://github.com/mesonbuild/meson/tree/master/mesonbuild/convert +# +# Submit patches, do not hand-edit. 
+ +load("//bazel:meson_rules.bzl", "meson_genrule") +load("@rules_python//python:py_binary.bzl", "py_binary") + +package( + default_applicable_licenses = ["//:test_license"], + default_visibility = ["//visibility:public"], +) + +py_binary( + name = "script_py_binary", + main = "//subdir3:script.py", + srcs =[ + "script.py", + ], +) + +meson_genrule( + name = "copy-script", + outs =[ + "script_copy.py", + ], + tools =[ + "//:cp", + "//subdir3:script_py_binary", + ], + cmd = "$(location //:cp) $(location //subdir3:script_py_binary) $(location script_copy.py)", +) + +meson_genrule( + name = "gen-hdr", + outs =[ + "generated.h", + ], + tools =[ + "//subdir3:script_py_binary", + ], + cmd = "$(location //subdir3:script_py_binary) $(location generated.h)", +) diff --git a/test cases/convert/1 basic/subdir3/meson.build b/test cases/convert/1 basic/subdir3/meson.build new file mode 100644 index 000000000000..45fdafc01168 --- /dev/null +++ b/test cases/convert/1 basic/subdir3/meson.build @@ -0,0 +1,16 @@ +py = find_program('python3') +cp = find_program('cp') + +custom_target('gen-hdr', + output: 'generated.h', + command: [py, files('script.py'), '@OUTPUT@'], + install: true, + install_dir: 'include' +) + +custom_target('copy-script', + output: 'script_copy.py', + command: [cp, files('script.py'), '@OUTPUT@'], + install: true, + install_dir: 'share' +) diff --git a/test cases/convert/1 basic/subdir3/script.py b/test cases/convert/1 basic/subdir3/script.py new file mode 100644 index 000000000000..b6ec2d352694 --- /dev/null +++ b/test cases/convert/1 basic/subdir3/script.py @@ -0,0 +1,4 @@ +import sys +with open(sys.argv[1], 'w') as f: + f.write('#define GENERATED 1 +') diff --git a/unittests/checktoolchaintests.py b/unittests/checktoolchaintests.py new file mode 100644 index 000000000000..21b8da79c480 --- /dev/null +++ b/unittests/checktoolchaintests.py @@ -0,0 +1,57 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson development team + +import subprocess 
+import tempfile +from pathlib import Path + +from .baseplatformtests import BasePlatformTests +from mesonbuild.cargo.toml import load_toml + + +class CheckToolchainTests(BasePlatformTests): + def test_native_check_toolchain(self) -> None: + with tempfile.TemporaryDirectory() as output_dir: + output_file = Path(output_dir) / "output.toml" + command = self.meson_command + [ + "check-toolchain", + "--output", + str(output_file), + "--name", + "test-native", + ] + + p = subprocess.run( + command, capture_output=True, encoding="utf-8", text=True + ) + if p.returncode != 0: + print("STDOUT:") + print(p.stdout) + print("STDERR:") + print(p.stderr) + + self.assertEqual(p.returncode, 0) + self.assertTrue(output_file.exists(), "Output TOML file was not generated") + + data = load_toml(str(output_file)) + self.assertIn("toolchain", data) + toolchains = data["toolchain"] + self.assertIsInstance(toolchains, list) + self.assertGreater(len(toolchains), 0) + + native = toolchains[0] + self.assertEqual(native["name"], "test-native") + self.assertIn("host_machine", native) + host = native["host_machine"] + self.assertIn("cpu_family", host) + self.assertIn("system", host) + self.assertIn("cpu", host) + self.assertIn("endian", host) + + self.assertIn("c", native) + self.assertIn("compiler_id", native["c"]) + self.assertIn("version", native["c"]) + + self.assertIn("cpp", native) + self.assertIn("compiler_id", native["cpp"]) + self.assertIn("version", native["cpp"]) diff --git a/unittests/converttests.py b/unittests/converttests.py new file mode 100644 index 000000000000..b7f98784626a --- /dev/null +++ b/unittests/converttests.py @@ -0,0 +1,91 @@ +# SPDX-License-Identifier: Apache-2.0 +# Copyright 2026 The Meson development team + +import os +import subprocess +import sys +import unittest +from pathlib import Path +import tempfile + +from .baseplatformtests import BasePlatformTests + + +@unittest.skipIf(sys.version_info < (3, 11), "convert feature requires Python 3.11 or newer") +class 
ConvertTests(BasePlatformTests): + def setUp(self) -> None: + super().setUp() + self.src_root = Path(__file__).resolve().parent.parent + self.convert_test_dir = self.src_root / "test cases/convert" + + def _compare_directories(self, expected_dir: Path, actual_dir: Path, filename: str): + for root, _, files in os.walk(expected_dir): + if filename in files: + rel_path = Path(root).relative_to(expected_dir) + expected_file = Path(root) / filename + actual_file = actual_dir / rel_path / filename + + self.assertTrue( + actual_file.exists(), f"Expected file {actual_file} does not exist" + ) + + expected_content = expected_file.read_text(encoding="utf-8").strip() + actual_content = actual_file.read_text(encoding="utf-8").strip() + + self.assertEqual( + actual_content, + expected_content, + f"Content mismatch in {actual_file}", + ) + + def test_soong_conversion(self): + with tempfile.TemporaryDirectory() as output_dir: + test_dir = self.convert_test_dir / "1 basic" + + command = self.meson_command + [ + "convert", + "test", + "basic_soong", + "--project-dir", + str(test_dir), + "--output-dir", + output_dir, + ] + + p = subprocess.run( + command, capture_output=True, encoding="utf-8", text=True + ) + if p.returncode != 0: + print("STDOUT:") + print(p.stdout) + print("STDERR:") + print(p.stderr) + + self.assertEqual(p.returncode, 0) + self._compare_directories(test_dir, Path(output_dir), "Android.bp") + + def test_bazel_conversion(self): + with tempfile.TemporaryDirectory() as output_dir: + test_dir = self.convert_test_dir / "1 basic" + + command = self.meson_command + [ + "convert", + "test", + "basic_bazel", + "--project-dir", + str(test_dir), + "--output-dir", + output_dir, + ] + + p = subprocess.run( + command, capture_output=True, encoding="utf-8", text=True + ) + if p.returncode != 0: + print("STDOUT:") + print(p.stdout) + print("STDERR:") + print(p.stderr) + + self.assertEqual(p.returncode, 0) + self._compare_directories(test_dir, Path(output_dir), "BUILD.bazel")