diff options
Diffstat (limited to 'meson/mesonbuild/modules')
24 files changed, 7063 insertions, 0 deletions
diff --git a/meson/mesonbuild/modules/__init__.py b/meson/mesonbuild/modules/__init__.py new file mode 100644 index 000000000..737a01c2f --- /dev/null +++ b/meson/mesonbuild/modules/__init__.py @@ -0,0 +1,212 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This file contains the detection logic for external dependencies that +# are UI-related. + +import os +import typing as T + +from .. import build, mesonlib +from ..mesonlib import relpath, HoldableObject +from ..interpreterbase.decorators import noKwargs, noPosargs + +if T.TYPE_CHECKING: + from ..interpreter import Interpreter + from ..interpreterbase import TYPE_var, TYPE_kwargs + from ..programs import ExternalProgram + +class ModuleState: + """Object passed to all module methods. + + This is a WIP API provided to modules, it should be extended to have everything + needed so modules does not touch any other part of Meson internal APIs. + """ + + def __init__(self, interpreter: 'Interpreter') -> None: + # Keep it private, it should be accessed only through methods. 
+ self._interpreter = interpreter + + self.source_root = interpreter.environment.get_source_dir() + self.build_to_src = relpath(interpreter.environment.get_source_dir(), + interpreter.environment.get_build_dir()) + self.subproject = interpreter.subproject + self.subdir = interpreter.subdir + self.current_lineno = interpreter.current_lineno + self.environment = interpreter.environment + self.project_name = interpreter.build.project_name + self.project_version = interpreter.build.dep_manifest[interpreter.active_projectname] + # The backend object is under-used right now, but we will need it: + # https://github.com/mesonbuild/meson/issues/1419 + self.backend = interpreter.backend + self.targets = interpreter.build.targets + self.data = interpreter.build.data + self.headers = interpreter.build.get_headers() + self.man = interpreter.build.get_man() + self.global_args = interpreter.build.global_args.host + self.project_args = interpreter.build.projects_args.host.get(interpreter.subproject, {}) + self.build_machine = interpreter.builtin['build_machine'].held_object + self.host_machine = interpreter.builtin['host_machine'].held_object + self.target_machine = interpreter.builtin['target_machine'].held_object + self.current_node = interpreter.current_node + + def get_include_args(self, include_dirs: T.Iterable[T.Union[str, build.IncludeDirs]], prefix: str = '-I') -> T.List[str]: + if not include_dirs: + return [] + + srcdir = self.environment.get_source_dir() + builddir = self.environment.get_build_dir() + + dirs_str: T.List[str] = [] + for dirs in include_dirs: + if isinstance(dirs, str): + dirs_str += [f'{prefix}{dirs}'] + continue + + # Should be build.IncludeDirs object. 
+ basedir = dirs.get_curdir() + for d in dirs.get_incdirs(): + expdir = os.path.join(basedir, d) + srctreedir = os.path.join(srcdir, expdir) + buildtreedir = os.path.join(builddir, expdir) + dirs_str += [f'{prefix}{buildtreedir}', + f'{prefix}{srctreedir}'] + for d in dirs.get_extra_build_dirs(): + dirs_str += [f'{prefix}{d}'] + + return dirs_str + + def find_program(self, prog: T.Union[str, T.List[str]], required: bool = True, + version_func: T.Optional[T.Callable[['ExternalProgram'], str]] = None, + wanted: T.Optional[str] = None) -> 'ExternalProgram': + return self._interpreter.find_program_impl(prog, required=required, version_func=version_func, wanted=wanted) + + def test(self, args: T.Tuple[str, T.Union[build.Executable, build.Jar, 'ExternalProgram', mesonlib.File]], + workdir: T.Optional[str] = None, + env: T.Union[T.List[str], T.Dict[str, str], str] = None, + depends: T.List[T.Union[build.CustomTarget, build.BuildTarget]] = None) -> None: + kwargs = {'workdir': workdir, + 'env': env, + 'depends': depends, + } + # TODO: Use interpreter internal API, but we need to go through @typed_kwargs + self._interpreter.func_test(self.current_node, args, kwargs) + + +class ModuleObject(HoldableObject): + """Base class for all objects returned by modules + """ + def __init__(self) -> None: + self.methods: T.Dict[ + str, + T.Callable[[ModuleState, T.List['TYPE_var'], 'TYPE_kwargs'], T.Union[ModuleReturnValue, 'TYPE_var']] + ] = {} + + +class MutableModuleObject(ModuleObject): + pass + + +# FIXME: Port all modules to stop using self.interpreter and use API on +# ModuleState instead. Modules should stop using this class and instead use +# ModuleObject base class. 
+class ExtensionModule(ModuleObject): + def __init__(self, interpreter: 'Interpreter') -> None: + super().__init__() + self.interpreter = interpreter + self.methods.update({ + 'found': self.found_method, + }) + + @noPosargs + @noKwargs + def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: + return self.found() + + @staticmethod + def found() -> bool: + return True + + +class NewExtensionModule(ModuleObject): + + """Class for modern modules + + provides the found method. + """ + + def __init__(self) -> None: + super().__init__() + self.methods.update({ + 'found': self.found_method, + }) + + @noPosargs + @noKwargs + def found_method(self, state: 'ModuleState', args: T.List['TYPE_var'], kwargs: 'TYPE_kwargs') -> bool: + return self.found() + + @staticmethod + def found() -> bool: + return True + + +class NotFoundExtensionModule(NewExtensionModule): + + """Class for modern modules + + provides the found method. + """ + + @staticmethod + def found() -> bool: + return False + + +def is_module_library(fname): + ''' + Check if the file is a library-like file generated by a module-specific + target, such as GirTarget or TypelibTarget + ''' + if hasattr(fname, 'fname'): + fname = fname.fname + suffix = fname.split('.')[-1] + return suffix in ('gir', 'typelib') + + +class ModuleReturnValue: + def __init__(self, return_value: T.Optional['TYPE_var'], new_objects: T.List['TYPE_var']) -> None: + self.return_value = return_value + assert(isinstance(new_objects, list)) + self.new_objects = new_objects + +class GResourceTarget(build.CustomTarget): + def __init__(self, name, subdir, subproject, kwargs): + super().__init__(name, subdir, subproject, kwargs) + +class GResourceHeaderTarget(build.CustomTarget): + def __init__(self, name, subdir, subproject, kwargs): + super().__init__(name, subdir, subproject, kwargs) + +class GirTarget(build.CustomTarget): + def __init__(self, name, subdir, subproject, kwargs): + super().__init__(name, 
subdir, subproject, kwargs) + +class TypelibTarget(build.CustomTarget): + def __init__(self, name, subdir, subproject, kwargs): + super().__init__(name, subdir, subproject, kwargs) + +class VapiTarget(build.CustomTarget): + def __init__(self, name, subdir, subproject, kwargs): + super().__init__(name, subdir, subproject, kwargs) diff --git a/meson/mesonbuild/modules/cmake.py b/meson/mesonbuild/modules/cmake.py new file mode 100644 index 000000000..cc259dcdc --- /dev/null +++ b/meson/mesonbuild/modules/cmake.py @@ -0,0 +1,406 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import re +import os, os.path, pathlib +import shutil +import typing as T + +from . import ExtensionModule, ModuleReturnValue, ModuleObject + +from .. 
import build, mesonlib, mlog, dependencies +from ..cmake import SingleTargetOptions, TargetOptions, cmake_defines_to_args +from ..interpreter import ConfigurationDataObject, SubprojectHolder +from ..interpreterbase import ( + FeatureNew, + FeatureNewKwargs, + FeatureDeprecatedKwargs, + + stringArgs, + permittedKwargs, + noPosargs, + noKwargs, + + InvalidArguments, + InterpreterException, +) +from ..programs import ExternalProgram + + +COMPATIBILITIES = ['AnyNewerVersion', 'SameMajorVersion', 'SameMinorVersion', 'ExactVersion'] + +# Taken from https://github.com/Kitware/CMake/blob/master/Modules/CMakePackageConfigHelpers.cmake +PACKAGE_INIT_BASE = ''' +####### Expanded from \\@PACKAGE_INIT\\@ by configure_package_config_file() ####### +####### Any changes to this file will be overwritten by the next CMake run #### +####### The input file was @inputFileName@ ######## + +get_filename_component(PACKAGE_PREFIX_DIR "${CMAKE_CURRENT_LIST_DIR}/@PACKAGE_RELATIVE_PATH@" ABSOLUTE) +''' +PACKAGE_INIT_EXT = ''' +# Use original install prefix when loaded through a "/usr move" +# cross-prefix symbolic link such as /lib -> /usr/lib. 
+get_filename_component(_realCurr "${CMAKE_CURRENT_LIST_DIR}" REALPATH) +get_filename_component(_realOrig "@absInstallDir@" REALPATH) +if(_realCurr STREQUAL _realOrig) + set(PACKAGE_PREFIX_DIR "@installPrefix@") +endif() +unset(_realOrig) +unset(_realCurr) +''' +PACKAGE_INIT_SET_AND_CHECK = ''' +macro(set_and_check _var _file) + set(${_var} "${_file}") + if(NOT EXISTS "${_file}") + message(FATAL_ERROR "File or directory ${_file} referenced by variable ${_var} does not exist !") + endif() +endmacro() + +#################################################################################### +''' + +class CMakeSubproject(ModuleObject): + def __init__(self, subp, pv): + assert(isinstance(subp, SubprojectHolder)) + assert(hasattr(subp, 'cm_interpreter')) + super().__init__() + self.subp = subp + self.methods.update({'get_variable': self.get_variable, + 'dependency': self.dependency, + 'include_directories': self.include_directories, + 'target': self.target, + 'target_type': self.target_type, + 'target_list': self.target_list, + 'found': self.found_method, + }) + + def _args_to_info(self, args): + if len(args) != 1: + raise InterpreterException('Exactly one argument is required.') + + tgt = args[0] + res = self.subp.cm_interpreter.target_info(tgt) + if res is None: + raise InterpreterException(f'The CMake target {tgt} does not exist\n' + + ' Use the following command in your meson.build to list all available targets:\n\n' + + ' message(\'CMaket targets:\\n - \' + \'\\n - \'.join(<cmake_subproject>.target_list()))') + + # Make sure that all keys are present (if not this is a bug) + assert(all([x in res for x in ['inc', 'src', 'dep', 'tgt', 'func']])) + return res + + @noKwargs + @stringArgs + def get_variable(self, state, args, kwargs): + return self.subp.get_variable_method(args, kwargs) + + @FeatureNewKwargs('dependency', '0.56.0', ['include_type']) + @permittedKwargs({'include_type'}) + @stringArgs + def dependency(self, state, args, kwargs): + info = 
self._args_to_info(args) + if info['func'] == 'executable': + raise InvalidArguments(f'{args[0]} is an executable and does not support the dependency() method. Use target() instead.') + orig = self.get_variable(state, [info['dep']], {}) + assert isinstance(orig, dependencies.Dependency) + actual = orig.include_type + if 'include_type' in kwargs and kwargs['include_type'] != actual: + mlog.debug('Current include type is {}. Converting to requested {}'.format(actual, kwargs['include_type'])) + return orig.generate_system_dependency(kwargs['include_type']) + return orig + + @noKwargs + @stringArgs + def include_directories(self, state, args, kwargs): + info = self._args_to_info(args) + return self.get_variable(state, [info['inc']], kwargs) + + @noKwargs + @stringArgs + def target(self, state, args, kwargs): + info = self._args_to_info(args) + return self.get_variable(state, [info['tgt']], kwargs) + + @noKwargs + @stringArgs + def target_type(self, state, args, kwargs): + info = self._args_to_info(args) + return info['func'] + + @noPosargs + @noKwargs + def target_list(self, state, args, kwargs): + return self.subp.cm_interpreter.target_list() + + @noPosargs + @noKwargs + @FeatureNew('CMakeSubproject.found()', '0.53.2') + def found_method(self, state, args, kwargs): + return self.subp is not None + + +class CMakeSubprojectOptions(ModuleObject): + def __init__(self) -> None: + super().__init__() + self.cmake_options = [] # type: T.List[str] + self.target_options = TargetOptions() + + self.methods.update( + { + 'add_cmake_defines': self.add_cmake_defines, + 'set_override_option': self.set_override_option, + 'set_install': self.set_install, + 'append_compile_args': self.append_compile_args, + 'append_link_args': self.append_link_args, + 'clear': self.clear, + } + ) + + def _get_opts(self, kwargs: dict) -> SingleTargetOptions: + if 'target' in kwargs: + return self.target_options[kwargs['target']] + return self.target_options.global_options + + @noKwargs + def 
add_cmake_defines(self, state, args, kwargs) -> None: + self.cmake_options += cmake_defines_to_args(args) + + @stringArgs + @permittedKwargs({'target'}) + def set_override_option(self, state, args, kwargs) -> None: + if len(args) != 2: + raise InvalidArguments('set_override_option takes exactly 2 positional arguments') + self._get_opts(kwargs).set_opt(args[0], args[1]) + + @permittedKwargs({'target'}) + def set_install(self, state, args, kwargs) -> None: + if len(args) != 1 or not isinstance(args[0], bool): + raise InvalidArguments('set_install takes exactly 1 boolean argument') + self._get_opts(kwargs).set_install(args[0]) + + @stringArgs + @permittedKwargs({'target'}) + def append_compile_args(self, state, args, kwargs) -> None: + if len(args) < 2: + raise InvalidArguments('append_compile_args takes at least 2 positional arguments') + self._get_opts(kwargs).append_args(args[0], args[1:]) + + @stringArgs + @permittedKwargs({'target'}) + def append_link_args(self, state, args, kwargs) -> None: + if not args: + raise InvalidArguments('append_link_args takes at least 1 positional argument') + self._get_opts(kwargs).append_link_args(args) + + @noPosargs + @noKwargs + def clear(self, state, args, kwargs) -> None: + self.cmake_options.clear() + self.target_options = TargetOptions() + + +class CmakeModule(ExtensionModule): + cmake_detected = False + cmake_root = None + + def __init__(self, interpreter): + super().__init__(interpreter) + self.methods.update({ + 'write_basic_package_version_file': self.write_basic_package_version_file, + 'configure_package_config_file': self.configure_package_config_file, + 'subproject': self.subproject, + 'subproject_options': self.subproject_options, + }) + + def detect_voidp_size(self, env): + compilers = env.coredata.compilers.host + compiler = compilers.get('c', None) + if not compiler: + compiler = compilers.get('cpp', None) + + if not compiler: + raise mesonlib.MesonException('Requires a C or C++ compiler to compute sizeof(void 
*).') + + return compiler.sizeof('void *', '', env) + + def detect_cmake(self): + if self.cmake_detected: + return True + + cmakebin = ExternalProgram('cmake', silent=False) + p, stdout, stderr = mesonlib.Popen_safe(cmakebin.get_command() + ['--system-information', '-G', 'Ninja'])[0:3] + if p.returncode != 0: + mlog.log(f'error retrieving cmake information: returnCode={p.returncode} stdout={stdout} stderr={stderr}') + return False + + match = re.search('\nCMAKE_ROOT \\"([^"]+)"\n', stdout.strip()) + if not match: + mlog.log('unable to determine cmake root') + return False + + cmakePath = pathlib.PurePath(match.group(1)) + self.cmake_root = os.path.join(*cmakePath.parts) + self.cmake_detected = True + return True + + @permittedKwargs({'version', 'name', 'compatibility', 'install_dir'}) + def write_basic_package_version_file(self, state, _args, kwargs): + version = kwargs.get('version', None) + if not isinstance(version, str): + raise mesonlib.MesonException('Version must be specified.') + + name = kwargs.get('name', None) + if not isinstance(name, str): + raise mesonlib.MesonException('Name not specified.') + + compatibility = kwargs.get('compatibility', 'AnyNewerVersion') + if not isinstance(compatibility, str): + raise mesonlib.MesonException('compatibility is not string.') + if compatibility not in COMPATIBILITIES: + raise mesonlib.MesonException('compatibility must be either AnyNewerVersion, SameMajorVersion or ExactVersion.') + + if not self.detect_cmake(): + raise mesonlib.MesonException('Unable to find cmake') + + pkgroot = kwargs.get('install_dir', None) + if pkgroot is None: + pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name) + if not isinstance(pkgroot, str): + raise mesonlib.MesonException('Install_dir must be a string.') + + template_file = os.path.join(self.cmake_root, 'Modules', f'BasicConfigVersion-{compatibility}.cmake.in') + if not os.path.exists(template_file): + raise 
mesonlib.MesonException(f'your cmake installation doesn\'t support the {compatibility} compatibility') + + version_file = os.path.join(state.environment.scratch_dir, f'{name}ConfigVersion.cmake') + + conf = { + 'CVF_VERSION': (version, ''), + 'CMAKE_SIZEOF_VOID_P': (str(self.detect_voidp_size(state.environment)), '') + } + mesonlib.do_conf_file(template_file, version_file, conf, 'meson') + + res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), version_file)], pkgroot, None, state.subproject) + return ModuleReturnValue(res, [res]) + + def create_package_file(self, infile, outfile, PACKAGE_RELATIVE_PATH, extra, confdata): + package_init = PACKAGE_INIT_BASE.replace('@PACKAGE_RELATIVE_PATH@', PACKAGE_RELATIVE_PATH) + package_init = package_init.replace('@inputFileName@', infile) + package_init += extra + package_init += PACKAGE_INIT_SET_AND_CHECK + + try: + with open(infile, encoding='utf-8') as fin: + data = fin.readlines() + except Exception as e: + raise mesonlib.MesonException('Could not read input file {}: {}'.format(infile, str(e))) + + result = [] + regex = re.compile(r'(?:\\\\)+(?=\\?@)|\\@|@([-a-zA-Z0-9_]+)@') + for line in data: + line = line.replace('@PACKAGE_INIT@', package_init) + line, _missing = mesonlib.do_replacement(regex, line, 'meson', confdata) + + result.append(line) + + outfile_tmp = outfile + "~" + with open(outfile_tmp, "w", encoding='utf-8') as fout: + fout.writelines(result) + + shutil.copymode(infile, outfile_tmp) + mesonlib.replace_if_different(outfile, outfile_tmp) + + @permittedKwargs({'input', 'name', 'install_dir', 'configuration'}) + def configure_package_config_file(self, state, args, kwargs): + if args: + raise mesonlib.MesonException('configure_package_config_file takes only keyword arguments.') + + if 'input' not in kwargs: + raise mesonlib.MesonException('configure_package_config_file requires "input" keyword.') + inputfile = kwargs['input'] + if isinstance(inputfile, list): + if len(inputfile) != 1: + m = 
"Keyword argument 'input' requires exactly one file" + raise mesonlib.MesonException(m) + inputfile = inputfile[0] + if not isinstance(inputfile, (str, mesonlib.File)): + raise mesonlib.MesonException("input must be a string or a file") + if isinstance(inputfile, str): + inputfile = mesonlib.File.from_source_file(state.environment.source_dir, state.subdir, inputfile) + + ifile_abs = inputfile.absolute_path(state.environment.source_dir, state.environment.build_dir) + + if 'name' not in kwargs: + raise mesonlib.MesonException('"name" not specified.') + name = kwargs['name'] + + (ofile_path, ofile_fname) = os.path.split(os.path.join(state.subdir, f'{name}Config.cmake')) + ofile_abs = os.path.join(state.environment.build_dir, ofile_path, ofile_fname) + + install_dir = kwargs.get('install_dir', os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'cmake', name)) + if not isinstance(install_dir, str): + raise mesonlib.MesonException('"install_dir" must be a string.') + + if 'configuration' not in kwargs: + raise mesonlib.MesonException('"configuration" not specified.') + conf = kwargs['configuration'] + if not isinstance(conf, ConfigurationDataObject): + raise mesonlib.MesonException('Argument "configuration" is not of type configuration_data') + + prefix = state.environment.coredata.get_option(mesonlib.OptionKey('prefix')) + abs_install_dir = install_dir + if not os.path.isabs(abs_install_dir): + abs_install_dir = os.path.join(prefix, install_dir) + + PACKAGE_RELATIVE_PATH = os.path.relpath(prefix, abs_install_dir) + extra = '' + if re.match('^(/usr)?/lib(64)?/.+', abs_install_dir): + extra = PACKAGE_INIT_EXT.replace('@absInstallDir@', abs_install_dir) + extra = extra.replace('@installPrefix@', prefix) + + self.create_package_file(ifile_abs, ofile_abs, PACKAGE_RELATIVE_PATH, extra, conf.conf_data) + conf.mark_used() + + conffile = os.path.normpath(inputfile.relative_name()) + if conffile not in self.interpreter.build_def_files: + 
self.interpreter.build_def_files.append(conffile) + + res = build.Data([mesonlib.File(True, ofile_path, ofile_fname)], install_dir, None, state.subproject) + self.interpreter.build.data.append(res) + + return res + + @FeatureNew('subproject', '0.51.0') + @FeatureNewKwargs('subproject', '0.55.0', ['options']) + @FeatureDeprecatedKwargs('subproject', '0.55.0', ['cmake_options']) + @permittedKwargs({'cmake_options', 'required', 'options'}) + @stringArgs + def subproject(self, state, args, kwargs): + if len(args) != 1: + raise InterpreterException('Subproject takes exactly one argument') + if 'cmake_options' in kwargs and 'options' in kwargs: + raise InterpreterException('"options" cannot be used together with "cmake_options"') + dirname = args[0] + subp = self.interpreter.do_subproject(dirname, 'cmake', kwargs) + if not subp.found(): + return subp + return CMakeSubproject(subp, dirname) + + @FeatureNew('subproject_options', '0.55.0') + @noKwargs + @noPosargs + def subproject_options(self, state, args, kwargs) -> CMakeSubprojectOptions: + return CMakeSubprojectOptions() + +def initialize(*args, **kwargs): + return CmakeModule(*args, **kwargs) diff --git a/meson/mesonbuild/modules/dlang.py b/meson/mesonbuild/modules/dlang.py new file mode 100644 index 000000000..60d28854e --- /dev/null +++ b/meson/mesonbuild/modules/dlang.py @@ -0,0 +1,135 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# This file contains the detection logic for external dependencies that +# are UI-related. + +import json +import os + +from . import ExtensionModule +from .. import dependencies +from .. import mlog +from ..mesonlib import Popen_safe, MesonException +from ..programs import ExternalProgram + +class DlangModule(ExtensionModule): + class_dubbin = None + init_dub = False + + def __init__(self, interpreter): + super().__init__(interpreter) + self.methods.update({ + 'generate_dub_file': self.generate_dub_file, + }) + + def _init_dub(self): + if DlangModule.class_dubbin is None: + self.dubbin = dependencies.DubDependency.class_dubbin + DlangModule.class_dubbin = self.dubbin + else: + self.dubbin = DlangModule.class_dubbin + + if DlangModule.class_dubbin is None: + self.dubbin = self.check_dub() + DlangModule.class_dubbin = self.dubbin + else: + self.dubbin = DlangModule.class_dubbin + + if not self.dubbin: + if not self.dubbin: + raise MesonException('DUB not found.') + + def generate_dub_file(self, state, args, kwargs): + if not DlangModule.init_dub: + self._init_dub() + + if len(args) < 2: + raise MesonException('Missing arguments') + + config = { + 'name': args[0] + } + + config_path = os.path.join(args[1], 'dub.json') + if os.path.exists(config_path): + with open(config_path, encoding='utf-8') as ofile: + try: + config = json.load(ofile) + except ValueError: + mlog.warning('Failed to load the data in dub.json') + + warn_publishing = ['description', 'license'] + for arg in warn_publishing: + if arg not in kwargs and \ + arg not in config: + mlog.warning('Without', mlog.bold(arg), 'the DUB package can\'t be published') + + for key, value in kwargs.items(): + if key == 'dependencies': + config[key] = {} + if isinstance(value, list): + for dep in value: + if isinstance(dep, dependencies.Dependency): + name = dep.get_name() + ret, res = self._call_dubbin(['describe', name]) + if ret == 0: + version = dep.get_version() + if version is None: + config[key][name] = '' + 
else: + config[key][name] = version + elif isinstance(value, dependencies.Dependency): + name = value.get_name() + ret, res = self._call_dubbin(['describe', name]) + if ret == 0: + version = value.get_version() + if version is None: + config[key][name] = '' + else: + config[key][name] = version + else: + config[key] = value + + with open(config_path, 'w', encoding='utf-8') as ofile: + ofile.write(json.dumps(config, indent=4, ensure_ascii=False)) + + def _call_dubbin(self, args, env=None): + p, out = Popen_safe(self.dubbin.get_command() + args, env=env)[0:2] + return p.returncode, out.strip() + + def check_dub(self): + dubbin = ExternalProgram('dub', silent=True) + if dubbin.found(): + try: + p, out = Popen_safe(dubbin.get_command() + ['--version'])[0:2] + if p.returncode != 0: + mlog.warning('Found dub {!r} but couldn\'t run it' + ''.format(' '.join(dubbin.get_command()))) + # Set to False instead of None to signify that we've already + # searched for it and not found it + dubbin = False + except (FileNotFoundError, PermissionError): + dubbin = False + else: + dubbin = False + if dubbin: + mlog.log('Found DUB:', mlog.bold(dubbin.get_path()), + '(%s)' % out.strip()) + else: + mlog.log('Found DUB:', mlog.red('NO')) + return dubbin + +def initialize(*args, **kwargs): + return DlangModule(*args, **kwargs) diff --git a/meson/mesonbuild/modules/fs.py b/meson/mesonbuild/modules/fs.py new file mode 100644 index 000000000..ab3aae2b1 --- /dev/null +++ b/meson/mesonbuild/modules/fs.py @@ -0,0 +1,258 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import typing as T +import hashlib +import os +from pathlib import Path, PurePath, PureWindowsPath + +from .. import mlog +from . import ExtensionModule +from ..mesonlib import ( + File, + FileOrString, + MesonException, + path_is_in_root, +) +from ..interpreterbase import FeatureNew, KwargInfo, typed_kwargs, typed_pos_args, noKwargs + +if T.TYPE_CHECKING: + from . import ModuleState + from ..interpreter import Interpreter + + from typing_extensions import TypedDict + + class ReadKwArgs(TypedDict): + """Keyword Arguments for fs.read.""" + + encoding: str + + +class FSModule(ExtensionModule): + + def __init__(self, interpreter: 'Interpreter') -> None: + super().__init__(interpreter) + self.methods.update({ + 'expanduser': self.expanduser, + 'is_absolute': self.is_absolute, + 'as_posix': self.as_posix, + 'exists': self.exists, + 'is_symlink': self.is_symlink, + 'is_file': self.is_file, + 'is_dir': self.is_dir, + 'hash': self.hash, + 'size': self.size, + 'is_samepath': self.is_samepath, + 'replace_suffix': self.replace_suffix, + 'parent': self.parent, + 'name': self.name, + 'stem': self.stem, + 'read': self.read, + }) + + def _absolute_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path: + """ + make an absolute path from a relative path, WITHOUT resolving symlinks + """ + if isinstance(arg, File): + return Path(arg.absolute_path(state.source_root, self.interpreter.environment.get_build_dir())) + return Path(state.source_root) / Path(state.subdir) / Path(arg).expanduser() + + def _resolve_dir(self, state: 'ModuleState', arg: 'FileOrString') -> Path: 
+ """ + resolves symlinks and makes absolute a directory relative to calling meson.build, + if not already absolute + """ + path = self._absolute_dir(state, arg) + try: + # accommodate unresolvable paths e.g. symlink loops + path = path.resolve() + except Exception: + # return the best we could do + pass + return path + + @noKwargs + @FeatureNew('fs.expanduser', '0.54.0') + @typed_pos_args('fs.expanduser', str) + def expanduser(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str: + return str(Path(args[0]).expanduser()) + + @noKwargs + @FeatureNew('fs.is_absolute', '0.54.0') + @typed_pos_args('fs.is_absolute', (str, File)) + def is_absolute(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool: + if isinstance(args[0], File): + FeatureNew('fs.is_absolute_file', '0.59.0').use(state.subproject) + return PurePath(str(args[0])).is_absolute() + + @noKwargs + @FeatureNew('fs.as_posix', '0.54.0') + @typed_pos_args('fs.as_posix', str) + def as_posix(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> str: + """ + this function assumes you are passing a Windows path, even if on a Unix-like system + and so ALL '\' are turned to '/', even if you meant to escape a character + """ + return PureWindowsPath(args[0]).as_posix() + + @noKwargs + @typed_pos_args('fs.exists', str) + def exists(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool: + return self._resolve_dir(state, args[0]).exists() + + @noKwargs + @typed_pos_args('fs.is_symlink', (str, File)) + def is_symlink(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool: + if isinstance(args[0], File): + FeatureNew('fs.is_symlink_file', '0.59.0').use(state.subproject) + return self._absolute_dir(state, args[0]).is_symlink() + + @noKwargs + @typed_pos_args('fs.is_file', str) + def is_file(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, 
T.Any]) -> bool: + return self._resolve_dir(state, args[0]).is_file() + + @noKwargs + @typed_pos_args('fs.is_dir', str) + def is_dir(self, state: 'ModuleState', args: T.Tuple[str], kwargs: T.Dict[str, T.Any]) -> bool: + return self._resolve_dir(state, args[0]).is_dir() + + @noKwargs + @typed_pos_args('fs.hash', (str, File), str) + def hash(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str: + if isinstance(args[0], File): + FeatureNew('fs.hash_file', '0.59.0').use(state.subproject) + file = self._resolve_dir(state, args[0]) + if not file.is_file(): + raise MesonException(f'{file} is not a file and therefore cannot be hashed') + try: + h = hashlib.new(args[1]) + except ValueError: + raise MesonException('hash algorithm {} is not available'.format(args[1])) + mlog.debug('computing {} sum of {} size {} bytes'.format(args[1], file, file.stat().st_size)) + h.update(file.read_bytes()) + return h.hexdigest() + + @noKwargs + @typed_pos_args('fs.size', (str, File)) + def size(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> int: + if isinstance(args[0], File): + FeatureNew('fs.size_file', '0.59.0').use(state.subproject) + file = self._resolve_dir(state, args[0]) + if not file.is_file(): + raise MesonException(f'{file} is not a file and therefore cannot be sized') + try: + return file.stat().st_size + except ValueError: + raise MesonException('{} size could not be determined'.format(args[0])) + + @noKwargs + @typed_pos_args('fs.is_samepath', (str, File), (str, File)) + def is_samepath(self, state: 'ModuleState', args: T.Tuple['FileOrString', 'FileOrString'], kwargs: T.Dict[str, T.Any]) -> bool: + if isinstance(args[0], File) or isinstance(args[1], File): + FeatureNew('fs.is_samepath_file', '0.59.0').use(state.subproject) + file1 = self._resolve_dir(state, args[0]) + file2 = self._resolve_dir(state, args[1]) + if not file1.exists(): + return False + if not file2.exists(): + return False + 
try: + return file1.samefile(file2) + except OSError: + return False + + @noKwargs + @typed_pos_args('fs.replace_suffix', (str, File), str) + def replace_suffix(self, state: 'ModuleState', args: T.Tuple['FileOrString', str], kwargs: T.Dict[str, T.Any]) -> str: + if isinstance(args[0], File): + FeatureNew('fs.replace_suffix_file', '0.59.0').use(state.subproject) + original = PurePath(str(args[0])) + new = original.with_suffix(args[1]) + return str(new) + + @noKwargs + @typed_pos_args('fs.parent', (str, File)) + def parent(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str: + if isinstance(args[0], File): + FeatureNew('fs.parent_file', '0.59.0').use(state.subproject) + original = PurePath(str(args[0])) + new = original.parent + return str(new) + + @noKwargs + @typed_pos_args('fs.name', (str, File)) + def name(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str: + if isinstance(args[0], File): + FeatureNew('fs.name_file', '0.59.0').use(state.subproject) + original = PurePath(str(args[0])) + new = original.name + return str(new) + + @noKwargs + @typed_pos_args('fs.stem', (str, File)) + @FeatureNew('fs.stem', '0.54.0') + def stem(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: T.Dict[str, T.Any]) -> str: + if isinstance(args[0], File): + FeatureNew('fs.stem_file', '0.59.0').use(state.subproject) + original = PurePath(str(args[0])) + new = original.stem + return str(new) + + @FeatureNew('fs.read', '0.57.0') + @typed_pos_args('fs.read', (str, File)) + @typed_kwargs('fs.read', KwargInfo('encoding', str, default='utf-8')) + def read(self, state: 'ModuleState', args: T.Tuple['FileOrString'], kwargs: 'ReadKwArgs') -> str: + """Read a file from the source tree and return its value as a decoded + string. + + If the encoding is not specified, the file is assumed to be utf-8 + encoded. 
Paths must be relative by default (to prevent accidents) and + are forbidden to be read from the build directory (to prevent build + loops) + """ + path = args[0] + encoding = kwargs['encoding'] + src_dir = self.interpreter.environment.source_dir + sub_dir = self.interpreter.subdir + build_dir = self.interpreter.environment.get_build_dir() + + if isinstance(path, File): + if path.is_built: + raise MesonException( + 'fs.read_file does not accept built files() objects') + path = os.path.join(src_dir, path.relative_name()) + else: + if sub_dir: + src_dir = os.path.join(src_dir, sub_dir) + path = os.path.join(src_dir, path) + + path = os.path.abspath(path) + if path_is_in_root(Path(path), Path(build_dir), resolve=True): + raise MesonException('path must not be in the build tree') + try: + with open(path, encoding=encoding) as f: + data = f.read() + except UnicodeDecodeError: + raise MesonException(f'decoding failed for {path}') + # Reconfigure when this file changes as it can contain data used by any + # part of the build configuration (e.g. `project(..., version: + # fs.read_file('VERSION')` or `configure_file(...)` + self.interpreter.add_build_def_file(path) + return data + + +def initialize(*args: T.Any, **kwargs: T.Any) -> FSModule: + return FSModule(*args, **kwargs) diff --git a/meson/mesonbuild/modules/gnome.py b/meson/mesonbuild/modules/gnome.py new file mode 100644 index 000000000..881e4240e --- /dev/null +++ b/meson/mesonbuild/modules/gnome.py @@ -0,0 +1,1812 @@ +# Copyright 2015-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

'''This module provides helper functions for Gnome/GLib related
functionality such as gobject-introspection, gresources and gtk-doc'''

import os
import copy
import subprocess
import functools
import typing as T

from .. import build
from .. import mlog
from .. import mesonlib
from .. import interpreter
from . import GResourceTarget, GResourceHeaderTarget, GirTarget, TypelibTarget, VapiTarget
from . import ExtensionModule
from . import ModuleReturnValue
from ..mesonlib import (
    MachineChoice, MesonException, OrderedSet, Popen_safe, extract_as_list,
    join_args, HoldableObject
)
from ..dependencies import Dependency, PkgConfigDependency, InternalDependency
from ..interpreterbase import noPosargs, noKwargs, permittedKwargs, FeatureNew, FeatureNewKwargs, FeatureDeprecatedKwargs
from ..interpreterbase import typed_kwargs, KwargInfo, ContainerTypeInfo
from ..programs import ExternalProgram, OverrideProgram
from ..build import CustomTarget, CustomTargetIndex, GeneratedList

if T.TYPE_CHECKING:
    from ..compilers import Compiler
    from ..interpreter import Interpreter

# gresource compilation is broken due to the way
# the resource compiler and Ninja clash about it
#
# https://github.com/ninja-build/ninja/issues/1184
# https://bugzilla.gnome.org/show_bug.cgi?id=774368
gresource_dep_needed_version = '>= 2.51.1'

# Module-level cache for the detected native glib version; populated lazily
# by GnomeModule._get_native_glib_version().
native_glib_version = None

class GnomeModule(ExtensionModule):
    def __init__(self, interpreter: 'Interpreter') -> None:
        super().__init__(interpreter)
        # Lazily-resolved gobject-introspection dependency (see _get_gir_dep).
        self.gir_dep = None
        # Flags/lists tracking which post-install scripts were already
        # registered, so each is only added once per build.
        self.install_glib_compile_schemas = False
        self.install_gio_querymodules = []
        self.install_gtk_update_icon_cache = False
        self.install_update_desktop_database = False
        self.devenv = None
        self.methods.update({
            'post_install': self.post_install,
            'compile_resources': self.compile_resources,
            'generate_gir': self.generate_gir,
            'compile_schemas': self.compile_schemas,
            'yelp': self.yelp,
            'gtkdoc': self.gtkdoc,
            'gtkdoc_html_dir': self.gtkdoc_html_dir,
            'gdbus_codegen': self.gdbus_codegen,
            'mkenums': self.mkenums,
            'mkenums_simple': self.mkenums_simple,
            'genmarshal': self.genmarshal,
            'generate_vapi': self.generate_vapi,
        })

    @staticmethod
    def _get_native_glib_version(state):
        """Return the native glib version via pkg-config, caching the result.

        Falls back to '2.54' (with a warning) when glib cannot be detected.
        """
        global native_glib_version
        if native_glib_version is None:
            glib_dep = PkgConfigDependency('glib-2.0', state.environment,
                                           {'native': True, 'required': False})
            if glib_dep.found():
                native_glib_version = glib_dep.get_version()
            else:
                mlog.warning('Could not detect glib version, assuming 2.54. '
                             'You may get build errors if your glib is older.')
                native_glib_version = '2.54'
        return native_glib_version

    @mesonlib.run_once
    def __print_gresources_warning(self, state):
        # Warn (once) when glib is too old for reliable gresource dependency
        # tracking; see gresource_dep_needed_version above.
        if not mesonlib.version_compare(self._get_native_glib_version(state),
                                        gresource_dep_needed_version):
            mlog.warning('GLib compiled dependencies do not work reliably with \n'
                         'the current version of GLib. See the following upstream issue:',
                         mlog.bold('https://bugzilla.gnome.org/show_bug.cgi?id=774368'))

    @staticmethod
    def _print_gdbus_warning():
        # One-shot advisory for projects using gdbus_codegen with old GLib.
        mlog.warning('Code generated with gdbus_codegen() requires the root directory be added to\n'
                     '  include_directories of targets with GLib < 2.51.3:',
                     mlog.bold('https://github.com/mesonbuild/meson/issues/1387'),
                     once=True)

    def _get_dep(self, state, depname, native=False, required=True):
        """Look up a dependency through the interpreter's dependency() machinery."""
        kwargs = {'native': native, 'required': required}
        return self.interpreter.func_dependency(state.current_node, [depname], kwargs)

    def _get_native_binary(self, state, name, depname, varname, required=True):
        """Locate a native (build-machine) tool.

        Search order: meson overrides (subproject-built tools), machine file
        binary entries, a pkg-config variable on `depname`, then PATH.
        """
        # Look in overrides in case glib/gtk/etc are built as subproject
        prog = self.interpreter.program_from_overrides([name], [])
        if prog is not None:
            return prog

        # Look in machine file
        prog = state.environment.lookup_binary_entry(MachineChoice.HOST, name)
        if prog is not None:
            return ExternalProgram.from_entry(name, prog)

        # Check if pkgconfig has a variable
        dep = self._get_dep(state, depname, native=True, required=False)
        if dep.found() and dep.type_name == 'pkgconfig':
            value = dep.get_pkgconfig_variable(varname, {})
            if value:
                return ExternalProgram(name, value)

        # Normal program lookup
        return state.find_program(name, required=required)

    @typed_kwargs('gnome.post_install',
        KwargInfo('glib_compile_schemas', bool, default=False),
        KwargInfo('gio_querymodules', ContainerTypeInfo(list, str), default=[], listify=True),
        KwargInfo('gtk_update_icon_cache', bool, default=False),
        KwargInfo('update_desktop_database', bool, default=False, since='0.59.0'),
    )
    @noPosargs
    @FeatureNew('gnome.post_install', '0.57.0')
    def post_install(self, state, args, kwargs):
        """Register standard GNOME post-install scripts (schemas, gio modules,
        icon cache, desktop database). Each script is registered at most once
        per build and is skipped when installing into a DESTDIR.
        """
        rv = []
        datadir_abs = os.path.join(state.environment.get_prefix(), state.environment.get_datadir())
        if kwargs['glib_compile_schemas'] and not self.install_glib_compile_schemas:
            self.install_glib_compile_schemas = True
            prog = self._get_native_binary(state, 'glib-compile-schemas', 'gio-2.0', 'glib_compile_schemas')
            schemasdir = os.path.join(datadir_abs, 'glib-2.0', 'schemas')
            script = state.backend.get_executable_serialisation([prog, schemasdir])
            script.skip_if_destdir = True
            rv.append(script)
        for d in kwargs['gio_querymodules']:
            if d not in self.install_gio_querymodules:
                self.install_gio_querymodules.append(d)
                prog = self._get_native_binary(state, 'gio-querymodules', 'gio-2.0', 'gio_querymodules')
                moduledir = os.path.join(state.environment.get_prefix(), d)
                script = state.backend.get_executable_serialisation([prog, moduledir])
                script.skip_if_destdir = True
                rv.append(script)
        if kwargs['gtk_update_icon_cache'] and not self.install_gtk_update_icon_cache:
            self.install_gtk_update_icon_cache = True
            # Prefer the GTK4 tool, fall back to the GTK3 one.
            prog = self._get_native_binary(state, 'gtk4-update-icon-cache', 'gtk-4.0', 'gtk4_update_icon_cache', required=False)
            found = isinstance(prog, build.Executable) or prog.found()
            if not found:
                prog = self._get_native_binary(state, 'gtk-update-icon-cache', 'gtk+-3.0', 'gtk_update_icon_cache')
            icondir = os.path.join(datadir_abs, 'icons', 'hicolor')
            script = state.backend.get_executable_serialisation([prog, '-q', '-t', '-f', icondir])
            script.skip_if_destdir = True
            rv.append(script)
        if kwargs['update_desktop_database'] and not self.install_update_desktop_database:
            self.install_update_desktop_database = True
            prog = self._get_native_binary(state, 'update-desktop-database', 'desktop-file-utils', 'update_desktop_database')
            appdir = os.path.join(datadir_abs, 'applications')
            script = state.backend.get_executable_serialisation([prog, '-q', appdir])
            script.skip_if_destdir = True
            rv.append(script)
        return ModuleReturnValue(None, rv)

    @FeatureNewKwargs('gnome.compile_resources', '0.37.0', ['gresource_bundle', 'export', 'install_header'])
    @permittedKwargs({'source_dir', 'c_name', 'dependencies', 'export', 'gresource_bundle', 'install_header',
                      'install', 'install_dir', 'extra_args', 'build_by_default'})
    def compile_resources(self, state, args, kwargs):
        """Compile a GResource XML description into C/C++ source (or a
        .gresource bundle) via glib-compile-resources.

        args[0] is the target base name, args[1] the resource XML file.
        NOTE(review): this method mutates `kwargs` in place (pop/assign) and
        then reuses it as the CustomTarget kwargs — statement order matters.
        """
        self.__print_gresources_warning(state)
        glib_version = self._get_native_glib_version(state)

        glib_compile_resources = state.find_program('glib-compile-resources')
        cmd = [glib_compile_resources, '@INPUT@']

        source_dirs, dependencies = [mesonlib.extract_as_list(kwargs, c, pop=True) for c in ['source_dir', 'dependencies']]

        if len(args) < 2:
            raise MesonException('Not enough arguments; the name of the resource '
                                 'and the path to the XML file are required')

        # Validate dependencies
        subdirs = []
        depends = []
        for (ii, dep) in enumerate(dependencies):
            if isinstance(dep, mesonlib.File):
                subdirs.append(dep.subdir)
            elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
                depends.append(dep)
                subdirs.append(dep.get_subdir())
                if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
                    m = 'The "dependencies" argument of gnome.compile_resources() can not\n' \
                        'be used with the current version of glib-compile-resources due to\n' \
                        '<https://bugzilla.gnome.org/show_bug.cgi?id=774368>'
                    raise MesonException(m)
            else:
                m = 'Unexpected dependency type {!r} for gnome.compile_resources() ' \
                    '"dependencies" argument.\nPlease pass the return value of ' \
                    'custom_target() or configure_file()'
                raise MesonException(m.format(dep))

        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
            # Old glib: scan the XML ourselves for dependency information.
            ifile = args[1]
            if isinstance(ifile, mesonlib.File):
                # glib-compile-resources will be run inside the source dir,
                # so we need either 'src_to_build' or the absolute path.
                # Absolute path is the easiest choice.
                if ifile.is_built:
                    ifile = os.path.join(state.environment.get_build_dir(), ifile.subdir, ifile.fname)
                else:
                    ifile = os.path.join(ifile.subdir, ifile.fname)
            elif isinstance(ifile, str):
                ifile = os.path.join(state.subdir, ifile)
            elif isinstance(ifile, (build.CustomTarget,
                                    build.CustomTargetIndex,
                                    build.GeneratedList)):
                m = 'Resource xml files generated at build-time cannot be used ' \
                    'with gnome.compile_resources() because we need to scan ' \
                    'the xml for dependencies. Use configure_file() instead ' \
                    'to generate it at configure-time.'
                raise MesonException(m)
            else:
                raise MesonException(f'Invalid file argument: {ifile!r}')
            depend_files, depends, subdirs = self._get_gresource_dependencies(
                state, ifile, source_dirs, dependencies)

        # Make source dirs relative to build dir now
        source_dirs = [os.path.join(state.build_to_src, state.subdir, d) for d in source_dirs]
        # Ensure build directories of generated deps are included
        source_dirs += subdirs
        # Always include current directory, but after paths set by user
        source_dirs.append(os.path.join(state.build_to_src, state.subdir))

        for source_dir in OrderedSet(source_dirs):
            cmd += ['--sourcedir', source_dir]

        if 'c_name' in kwargs:
            cmd += ['--c-name', kwargs.pop('c_name')]
        export = kwargs.pop('export', False)
        if not export:
            cmd += ['--internal']

        cmd += ['--generate', '--target', '@OUTPUT@']

        cmd += mesonlib.stringlistify(kwargs.pop('extra_args', []))

        gresource = kwargs.pop('gresource_bundle', False)
        if gresource:
            output = args[0] + '.gresource'
            name = args[0] + '_gresource'
        else:
            # Emit C or C++ depending on which compiler the project has.
            if 'c' in state.environment.coredata.compilers.host.keys():
                output = args[0] + '.c'
                name = args[0] + '_c'
            elif 'cpp' in state.environment.coredata.compilers.host.keys():
                output = args[0] + '.cpp'
                name = args[0] + '_cpp'
            else:
                raise MesonException('Compiling GResources into code is only supported in C and C++ projects')

        if kwargs.get('install', False) and not gresource:
            raise MesonException('The install kwarg only applies to gresource bundles, see install_header')

        install_header = kwargs.pop('install_header', False)
        if install_header and gresource:
            raise MesonException('The install_header kwarg does not apply to gresource bundles')
        if install_header and not export:
            raise MesonException('GResource header is installed yet export is not enabled')

        kwargs['input'] = args[1]
        kwargs['output'] = output
        kwargs['depends'] = depends
        if not mesonlib.version_compare(glib_version, gresource_dep_needed_version):
            # This will eventually go out of sync if dependencies are added
            kwargs['depend_files'] = depend_files
            kwargs['command'] = cmd
        else:
            # Newer glib can write a depfile itself.
            depfile = f'{output}.d'
            kwargs['depfile'] = depfile
            kwargs['command'] = copy.copy(cmd) + ['--dependency-file', '@DEPFILE@']
        target_c = GResourceTarget(name, state.subdir, state.subproject, kwargs)

        if gresource: # Only one target for .gresource files
            return ModuleReturnValue(target_c, [target_c])

        h_kwargs = {
            'command': cmd,
            'input': args[1],
            'output': args[0] + '.h',
            # The header doesn't actually care about the files yet it errors if missing
            'depends': depends
        }
        if 'build_by_default' in kwargs:
            h_kwargs['build_by_default'] = kwargs['build_by_default']
        if install_header:
            h_kwargs['install'] = install_header
            h_kwargs['install_dir'] = kwargs.get('install_dir',
                                                 state.environment.coredata.get_option(mesonlib.OptionKey('includedir')))
        target_h = GResourceHeaderTarget(args[0] + '_h', state.subdir, state.subproject, h_kwargs)
        rv = [target_c, target_h]
        return ModuleReturnValue(rv, rv)

    def _get_gresource_dependencies(self, state, input_file, source_dirs, dependencies):
        """Run `glib-compile-resources --generate-dependencies` to discover the
        files referenced by a gresource XML, matching them against the
        user-supplied `dependencies` list.
        """
        cmd = ['glib-compile-resources',
               input_file,
               '--generate-dependencies']

        # Prefer generated files over source files
        cmd += ['--sourcedir', state.subdir] # Current build dir
        for source_dir in source_dirs:
            cmd += ['--sourcedir',
                    os.path.join(state.subdir, source_dir)]

        try:
            pc, stdout, stderr = Popen_safe(cmd, cwd=state.environment.get_source_dir())
        except (FileNotFoundError, PermissionError):
            raise MesonException('Could not execute glib-compile-resources.')
        if pc.returncode != 0:
            m = 'glib-compile-resources failed to get dependencies for {}:\n{}'
            mlog.warning(m.format(cmd[1], stderr))
            raise subprocess.CalledProcessError(pc.returncode, cmd)

        dep_files = stdout.split('\n')[:-1]

        depends = []
        subdirs = []
        # Iterate over a copy since dep_files is mutated inside the loop.
        for resfile in dep_files[:]:
            resbasename = os.path.basename(resfile)
            for dep in dependencies:
                if isinstance(dep, mesonlib.File):
                    if dep.fname != resbasename:
                        continue
                    dep_files.remove(resfile)
                    dep_files.append(dep)
                    subdirs.append(dep.subdir)
                    break
                elif isinstance(dep, (build.CustomTarget, build.CustomTargetIndex)):
                    fname = None
                    outputs = {(o, os.path.basename(o)) for o in dep.get_outputs()}
                    for o, baseo in outputs:
                        if baseo == resbasename:
                            fname = o
                            break
                    if fname is not None:
                        dep_files.remove(resfile)
                        depends.append(dep)
                        subdirs.append(dep.get_subdir())
                        break
            else:
                # In generate-dependencies mode, glib-compile-resources doesn't raise
                # an error for missing resources but instead prints whatever filename
                # was listed in the input file. That's good because it means we can
                # handle resource files that get generated as part of the build, as
                # follows.
                #
                # If there are multiple generated resource files with the same basename
                # then this code will get confused.
                try:
                    f = mesonlib.File.from_source_file(state.environment.get_source_dir(),
                                                       ".", resfile)
                except MesonException:
                    raise MesonException(
                        'Resource "%s" listed in "%s" was not found. If this is a '
                        'generated file, pass the target that generates it to '
                        'gnome.compile_resources() using the "dependencies" '
                        'keyword argument.' % (resfile, input_file))
                dep_files.remove(resfile)
                dep_files.append(f)
        return dep_files, depends, subdirs

    def _get_link_args(self, state, lib, depends, include_rpath=False,
                       use_gir_args=False):
        """Build -L/-l (or --extra-library) arguments for linking `lib` into
        g-ir-scanner invocations. Appends `lib` to `depends` (mutated).
        """
        link_command = []
        # Construct link args
        if isinstance(lib, build.SharedLibrary):
            libdir = os.path.join(state.environment.get_build_dir(), state.backend.get_target_dir(lib))
            link_command.append('-L' + libdir)
            if include_rpath:
                link_command.append('-Wl,-rpath,' + libdir)
            depends.append(lib)
            # Needed for the following binutils bug:
            # https://github.com/mesonbuild/meson/issues/1911
            # However, g-ir-scanner does not understand -Wl,-rpath
            # so we need to use -L instead
            for d in state.backend.determine_rpath_dirs(lib):
                d = os.path.join(state.environment.get_build_dir(), d)
                link_command.append('-L' + d)
                if include_rpath:
                    link_command.append('-Wl,-rpath,' + d)
        if use_gir_args and self._gir_has_option('--extra-library'):
            link_command.append('--extra-library=' + lib.name)
        else:
            link_command.append('-l' + lib.name)
        return link_command

    def _get_dependencies_flags(self, deps, state, depends, include_rpath=False,
                                use_gir_args=False, separate_nodedup=False):
        """Recursively collect cflags, link flags and gir include dirs from a
        dependency list. Returns a 4-tuple, or a 5-tuple (with the
        non-dedupable ldflags kept separate) when `separate_nodedup` is True.
        `depends` is mutated.
        """
        cflags = OrderedSet()
        internal_ldflags = OrderedSet()
        external_ldflags = OrderedSet()
        # External linker flags that can't be de-duped reliably because they
        # require two args in order, such as -framework AVFoundation
        external_ldflags_nodedup = []
        gi_includes = OrderedSet()
        deps = mesonlib.listify(deps)

        for dep in deps:
            if isinstance(dep, Dependency):
                girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='')
                if girdir:
                    gi_includes.update([girdir])
            if isinstance(dep, InternalDependency):
                cflags.update(dep.get_compile_args())
                cflags.update(state.get_include_args(dep.include_directories))
                for lib in dep.libraries:
                    if isinstance(lib, build.SharedLibrary):
                        internal_ldflags.update(self._get_link_args(state, lib, depends, include_rpath))
                        libdepflags = self._get_dependencies_flags(lib.get_external_deps(), state, depends, include_rpath,
                                                                   use_gir_args, True)
                        cflags.update(libdepflags[0])
                        internal_ldflags.update(libdepflags[1])
                        external_ldflags.update(libdepflags[2])
                        external_ldflags_nodedup += libdepflags[3]
                        gi_includes.update(libdepflags[4])
                extdepflags = self._get_dependencies_flags(dep.ext_deps, state, depends, include_rpath,
                                                           use_gir_args, True)
                cflags.update(extdepflags[0])
                internal_ldflags.update(extdepflags[1])
                external_ldflags.update(extdepflags[2])
                external_ldflags_nodedup += extdepflags[3]
                gi_includes.update(extdepflags[4])
                for source in dep.sources:
                    if isinstance(source, GirTarget):
                        gi_includes.update([os.path.join(state.environment.get_build_dir(),
                                                         source.get_subdir())])
            # This should be any dependency other than an internal one.
            elif isinstance(dep, Dependency):
                cflags.update(dep.get_compile_args())
                # NOTE: iterator is consumed below via next() for -framework
                # pairs — do not convert this to a plain list loop.
                ldflags = iter(dep.get_link_args(raw=True))
                for lib in ldflags:
                    if (os.path.isabs(lib) and
                            # For PkgConfigDependency only:
                            getattr(dep, 'is_libtool', False)):
                        lib_dir = os.path.dirname(lib)
                        external_ldflags.update(["-L%s" % lib_dir])
                        if include_rpath:
                            external_ldflags.update([f'-Wl,-rpath {lib_dir}'])
                        libname = os.path.basename(lib)
                        if libname.startswith("lib"):
                            libname = libname[3:]
                        libname = libname.split(".so")[0]
                        lib = "-l%s" % libname
                    # FIXME: Hack to avoid passing some compiler options in
                    if lib.startswith("-W"):
                        continue
                    # If it's a framework arg, slurp the framework name too
                    # to preserve the order of arguments
                    if lib == '-framework':
                        external_ldflags_nodedup += [lib, next(ldflags)]
                    else:
                        external_ldflags.update([lib])
            elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)):
                cflags.update(state.get_include_args(dep.get_include_dirs()))
                depends.append(dep)
            else:
                mlog.log(f'dependency {dep!r} not handled to build gir files')
                continue

        if use_gir_args and self._gir_has_option('--extra-library'):
            def fix_ldflags(ldflags):
                # Rewrite -lfoo into --extra-library=foo for g-ir-scanner.
                fixed_ldflags = OrderedSet()
                for ldflag in ldflags:
                    if ldflag.startswith("-l"):
                        ldflag = ldflag.replace('-l', '--extra-library=', 1)
                    fixed_ldflags.add(ldflag)
                return fixed_ldflags
            internal_ldflags = fix_ldflags(internal_ldflags)
            external_ldflags = fix_ldflags(external_ldflags)
        if not separate_nodedup:
            external_ldflags.update(external_ldflags_nodedup)
            return cflags, internal_ldflags, external_ldflags, gi_includes
        else:
            return cflags, internal_ldflags, external_ldflags, external_ldflags_nodedup, gi_includes

    def _unwrap_gir_target(self, girtarget, state):
        """Validate that a generate_gir() target is introspectable; static
        libraries additionally require a new-enough gobject-introspection.
        """
        if not isinstance(girtarget, (build.Executable, build.SharedLibrary,
                                      build.StaticLibrary)):
            raise MesonException(f'Gir target must be an executable or library but is "{girtarget}" of type {type(girtarget).__name__}')

        STATIC_BUILD_REQUIRED_VERSION = ">=1.58.1"
        if isinstance(girtarget, (build.StaticLibrary)) and \
           not mesonlib.version_compare(
               self._get_gir_dep(state)[0].get_version(),
               STATIC_BUILD_REQUIRED_VERSION):
            raise MesonException('Static libraries can only be introspected with GObject-Introspection ' + STATIC_BUILD_REQUIRED_VERSION)

        return girtarget

    def _devenv_append(self, varname: str, value: str) -> None:
        # Lazily create the devenv EnvironmentVariables object and register it
        # with the build on first use.
        if self.devenv is None:
            self.devenv = build.EnvironmentVariables()
            self.interpreter.build.devenv.append(self.devenv)
        self.devenv.append(varname, [value])

    def _get_gir_dep(self, state):
        """Resolve (once) the gobject-introspection dependency plus the
        g-ir-scanner and g-ir-compiler programs; returns the cached triple.
        """
        if not self.gir_dep:
            self.gir_dep = self._get_dep(state, 'gobject-introspection-1.0')
            self.giscanner = self._get_native_binary(state, 'g-ir-scanner', 'gobject-introspection-1.0', 'g_ir_scanner')
            self.gicompiler = self._get_native_binary(state, 'g-ir-compiler', 'gobject-introspection-1.0', 'g_ir_compiler')
        return self.gir_dep, self.giscanner, self.gicompiler

    @functools.lru_cache(maxsize=None)
    def _gir_has_option(self, option) -> bool:
        exe = self.giscanner
        if
           isinstance(exe, OverrideProgram):
            # Handle overridden g-ir-scanner
            assert option in ['--extra-library', '--sources-top-dirs']
            return True
        p, o, e = Popen_safe(exe.get_command() + ['--help'], stderr=subprocess.STDOUT)
        # Option is supported if it appears in --help output.
        return p.returncode == 0 and option in o

    def _scan_header(self, kwargs):
        """Turn the 'header' kwarg into a --c-include argument (consumes it)."""
        ret = []
        header = kwargs.pop('header', None)
        if header:
            if not isinstance(header, str):
                raise MesonException('header must be a string')
            ret = ['--c-include=' + header]
        return ret

    def _scan_extra_args(self, kwargs):
        """Return the 'extra_args' kwarg as a string list (consumes it)."""
        return mesonlib.stringlistify(kwargs.pop('extra_args', []))

    def _scan_link_withs(self, state, depends, kwargs):
        """Turn 'link_with' targets into scanner link args; mutates depends."""
        ret = []
        if 'link_with' in kwargs:
            link_with = mesonlib.extract_as_list(kwargs, 'link_with', pop = True)

            for link in link_with:
                ret += self._get_link_args(state, link, depends,
                                           use_gir_args=True)
        return ret

    # May mutate depends and gir_inc_dirs
    def _scan_include(self, state, depends, gir_inc_dirs, kwargs):
        """Turn 'includes' (strings or GirTargets) into --include[-uninstalled]
        arguments; mutates depends and gir_inc_dirs.
        """
        ret = []

        if 'includes' in kwargs:
            includes = mesonlib.extract_as_list(kwargs, 'includes', pop = True)
            for inc in includes:
                if isinstance(inc, str):
                    ret += [f'--include={inc}']
                elif isinstance(inc, GirTarget):
                    gir_inc_dirs += [
                        os.path.join(state.environment.get_build_dir(),
                                     inc.get_subdir()),
                    ]
                    ret += [
                        "--include-uninstalled={}".format(os.path.join(inc.get_subdir(), inc.get_basename()))
                    ]
                    depends += [inc]
                else:
                    raise MesonException(
                        'Gir includes must be str, GirTarget, or list of them. '
                        'Got %s.' % type(inc).__name__)

        return ret

    def _scan_symbol_prefix(self, kwargs):
        """Turn 'symbol_prefix' into --symbol-prefix arguments (consumes it)."""
        ret = []

        if 'symbol_prefix' in kwargs:
            sym_prefixes = mesonlib.stringlistify(kwargs.pop('symbol_prefix', []))
            ret += ['--symbol-prefix=%s' % sym_prefix for sym_prefix in sym_prefixes]

        return ret

    def _scan_identifier_prefix(self, kwargs):
        """Turn 'identifier_prefix' into an --identifier-prefix argument (consumes it)."""
        ret = []

        if 'identifier_prefix' in kwargs:
            identifier_prefix = kwargs.pop('identifier_prefix')
            if not isinstance(identifier_prefix, str):
                raise MesonException('Gir identifier prefix must be str')
            ret += ['--identifier-prefix=%s' % identifier_prefix]

        return ret

    def _scan_export_packages(self, kwargs):
        """Turn 'export_packages' into --pkg-export arguments (consumes it)."""
        ret = []

        if 'export_packages' in kwargs:
            pkgs = kwargs.pop('export_packages')
            if isinstance(pkgs, str):
                ret += ['--pkg-export=%s' % pkgs]
            elif isinstance(pkgs, list):
                ret += ['--pkg-export=%s' % pkg for pkg in pkgs]
            else:
                raise MesonException('Gir export packages must be str or list')

        return ret

    def _scan_inc_dirs(self, kwargs):
        """Extract and validate 'include_directories' (consumes it)."""
        ret = mesonlib.extract_as_list(kwargs, 'include_directories', pop = True)
        for incd in ret:
            if not isinstance(incd, (str, build.IncludeDirs)):
                raise MesonException(
                    'Gir include dirs should be include_directories().')
        return ret

    def _scan_langs(self, state, langs):
        """Collect -L link args from the per-language external link args."""
        ret = []

        for lang in langs:
            link_args = state.environment.coredata.get_external_link_args(MachineChoice.HOST, lang)
            for link_arg in link_args:
                if link_arg.startswith('-L'):
                    ret.append(link_arg)

        return ret

    def _scan_gir_targets(self, state, girtargets):
        """Turn the targets being introspected into --program/--library args."""
        ret = []

        for girtarget in girtargets:
            if isinstance(girtarget, build.Executable):
                ret += ['--program', girtarget]
            else:
                # Because of https://gitlab.gnome.org/GNOME/gobject-introspection/merge_requests/72
                # we can't use the full path until this is merged.
                libpath = os.path.join(girtarget.get_subdir(), girtarget.get_filename())
                # Must use absolute paths here because g-ir-scanner will not
                # add them to the runtime path list if they're relative. This
                # means we cannot use @BUILD_ROOT@
                build_root = state.environment.get_build_dir()
                if isinstance(girtarget, build.SharedLibrary):
                    # need to put our output directory first as we need to use the
                    # generated libraries instead of any possibly installed system/prefix
                    # ones.
                    ret += ["-L{}/{}".format(build_root, os.path.dirname(libpath))]
                    libname = girtarget.get_basename()
                else:
                    libname = os.path.join(f"{build_root}/{libpath}")
                ret += ['--library', libname]
                # Needed for the following binutils bug:
                # https://github.com/mesonbuild/meson/issues/1911
                # However, g-ir-scanner does not understand -Wl,-rpath
                # so we need to use -L instead
                for d in state.backend.determine_rpath_dirs(girtarget):
                    d = os.path.join(state.environment.get_build_dir(), d)
                    ret.append('-L' + d)

        return ret

    def _get_girtargets_langs_compilers(self, girtargets: T.List[GirTarget]) -> T.List[T.Tuple[str, 'Compiler']]:
        """Return (language, compiler) pairs for the introspected targets,
        keeping only introspectable languages (one per target).
        """
        ret: T.List[T.Tuple[str, 'Compiler']] = []
        for girtarget in girtargets:
            for lang, compiler in girtarget.compilers.items():
                # XXX: Can you use g-i with any other language?
                if lang in ('c', 'cpp', 'objc', 'objcpp', 'd'):
                    ret.append((lang, compiler))
                    break

        return ret

    def _get_gir_targets_deps(self, girtargets):
        """Collect link deps and external deps of all introspected targets."""
        ret = []
        for girtarget in girtargets:
            ret += girtarget.get_all_link_deps()
            ret += girtarget.get_external_deps()
        return ret

    def _get_gir_targets_inc_dirs(self, girtargets):
        """Collect include directories of all introspected targets."""
        ret = []
        for girtarget in girtargets:
            ret += girtarget.get_include_dirs()
        return ret

    def _get_langs_compilers_flags(self, state, langs_compilers: T.List[T.Tuple[str, 'Compiler']]):
        """Gather per-language global/project cflags plus sanitizer flags."""
        cflags = []
        internal_ldflags = []
        external_ldflags = []

        for lang, compiler in langs_compilers:
            if state.global_args.get(lang):
                cflags += state.global_args[lang]
            if state.project_args.get(lang):
                cflags += state.project_args[lang]
            if mesonlib.OptionKey('b_sanitize') in compiler.base_options:
                sanitize = state.environment.coredata.options[mesonlib.OptionKey('b_sanitize')].value
                cflags += compiler.sanitizer_compile_args(sanitize)
                sanitize = sanitize.split(',')
                # These must be first in ldflags
                if 'address' in sanitize:
                    internal_ldflags += ['-lasan']
                if 'thread' in sanitize:
                    internal_ldflags += ['-ltsan']
                if 'undefined' in sanitize:
                    internal_ldflags += ['-lubsan']
                # FIXME: Linking directly to lib*san is not recommended but g-ir-scanner
                # does not understand -f LDFLAGS. https://bugzilla.gnome.org/show_bug.cgi?id=783892
                # ldflags += compiler.sanitizer_link_args(sanitize)

        return cflags, internal_ldflags, external_ldflags

    def _make_gir_filelist(self, state, srcdir, ns, nsversion, girtargets, libsources):
        """Write the list of source files for g-ir-scanner into the target's
        private dir and return the filelist path.
        """
        gir_filelist_dir = state.backend.get_target_private_dir_abs(girtargets[0])
        if not os.path.isdir(gir_filelist_dir):
            os.mkdir(gir_filelist_dir)
        gir_filelist_filename = os.path.join(gir_filelist_dir, f'{ns}_{nsversion}_gir_filelist')

        with open(gir_filelist_filename, 'w', encoding='utf-8') as gir_filelist:
            for s in libsources:
                if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)):
                    for custom_output in s.get_outputs():
                        gir_filelist.write(os.path.join(state.environment.get_build_dir(),
                                                        state.backend.get_target_dir(s),
                                                        custom_output) + '\n')
                elif isinstance(s, mesonlib.File):
                    gir_filelist.write(s.rel_to_builddir(state.build_to_src) + '\n')
                elif isinstance(s, build.GeneratedList):
                    for gen_src in s.get_outputs():
                        gir_filelist.write(os.path.join(srcdir, gen_src) + '\n')
                else:
                    gir_filelist.write(os.path.join(srcdir, s) + '\n')

        return gir_filelist_filename

    def _make_gir_target(self, state, girfile, scan_command, generated_files, depends, kwargs):
        """Build the GirTarget (custom target running g-ir-scanner)."""
        scankwargs = {'input': generated_files,
                      'output': girfile,
                      'command': scan_command,
                      'depends': depends}

        if 'install' in kwargs:
            scankwargs['install'] = kwargs['install']
            scankwargs['install_dir'] = kwargs.get('install_dir_gir',
                                                   os.path.join(state.environment.get_datadir(), 'gir-1.0'))

        if 'build_by_default' in kwargs:
            scankwargs['build_by_default'] = kwargs['build_by_default']

        return GirTarget(girfile, state.subdir, state.subproject, scankwargs)

    def _make_typelib_target(self, state, typelib_output, typelib_cmd, generated_files, kwargs):
        """Build the TypelibTarget (custom target running g-ir-compiler)."""
        typelib_kwargs = {
            'input': generated_files,
            'output': typelib_output,
            'command': typelib_cmd,
        }

        if 'install' in kwargs:
            typelib_kwargs['install'] = kwargs['install']
            typelib_kwargs['install_dir'] = kwargs.get('install_dir_typelib',
                                                       os.path.join(state.environment.get_libdir(), 'girepository-1.0'))

        if 'build_by_default' in kwargs:
            typelib_kwargs['build_by_default'] = kwargs['build_by_default']

        return TypelibTarget(typelib_output, state.subdir, state.subproject, typelib_kwargs)

    # May mutate depends
    def _gather_typelib_includes_and_update_depends(self, state, deps, depends):
        # Need to recursively add deps on GirTarget sources from our
        # dependencies and also find the include directories needed for the
        # typelib generation custom target below.
        typelib_includes = []
        for dep in deps:
            # Add a dependency on each GirTarget listed in dependencies and add
            # the directory where it will be generated to the typelib includes
            if isinstance(dep, InternalDependency):
                for source in dep.sources:
                    if isinstance(source, GirTarget) and source not in depends:
                        depends.append(source)
                        subdir = os.path.join(state.environment.get_build_dir(),
                                              source.get_subdir())
                        if subdir not in typelib_includes:
                            typelib_includes.append(subdir)
            # Do the same, but for dependencies of dependencies. These are
            # stored in the list of generated sources for each link dep (from
            # girtarget.get_all_link_deps() above).
            # FIXME: Store this in the original form from declare_dependency()
            # so it can be used here directly.
+ elif isinstance(dep, build.SharedLibrary): + for source in dep.generated: + if isinstance(source, GirTarget): + subdir = os.path.join(state.environment.get_build_dir(), + source.get_subdir()) + if subdir not in typelib_includes: + typelib_includes.append(subdir) + if isinstance(dep, Dependency): + girdir = dep.get_variable(pkgconfig='girdir', internal='girdir', default_value='') + if girdir and girdir not in typelib_includes: + typelib_includes.append(girdir) + return typelib_includes + + def _get_external_args_for_langs(self, state, langs): + ret = [] + for lang in langs: + ret += state.environment.coredata.get_external_args(MachineChoice.HOST, lang) + return ret + + @staticmethod + def _get_scanner_cflags(cflags): + 'g-ir-scanner only accepts -I/-D/-U; must ignore all other flags' + for f in cflags: + # _FORTIFY_SOURCE depends on / works together with -O, on the other hand this + # just invokes the preprocessor anyway + if f.startswith(('-D', '-U', '-I')) and not f.startswith('-D_FORTIFY_SOURCE'): + yield f + + @staticmethod + def _get_scanner_ldflags(ldflags): + 'g-ir-scanner only accepts -L/-l; must ignore -F and other linker flags' + for f in ldflags: + if f.startswith(('-L', '-l', '--extra-library')): + yield f + + @FeatureNewKwargs('generate_gir', '0.55.0', ['fatal_warnings']) + @FeatureNewKwargs('generate_gir', '0.40.0', ['build_by_default']) + @permittedKwargs({'sources', 'nsversion', 'namespace', 'symbol_prefix', 'identifier_prefix', + 'export_packages', 'includes', 'dependencies', 'link_with', 'include_directories', + 'install', 'install_dir_gir', 'install_dir_typelib', 'extra_args', + 'packages', 'header', 'build_by_default', 'fatal_warnings'}) + def generate_gir(self, state, args, kwargs: T.Dict[str, T.Any]): + if not args: + raise MesonException('generate_gir takes at least one argument') + if kwargs.get('install_dir'): + raise MesonException('install_dir is not supported with generate_gir(), see "install_dir_gir" and "install_dir_typelib"') + + 
girtargets = [self._unwrap_gir_target(arg, state) for arg in args] + + if len(girtargets) > 1 and any([isinstance(el, build.Executable) for el in girtargets]): + raise MesonException('generate_gir only accepts a single argument when one of the arguments is an executable') + + gir_dep, giscanner, gicompiler = self._get_gir_dep(state) + + ns = kwargs.get('namespace') + if not ns: + raise MesonException('Missing "namespace" keyword argument') + nsversion = kwargs.get('nsversion') + if not nsversion: + raise MesonException('Missing "nsversion" keyword argument') + libsources = mesonlib.extract_as_list(kwargs, 'sources', pop=True) + girfile = f'{ns}-{nsversion}.gir' + srcdir = os.path.join(state.environment.get_source_dir(), state.subdir) + builddir = os.path.join(state.environment.get_build_dir(), state.subdir) + depends = gir_dep.sources + girtargets + gir_inc_dirs = [] + langs_compilers = self._get_girtargets_langs_compilers(girtargets) + cflags, internal_ldflags, external_ldflags = self._get_langs_compilers_flags(state, langs_compilers) + deps = self._get_gir_targets_deps(girtargets) + deps += extract_as_list(kwargs, 'dependencies', pop=True) + deps += [gir_dep] + typelib_includes = self._gather_typelib_includes_and_update_depends(state, deps, depends) + # ldflags will be misinterpreted by gir scanner (showing + # spurious dependencies) but building GStreamer fails if they + # are not used here. 
+ dep_cflags, dep_internal_ldflags, dep_external_ldflags, gi_includes = \ + self._get_dependencies_flags(deps, state, depends, use_gir_args=True) + cflags += list(self._get_scanner_cflags(dep_cflags)) + cflags += list(self._get_scanner_cflags(self._get_external_args_for_langs(state, [lc[0] for lc in langs_compilers]))) + internal_ldflags += list(self._get_scanner_ldflags(dep_internal_ldflags)) + external_ldflags += list(self._get_scanner_ldflags(dep_external_ldflags)) + girtargets_inc_dirs = self._get_gir_targets_inc_dirs(girtargets) + inc_dirs = self._scan_inc_dirs(kwargs) + + scan_command = [giscanner] + scan_command += ['--no-libtool'] + scan_command += ['--namespace=' + ns, '--nsversion=' + nsversion] + scan_command += ['--warn-all'] + scan_command += ['--output', '@OUTPUT@'] + scan_command += self._scan_header(kwargs) + scan_command += self._scan_extra_args(kwargs) + scan_command += ['-I' + srcdir, '-I' + builddir] + scan_command += state.get_include_args(girtargets_inc_dirs) + scan_command += ['--filelist=' + self._make_gir_filelist(state, srcdir, ns, nsversion, girtargets, libsources)] + scan_command += self._scan_link_withs(state, depends, kwargs) + scan_command += self._scan_include(state, depends, gir_inc_dirs, kwargs) + scan_command += self._scan_symbol_prefix(kwargs) + scan_command += self._scan_identifier_prefix(kwargs) + scan_command += self._scan_export_packages(kwargs) + scan_command += ['--cflags-begin'] + scan_command += cflags + scan_command += ['--cflags-end'] + scan_command += state.get_include_args(inc_dirs) + scan_command += state.get_include_args(list(gi_includes) + gir_inc_dirs + inc_dirs, prefix='--add-include-path=') + scan_command += list(internal_ldflags) + scan_command += self._scan_gir_targets(state, girtargets) + scan_command += self._scan_langs(state, [lc[0] for lc in langs_compilers]) + scan_command += list(external_ldflags) + + if self._gir_has_option('--sources-top-dirs'): + scan_command += ['--sources-top-dirs', 
os.path.join(state.environment.get_source_dir(), self.interpreter.subproject_dir, state.subproject)] + scan_command += ['--sources-top-dirs', os.path.join(state.environment.get_build_dir(), self.interpreter.subproject_dir, state.subproject)] + + if '--warn-error' in scan_command: + mlog.deprecation('Passing --warn-error is deprecated in favor of "fatal_warnings" keyword argument since v0.55') + fatal_warnings = kwargs.get('fatal_warnings', False) + if not isinstance(fatal_warnings, bool): + raise MesonException('fatal_warnings keyword argument must be a boolean') + if fatal_warnings: + scan_command.append('--warn-error') + + generated_files = [f for f in libsources if isinstance(f, (GeneratedList, CustomTarget, CustomTargetIndex))] + + scan_target = self._make_gir_target(state, girfile, scan_command, generated_files, depends, kwargs) + + typelib_output = f'{ns}-{nsversion}.typelib' + typelib_cmd = [gicompiler, scan_target, '--output', '@OUTPUT@'] + typelib_cmd += state.get_include_args(gir_inc_dirs, prefix='--includedir=') + + for incdir in typelib_includes: + typelib_cmd += ["--includedir=" + incdir] + + typelib_target = self._make_typelib_target(state, typelib_output, typelib_cmd, generated_files, kwargs) + + self._devenv_append('GI_TYPELIB_PATH', os.path.join(state.environment.get_build_dir(), state.subdir)) + + rv = [scan_target, typelib_target] + + return ModuleReturnValue(rv, rv) + + @FeatureNewKwargs('build target', '0.40.0', ['build_by_default']) + @permittedKwargs({'build_by_default', 'depend_files'}) + def compile_schemas(self, state, args, kwargs): + if args: + raise MesonException('Compile_schemas does not take positional arguments.') + srcdir = os.path.join(state.build_to_src, state.subdir) + outdir = state.subdir + + cmd = [state.find_program('glib-compile-schemas')] + cmd += ['--targetdir', outdir, srcdir] + kwargs['command'] = cmd + kwargs['input'] = [] + kwargs['output'] = 'gschemas.compiled' + if state.subdir == '': + targetname = 
'gsettings-compile' + else: + targetname = 'gsettings-compile-' + state.subdir.replace('/', '_') + target_g = build.CustomTarget(targetname, state.subdir, state.subproject, kwargs) + self._devenv_append('GSETTINGS_SCHEMA_DIR', os.path.join(state.environment.get_build_dir(), state.subdir)) + return ModuleReturnValue(target_g, [target_g]) + + @permittedKwargs({'sources', 'media', 'symlink_media', 'languages'}) + @FeatureDeprecatedKwargs('gnome.yelp', '0.43.0', ['languages'], + 'Use a LINGUAS file in the source directory instead') + def yelp(self, state, args, kwargs): + if len(args) < 1: + raise MesonException('Yelp requires a project id') + + project_id = args[0] + sources = mesonlib.stringlistify(kwargs.pop('sources', [])) + if not sources: + if len(args) > 1: + sources = mesonlib.stringlistify(args[1:]) + if not sources: + raise MesonException('Yelp requires a list of sources') + source_str = '@@'.join(sources) + + langs = mesonlib.stringlistify(kwargs.pop('languages', [])) + media = mesonlib.stringlistify(kwargs.pop('media', [])) + symlinks = kwargs.pop('symlink_media', True) + + if not isinstance(symlinks, bool): + raise MesonException('symlink_media must be a boolean') + + if kwargs: + raise MesonException('Unknown arguments passed: {}'.format(', '.join(kwargs.keys()))) + + script = state.environment.get_build_command() + args = ['--internal', + 'yelphelper', + 'install', + '--subdir=' + state.subdir, + '--id=' + project_id, + '--installdir=' + os.path.join(state.environment.get_datadir(), 'help'), + '--sources=' + source_str] + if symlinks: + args.append('--symlinks=true') + if media: + args.append('--media=' + '@@'.join(media)) + if langs: + args.append('--langs=' + '@@'.join(langs)) + inscript = state.backend.get_executable_serialisation(script + args) + + potargs = state.environment.get_build_command() + [ + '--internal', 'yelphelper', 'pot', + '--subdir=' + state.subdir, + '--id=' + project_id, + '--sources=' + source_str, + ] + pottarget = 
build.RunTarget('help-' + project_id + '-pot', potargs, + [], state.subdir, state.subproject) + + poargs = state.environment.get_build_command() + [ + '--internal', 'yelphelper', 'update-po', + '--subdir=' + state.subdir, + '--id=' + project_id, + '--sources=' + source_str, + '--langs=' + '@@'.join(langs), + ] + potarget = build.RunTarget('help-' + project_id + '-update-po', poargs, + [], state.subdir, state.subproject) + + rv = [inscript, pottarget, potarget] + return ModuleReturnValue(None, rv) + + @FeatureNewKwargs('gnome.gtkdoc', '0.52.0', ['check']) + @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['c_args']) + @FeatureNewKwargs('gnome.gtkdoc', '0.48.0', ['module_version']) + @FeatureNewKwargs('gnome.gtkdoc', '0.37.0', ['namespace', 'mode']) + @permittedKwargs({'main_xml', 'main_sgml', 'src_dir', 'dependencies', 'install', + 'install_dir', 'scan_args', 'scanobjs_args', 'gobject_typesfile', + 'fixxref_args', 'html_args', 'html_assets', 'content_files', + 'mkdb_args', 'ignore_headers', 'include_directories', + 'namespace', 'mode', 'expand_content_files', 'module_version', + 'c_args', 'check'}) + def gtkdoc(self, state, args, kwargs): + if len(args) != 1: + raise MesonException('Gtkdoc must have one positional argument.') + modulename = args[0] + if not isinstance(modulename, str): + raise MesonException('Gtkdoc arg must be string.') + if 'src_dir' not in kwargs: + raise MesonException('Keyword argument src_dir missing.') + main_file = kwargs.get('main_sgml', '') + if not isinstance(main_file, str): + raise MesonException('Main sgml keyword argument must be a string.') + main_xml = kwargs.get('main_xml', '') + if not isinstance(main_xml, str): + raise MesonException('Main xml keyword argument must be a string.') + moduleversion = kwargs.get('module_version', '') + if not isinstance(moduleversion, str): + raise MesonException('Module version keyword argument must be a string.') + if main_xml != '': + if main_file != '': + raise MesonException('You can only specify 
main_xml or main_sgml, not both.') + main_file = main_xml + targetname = modulename + ('-' + moduleversion if moduleversion else '') + '-doc' + command = state.environment.get_build_command() + + namespace = kwargs.get('namespace', '') + mode = kwargs.get('mode', 'auto') + VALID_MODES = ('xml', 'sgml', 'none', 'auto') + if mode not in VALID_MODES: + raise MesonException(f'gtkdoc: Mode {mode} is not a valid mode: {VALID_MODES}') + + src_dirs = mesonlib.extract_as_list(kwargs, 'src_dir') + header_dirs = [] + for src_dir in src_dirs: + if isinstance(src_dir, HoldableObject): + if not isinstance(src_dir, build.IncludeDirs): + raise MesonException('Invalid keyword argument for src_dir.') + for inc_dir in src_dir.get_incdirs(): + header_dirs.append(os.path.join(state.environment.get_source_dir(), + src_dir.get_curdir(), inc_dir)) + header_dirs.append(os.path.join(state.environment.get_build_dir(), + src_dir.get_curdir(), inc_dir)) + else: + header_dirs.append(src_dir) + + args = ['--internal', 'gtkdoc', + '--sourcedir=' + state.environment.get_source_dir(), + '--builddir=' + state.environment.get_build_dir(), + '--subdir=' + state.subdir, + '--headerdirs=' + '@@'.join(header_dirs), + '--mainfile=' + main_file, + '--modulename=' + modulename, + '--moduleversion=' + moduleversion, + '--mode=' + mode] + for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']: + program_name = 'gtkdoc-' + tool + program = state.find_program(program_name) + path = program.get_path() + args.append(f'--{program_name}={path}') + if namespace: + args.append('--namespace=' + namespace) + args += self._unpack_args('--htmlargs=', 'html_args', kwargs) + args += self._unpack_args('--scanargs=', 'scan_args', kwargs) + args += self._unpack_args('--scanobjsargs=', 'scanobjs_args', kwargs) + args += self._unpack_args('--gobjects-types-file=', 'gobject_typesfile', kwargs, state) + args += self._unpack_args('--fixxrefargs=', 'fixxref_args', kwargs) + args += self._unpack_args('--mkdbargs=', 
'mkdb_args', kwargs) + args += self._unpack_args('--html-assets=', 'html_assets', kwargs, state) + + depends = [] + content_files = [] + for s in mesonlib.extract_as_list(kwargs, 'content_files'): + if isinstance(s, (build.CustomTarget, build.CustomTargetIndex)): + depends.append(s) + for o in s.get_outputs(): + content_files.append(os.path.join(state.environment.get_build_dir(), + state.backend.get_target_dir(s), + o)) + elif isinstance(s, mesonlib.File): + content_files.append(s.absolute_path(state.environment.get_source_dir(), + state.environment.get_build_dir())) + elif isinstance(s, build.GeneratedList): + depends.append(s) + for gen_src in s.get_outputs(): + content_files.append(os.path.join(state.environment.get_source_dir(), + state.subdir, + gen_src)) + elif isinstance(s, str): + content_files.append(os.path.join(state.environment.get_source_dir(), + state.subdir, + s)) + else: + raise MesonException( + f'Invalid object type: {s.__class__.__name__!r}') + args += ['--content-files=' + '@@'.join(content_files)] + + args += self._unpack_args('--expand-content-files=', 'expand_content_files', kwargs, state) + args += self._unpack_args('--ignore-headers=', 'ignore_headers', kwargs) + args += self._unpack_args('--installdir=', 'install_dir', kwargs) + args += self._get_build_args(kwargs, state, depends) + custom_kwargs = {'output': modulename + '-decl.txt', + 'command': command + args, + 'depends': depends, + 'build_always_stale': True, + } + custom_target = build.CustomTarget(targetname, state.subdir, state.subproject, custom_kwargs) + alias_target = build.AliasTarget(targetname, [custom_target], state.subdir, state.subproject) + if kwargs.get('check', False): + check_cmd = state.find_program('gtkdoc-check') + check_env = ['DOC_MODULE=' + modulename, + 'DOC_MAIN_SGML_FILE=' + main_file] + check_args = [targetname + '-check', check_cmd] + check_workdir = os.path.join(state.environment.get_build_dir(), state.subdir) + state.test(check_args, env=check_env, 
workdir=check_workdir, depends=custom_target) + res = [custom_target, alias_target] + if kwargs.get('install', True): + res.append(state.backend.get_executable_serialisation(command + args)) + return ModuleReturnValue(custom_target, res) + + def _get_build_args(self, kwargs, state, depends): + args = [] + deps = extract_as_list(kwargs, 'dependencies') + cflags = [] + cflags.extend(mesonlib.stringlistify(kwargs.pop('c_args', []))) + deps_cflags, internal_ldflags, external_ldflags, gi_includes = \ + self._get_dependencies_flags(deps, state, depends, include_rpath=True) + inc_dirs = mesonlib.extract_as_list(kwargs, 'include_directories') + for incd in inc_dirs: + if not isinstance(incd, (str, build.IncludeDirs)): + raise MesonException( + 'Gir include dirs should be include_directories().') + + cflags.extend(deps_cflags) + cflags.extend(state.get_include_args(inc_dirs)) + ldflags = [] + ldflags.extend(internal_ldflags) + ldflags.extend(external_ldflags) + + cflags.extend(state.environment.coredata.get_external_args(MachineChoice.HOST, 'c')) + ldflags.extend(state.environment.coredata.get_external_link_args(MachineChoice.HOST, 'c')) + compiler = state.environment.coredata.compilers[MachineChoice.HOST]['c'] + + compiler_flags = self._get_langs_compilers_flags(state, [('c', compiler)]) + cflags.extend(compiler_flags[0]) + ldflags.extend(compiler_flags[1]) + ldflags.extend(compiler_flags[2]) + if compiler: + args += ['--cc=%s' % join_args(compiler.get_exelist())] + args += ['--ld=%s' % join_args(compiler.get_linker_exelist())] + if cflags: + args += ['--cflags=%s' % join_args(cflags)] + if ldflags: + args += ['--ldflags=%s' % join_args(ldflags)] + + return args + + @noKwargs + def gtkdoc_html_dir(self, state, args, kwargs): + if len(args) != 1: + raise MesonException('Must have exactly one argument.') + modulename = args[0] + if not isinstance(modulename, str): + raise MesonException('Argument must be a string') + return os.path.join('share/gtk-doc/html', modulename) + + 
@staticmethod + def _unpack_args(arg, kwarg_name, kwargs, expend_file_state=None): + if kwarg_name not in kwargs: + return [] + + new_args = mesonlib.extract_as_list(kwargs, kwarg_name) + args = [] + for i in new_args: + if expend_file_state and isinstance(i, mesonlib.File): + i = i.absolute_path(expend_file_state.environment.get_source_dir(), expend_file_state.environment.get_build_dir()) + elif expend_file_state and isinstance(i, str): + i = os.path.join(expend_file_state.environment.get_source_dir(), expend_file_state.subdir, i) + elif not isinstance(i, str): + raise MesonException(kwarg_name + ' values must be strings.') + args.append(i) + + if args: + return [arg + '@@'.join(args)] + + return [] + + def _get_autocleanup_args(self, kwargs, glib_version): + if not mesonlib.version_compare(glib_version, '>= 2.49.1'): + # Warn if requested, silently disable if not + if 'autocleanup' in kwargs: + mlog.warning('Glib version ({}) is too old to support the \'autocleanup\' ' + 'kwarg, need 2.49.1 or newer'.format(glib_version)) + return [] + autocleanup = kwargs.pop('autocleanup', 'all') + values = ('none', 'objects', 'all') + if autocleanup not in values: + raise MesonException('gdbus_codegen does not support {!r} as an autocleanup value, ' + 'must be one of: {!r}'.format(autocleanup, ', '.join(values))) + return ['--c-generate-autocleanup', autocleanup] + + @FeatureNewKwargs('build target', '0.46.0', ['install_header', 'install_dir', 'sources']) + @FeatureNewKwargs('build target', '0.40.0', ['build_by_default']) + @FeatureNewKwargs('build target', '0.47.0', ['extra_args', 'autocleanup']) + @permittedKwargs({'interface_prefix', 'namespace', 'extra_args', 'autocleanup', 'object_manager', 'build_by_default', + 'annotations', 'docbook', 'install_header', 'install_dir', 'sources'}) + def gdbus_codegen(self, state, args, kwargs): + if len(args) not in (1, 2): + raise MesonException('gdbus_codegen takes at most two arguments, name and xml file.') + namebase = args[0] + 
xml_files = args[1:] + cmd = [state.find_program('gdbus-codegen')] + extra_args = mesonlib.stringlistify(kwargs.pop('extra_args', [])) + cmd += extra_args + # Autocleanup supported? + glib_version = self._get_native_glib_version(state) + cmd += self._get_autocleanup_args(kwargs, glib_version) + if 'interface_prefix' in kwargs: + cmd += ['--interface-prefix', kwargs.pop('interface_prefix')] + if 'namespace' in kwargs: + cmd += ['--c-namespace', kwargs.pop('namespace')] + if kwargs.get('object_manager', False): + cmd += ['--c-generate-object-manager'] + if 'sources' in kwargs: + xml_files += mesonlib.listify(kwargs.pop('sources')) + build_by_default = kwargs.get('build_by_default', False) + + # Annotations are a bit ugly in that they are a list of lists of strings... + annotations = kwargs.pop('annotations', []) + if not isinstance(annotations, list): + raise MesonException('annotations takes a list') + if annotations and isinstance(annotations, list) and not isinstance(annotations[0], list): + annotations = [annotations] + + for annotation in annotations: + if len(annotation) != 3 or not all(isinstance(i, str) for i in annotation): + raise MesonException('Annotations must be made up of 3 strings for ELEMENT, KEY, and VALUE') + cmd += ['--annotate'] + annotation + + targets = [] + install_header = kwargs.get('install_header', False) + install_dir = kwargs.get('install_dir', state.environment.coredata.get_option(mesonlib.OptionKey('includedir'))) + + output = namebase + '.c' + # Added in https://gitlab.gnome.org/GNOME/glib/commit/e4d68c7b3e8b01ab1a4231bf6da21d045cb5a816 (2.55.2) + # Fixed in https://gitlab.gnome.org/GNOME/glib/commit/cd1f82d8fc741a2203582c12cc21b4dacf7e1872 (2.56.2) + if mesonlib.version_compare(glib_version, '>= 2.56.2'): + custom_kwargs = {'input': xml_files, + 'output': output, + 'command': cmd + ['--body', '--output', '@OUTPUT@', '@INPUT@'], + 'build_by_default': build_by_default + } + else: + if 'docbook' in kwargs: + docbook = kwargs['docbook'] 
+ if not isinstance(docbook, str): + raise MesonException('docbook value must be a string.') + + cmd += ['--generate-docbook', docbook] + + # https://git.gnome.org/browse/glib/commit/?id=ee09bb704fe9ccb24d92dd86696a0e6bb8f0dc1a + if mesonlib.version_compare(glib_version, '>= 2.51.3'): + cmd += ['--output-directory', '@OUTDIR@', '--generate-c-code', namebase, '@INPUT@'] + else: + self._print_gdbus_warning() + cmd += ['--generate-c-code', '@OUTDIR@/' + namebase, '@INPUT@'] + + custom_kwargs = {'input': xml_files, + 'output': output, + 'command': cmd, + 'build_by_default': build_by_default + } + + cfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs) + targets.append(cfile_custom_target) + + output = namebase + '.h' + if mesonlib.version_compare(glib_version, '>= 2.56.2'): + custom_kwargs = {'input': xml_files, + 'output': output, + 'command': cmd + ['--header', '--output', '@OUTPUT@', '@INPUT@'], + 'build_by_default': build_by_default, + 'install': install_header, + 'install_dir': install_dir + } + else: + custom_kwargs = {'input': xml_files, + 'output': output, + 'command': cmd, + 'build_by_default': build_by_default, + 'install': install_header, + 'install_dir': install_dir, + 'depends': cfile_custom_target + } + + hfile_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs) + targets.append(hfile_custom_target) + + if 'docbook' in kwargs: + docbook = kwargs['docbook'] + if not isinstance(docbook, str): + raise MesonException('docbook value must be a string.') + + docbook_cmd = cmd + ['--output-directory', '@OUTDIR@', '--generate-docbook', docbook, '@INPUT@'] + + # The docbook output is always ${docbook}-${name_of_xml_file} + output = namebase + '-docbook' + outputs = [] + for f in xml_files: + outputs.append('{}-{}'.format(docbook, os.path.basename(str(f)))) + + if mesonlib.version_compare(glib_version, '>= 2.56.2'): + custom_kwargs = {'input': xml_files, + 'output': outputs, + 'command': 
docbook_cmd, + 'build_by_default': build_by_default + } + else: + custom_kwargs = {'input': xml_files, + 'output': outputs, + 'command': cmd, + 'build_by_default': build_by_default, + 'depends': cfile_custom_target + } + + docbook_custom_target = build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs) + targets.append(docbook_custom_target) + + return ModuleReturnValue(targets, targets) + + @permittedKwargs({'sources', 'c_template', 'h_template', 'install_header', 'install_dir', + 'comments', 'identifier_prefix', 'symbol_prefix', 'eprod', 'vprod', + 'fhead', 'fprod', 'ftail', 'vhead', 'vtail', 'depends'}) + def mkenums(self, state, args, kwargs): + if len(args) != 1: + raise MesonException('Mkenums requires one positional argument.') + basename = args[0] + + if 'sources' not in kwargs: + raise MesonException('Missing keyword argument "sources".') + sources = kwargs.pop('sources') + if isinstance(sources, str): + sources = [sources] + elif not isinstance(sources, list): + raise MesonException( + 'Sources keyword argument must be a string or array.') + + cmd = [] + known_kwargs = ['comments', 'eprod', 'fhead', 'fprod', 'ftail', + 'identifier_prefix', 'symbol_prefix', 'template', + 'vhead', 'vprod', 'vtail'] + known_custom_target_kwargs = ['install_dir', 'build_always', + 'depends', 'depend_files'] + c_template = h_template = None + install_header = False + for arg, value in kwargs.items(): + if arg == 'sources': + raise AssertionError("sources should've already been handled") + elif arg == 'c_template': + c_template = value + if isinstance(c_template, mesonlib.File): + c_template = c_template.absolute_path(state.environment.source_dir, state.environment.build_dir) + if 'template' in kwargs: + raise MesonException('Mkenums does not accept both ' + 'c_template and template keyword ' + 'arguments at the same time.') + elif arg == 'h_template': + h_template = value + if isinstance(h_template, mesonlib.File): + h_template = 
h_template.absolute_path(state.environment.source_dir, state.environment.build_dir) + if 'template' in kwargs: + raise MesonException('Mkenums does not accept both ' + 'h_template and template keyword ' + 'arguments at the same time.') + elif arg == 'install_header': + install_header = value + elif arg in known_kwargs: + cmd += ['--' + arg.replace('_', '-'), value] + elif arg not in known_custom_target_kwargs: + raise MesonException( + f'Mkenums does not take a {arg} keyword argument.') + cmd = [state.find_program(['glib-mkenums', 'mkenums'])] + cmd + custom_kwargs = {} + for arg in known_custom_target_kwargs: + if arg in kwargs: + custom_kwargs[arg] = kwargs[arg] + + targets = [] + + if h_template is not None: + h_output = os.path.basename(os.path.splitext(h_template)[0]) + # We always set template as the first element in the source array + # so --template consumes it. + h_cmd = cmd + ['--template', '@INPUT@'] + h_sources = [h_template] + sources + custom_kwargs['install'] = install_header + if 'install_dir' not in custom_kwargs: + custom_kwargs['install_dir'] = \ + state.environment.coredata.get_option(mesonlib.OptionKey('includedir')) + h_target = self._make_mkenum_custom_target(state, h_sources, + h_output, h_cmd, + custom_kwargs) + targets.append(h_target) + + if c_template is not None: + c_output = os.path.basename(os.path.splitext(c_template)[0]) + # We always set template as the first element in the source array + # so --template consumes it. + c_cmd = cmd + ['--template', '@INPUT@'] + c_sources = [c_template] + sources + # Never install the C file. Complain on bug tracker if you need it. 
+ custom_kwargs['install'] = False + if h_template is not None: + if 'depends' in custom_kwargs: + custom_kwargs['depends'] += [h_target] + else: + custom_kwargs['depends'] = h_target + c_target = self._make_mkenum_custom_target(state, c_sources, + c_output, c_cmd, + custom_kwargs) + targets.insert(0, c_target) + + if c_template is None and h_template is None: + generic_cmd = cmd + ['@INPUT@'] + custom_kwargs['install'] = install_header + if 'install_dir' not in custom_kwargs: + custom_kwargs['install_dir'] = \ + state.environment.coredata.get_option(mesonlib.OptionKey('includedir')) + target = self._make_mkenum_custom_target(state, sources, basename, + generic_cmd, custom_kwargs) + return ModuleReturnValue(target, [target]) + elif len(targets) == 1: + return ModuleReturnValue(targets[0], [targets[0]]) + else: + return ModuleReturnValue(targets, targets) + + @FeatureNew('gnome.mkenums_simple', '0.42.0') + def mkenums_simple(self, state, args, kwargs): + hdr_filename = args[0] + '.h' + body_filename = args[0] + '.c' + + # not really needed, just for sanity checking + forbidden_kwargs = ['c_template', 'h_template', 'eprod', 'fhead', + 'fprod', 'ftail', 'vhead', 'vtail', 'comments'] + for arg in forbidden_kwargs: + if arg in kwargs: + raise MesonException(f'mkenums_simple() does not take a {arg} keyword argument') + + # kwargs to pass as-is from mkenums_simple() to mkenums() + shared_kwargs = ['sources', 'install_header', 'install_dir', + 'identifier_prefix', 'symbol_prefix'] + mkenums_kwargs = {} + for arg in shared_kwargs: + if arg in kwargs: + mkenums_kwargs[arg] = kwargs[arg] + + # .c file generation + c_file_kwargs = copy.deepcopy(mkenums_kwargs) + if 'sources' not in kwargs: + raise MesonException('Missing keyword argument "sources".') + sources = kwargs['sources'] + if isinstance(sources, str): + sources = [sources] + elif not isinstance(sources, list): + raise MesonException( + 'Sources keyword argument must be a string or array.') + + # The `install_header` 
argument will be used by mkenums() when + # not using template files, so we need to forcibly unset it + # when generating the C source file, otherwise we will end up + # installing it + c_file_kwargs['install_header'] = False + + header_prefix = kwargs.get('header_prefix', '') + decl_decorator = kwargs.get('decorator', '') + func_prefix = kwargs.get('function_prefix', '') + body_prefix = kwargs.get('body_prefix', '') + + # Maybe we should write our own template files into the build dir + # instead, but that seems like much more work, nice as it would be. + fhead = '' + if body_prefix != '': + fhead += '%s\n' % body_prefix + fhead += '#include "%s"\n' % hdr_filename + for hdr in sources: + fhead += '#include "%s"\n' % os.path.basename(str(hdr)) + fhead += ''' +#define C_ENUM(v) ((gint) v) +#define C_FLAGS(v) ((guint) v) +''' + c_file_kwargs['fhead'] = fhead + + c_file_kwargs['fprod'] = ''' +/* enumerations from "@basename@" */ +''' + + c_file_kwargs['vhead'] = ''' +GType +%s@enum_name@_get_type (void) +{ + static gsize gtype_id = 0; + static const G@Type@Value values[] = {''' % func_prefix + + c_file_kwargs['vprod'] = ' { C_@TYPE@(@VALUENAME@), "@VALUENAME@", "@valuenick@" },' + + c_file_kwargs['vtail'] = ''' { 0, NULL, NULL } + }; + if (g_once_init_enter (>ype_id)) { + GType new_type = g_@type@_register_static (g_intern_static_string ("@EnumName@"), values); + g_once_init_leave (>ype_id, new_type); + } + return (GType) gtype_id; +}''' + + rv = self.mkenums(state, [body_filename], c_file_kwargs) + c_file = rv.return_value + + # .h file generation + h_file_kwargs = copy.deepcopy(mkenums_kwargs) + + h_file_kwargs['fhead'] = '''#pragma once + +#include <glib-object.h> +{} + +G_BEGIN_DECLS +'''.format(header_prefix) + + h_file_kwargs['fprod'] = ''' +/* enumerations from "@basename@" */ +''' + + h_file_kwargs['vhead'] = ''' +{} +GType {}@enum_name@_get_type (void); +#define @ENUMPREFIX@_TYPE_@ENUMSHORT@ ({}@enum_name@_get_type())'''.format(decl_decorator, func_prefix, 
func_prefix) + + h_file_kwargs['ftail'] = ''' +G_END_DECLS''' + + rv = self.mkenums(state, [hdr_filename], h_file_kwargs) + h_file = rv.return_value + + return ModuleReturnValue([c_file, h_file], [c_file, h_file]) + + @staticmethod + def _make_mkenum_custom_target(state, sources, output, cmd, kwargs): + custom_kwargs = { + 'input': sources, + 'output': output, + 'capture': True, + 'command': cmd + } + custom_kwargs.update(kwargs) + return build.CustomTarget(output, state.subdir, state.subproject, custom_kwargs, + # https://github.com/mesonbuild/meson/issues/973 + absolute_paths=True) + + @permittedKwargs({'sources', 'prefix', 'install_header', 'install_dir', 'stdinc', + 'nostdinc', 'internal', 'skip_source', 'valist_marshallers', + 'extra_args'}) + def genmarshal(self, state, args, kwargs): + if len(args) != 1: + raise MesonException( + 'Genmarshal requires one positional argument.') + output = args[0] + + if 'sources' not in kwargs: + raise MesonException('Missing keyword argument "sources".') + sources = kwargs.pop('sources') + if isinstance(sources, str): + sources = [sources] + elif not isinstance(sources, list): + raise MesonException( + 'Sources keyword argument must be a string or array.') + + new_genmarshal = mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.3') + + cmd = [state.find_program('glib-genmarshal')] + known_kwargs = ['internal', 'nostdinc', 'skip_source', 'stdinc', + 'valist_marshallers', 'extra_args'] + known_custom_target_kwargs = ['build_always', 'depends', + 'depend_files', 'install_dir', + 'install_header'] + for arg, value in kwargs.items(): + if arg == 'prefix': + cmd += ['--prefix', value] + elif arg == 'extra_args': + if new_genmarshal: + cmd += mesonlib.stringlistify(value) + else: + mlog.warning('The current version of GLib does not support extra arguments \n' + 'for glib-genmarshal. You need at least GLib 2.53.3. 
See ', + mlog.bold('https://github.com/mesonbuild/meson/pull/2049')) + elif arg in known_kwargs and value: + cmd += ['--' + arg.replace('_', '-')] + elif arg not in known_custom_target_kwargs: + raise MesonException( + 'Genmarshal does not take a {} keyword argument.'.format( + arg)) + + install_header = kwargs.pop('install_header', False) + install_dir = kwargs.pop('install_dir', []) + + custom_kwargs = { + 'input': sources, + } + + # https://github.com/GNOME/glib/commit/0fbc98097fac4d3e647684f344e508abae109fdf + if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.51.0'): + cmd += ['--output', '@OUTPUT@'] + else: + custom_kwargs['capture'] = True + + for arg in known_custom_target_kwargs: + if arg in kwargs: + custom_kwargs[arg] = kwargs[arg] + + header_file = output + '.h' + custom_kwargs['command'] = cmd + ['--body', '@INPUT@'] + if mesonlib.version_compare(self._get_native_glib_version(state), '>= 2.53.4'): + # Silence any warnings about missing prototypes + custom_kwargs['command'] += ['--include-header', header_file] + custom_kwargs['output'] = output + '.c' + body = build.CustomTarget(output + '_c', state.subdir, state.subproject, custom_kwargs) + + custom_kwargs['install'] = install_header + custom_kwargs['install_dir'] = install_dir + if new_genmarshal: + cmd += ['--pragma-once'] + custom_kwargs['command'] = cmd + ['--header', '@INPUT@'] + custom_kwargs['output'] = header_file + header = build.CustomTarget(output + '_h', state.subdir, state.subproject, custom_kwargs) + + rv = [body, header] + return ModuleReturnValue(rv, rv) + + @staticmethod + def _vapi_args_to_command(prefix, variable, kwargs, accept_vapi=False): + arg_list = mesonlib.extract_as_list(kwargs, variable) + ret = [] + for arg in arg_list: + if not isinstance(arg, str): + types = 'strings' + ' or InternalDependencys' if accept_vapi else '' + raise MesonException(f'All {variable} must be {types}') + ret.append(prefix + arg) + return ret + + def _extract_vapi_packages(self, 
state, kwargs): + ''' + Packages are special because we need to: + - Get a list of packages for the .deps file + - Get a list of depends for any VapiTargets + - Get package name from VapiTargets + - Add include dirs for any VapiTargets + ''' + arg_list = kwargs.get('packages') + if not arg_list: + return [], [], [], [] + arg_list = mesonlib.listify(arg_list) + vapi_depends = [] + vapi_packages = [] + vapi_includes = [] + ret = [] + remaining_args = [] + for arg in arg_list: + if isinstance(arg, InternalDependency): + targets = [t for t in arg.sources if isinstance(t, VapiTarget)] + for target in targets: + srcdir = os.path.join(state.environment.get_source_dir(), + target.get_subdir()) + outdir = os.path.join(state.environment.get_build_dir(), + target.get_subdir()) + outfile = target.get_outputs()[0][:-5] # Strip .vapi + ret.append('--vapidir=' + outdir) + ret.append('--girdir=' + outdir) + ret.append('--pkg=' + outfile) + vapi_depends.append(target) + vapi_packages.append(outfile) + vapi_includes.append(srcdir) + else: + vapi_packages.append(arg) + remaining_args.append(arg) + + kwargs['packages'] = remaining_args + vapi_args = ret + self._vapi_args_to_command('--pkg=', 'packages', kwargs, accept_vapi=True) + return vapi_args, vapi_depends, vapi_packages, vapi_includes + + def _generate_deps(self, state, library, packages, install_dir): + outdir = state.environment.scratch_dir + fname = os.path.join(outdir, library + '.deps') + with open(fname, 'w', encoding='utf-8') as ofile: + for package in packages: + ofile.write(package + '\n') + return build.Data([mesonlib.File(True, outdir, fname)], install_dir, None, state.subproject) + + def _get_vapi_link_with(self, target): + link_with = [] + for dep in target.get_target_dependencies(): + if isinstance(dep, build.SharedLibrary): + link_with.append(dep) + elif isinstance(dep, GirTarget): + link_with += self._get_vapi_link_with(dep) + return link_with + + @permittedKwargs({'sources', 'packages', 'metadata_dirs', 
'gir_dirs', + 'vapi_dirs', 'install', 'install_dir'}) + def generate_vapi(self, state, args, kwargs): + if len(args) != 1: + raise MesonException('The library name is required') + + if not isinstance(args[0], str): + raise MesonException('The first argument must be the name of the library') + created_values = [] + + library = args[0] + build_dir = os.path.join(state.environment.get_build_dir(), state.subdir) + source_dir = os.path.join(state.environment.get_source_dir(), state.subdir) + pkg_cmd, vapi_depends, vapi_packages, vapi_includes = self._extract_vapi_packages(state, kwargs) + if 'VAPIGEN' in os.environ: + cmd = [state.find_program(os.environ['VAPIGEN'])] + else: + cmd = [state.find_program('vapigen')] + cmd += ['--quiet', '--library=' + library, '--directory=' + build_dir] + cmd += self._vapi_args_to_command('--vapidir=', 'vapi_dirs', kwargs) + cmd += self._vapi_args_to_command('--metadatadir=', 'metadata_dirs', kwargs) + cmd += self._vapi_args_to_command('--girdir=', 'gir_dirs', kwargs) + cmd += pkg_cmd + cmd += ['--metadatadir=' + source_dir] + + if 'sources' not in kwargs: + raise MesonException('sources are required to generate the vapi file') + + inputs = mesonlib.extract_as_list(kwargs, 'sources') + + link_with = [] + for i in inputs: + if isinstance(i, str): + cmd.append(os.path.join(source_dir, i)) + elif isinstance(i, GirTarget): + link_with += self._get_vapi_link_with(i) + subdir = os.path.join(state.environment.get_build_dir(), + i.get_subdir()) + gir_file = os.path.join(subdir, i.get_outputs()[0]) + cmd.append(gir_file) + else: + raise MesonException('Input must be a str or GirTarget') + + vapi_output = library + '.vapi' + custom_kwargs = { + 'command': cmd, + 'input': inputs, + 'output': vapi_output, + 'depends': vapi_depends, + } + install_dir = kwargs.get('install_dir', + os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('datadir')), + 'vala', 'vapi')) + if kwargs.get('install'): + custom_kwargs['install'] = 
kwargs['install'] + custom_kwargs['install_dir'] = install_dir + + # We shouldn't need this locally but we install it + deps_target = self._generate_deps(state, library, vapi_packages, install_dir) + created_values.append(deps_target) + vapi_target = VapiTarget(vapi_output, state.subdir, state.subproject, custom_kwargs) + + # So to try our best to get this to just work we need: + # - link with with the correct library + # - include the vapi and dependent vapi files in sources + # - add relevant directories to include dirs + incs = [build.IncludeDirs(state.subdir, ['.'] + vapi_includes, False)] + sources = [vapi_target] + vapi_depends + rv = InternalDependency(None, incs, [], [], link_with, [], sources, [], {}) + created_values.append(rv) + return ModuleReturnValue(rv, created_values) + +def initialize(*args, **kwargs): + mod = GnomeModule(*args, **kwargs) + mod.interpreter.append_holder_map(GResourceTarget, interpreter.CustomTargetHolder) + mod.interpreter.append_holder_map(GResourceHeaderTarget, interpreter.CustomTargetHolder) + mod.interpreter.append_holder_map(GirTarget, interpreter.CustomTargetHolder) + mod.interpreter.append_holder_map(TypelibTarget, interpreter.CustomTargetHolder) + mod.interpreter.append_holder_map(VapiTarget, interpreter.CustomTargetHolder) + return mod diff --git a/meson/mesonbuild/modules/hotdoc.py b/meson/mesonbuild/modules/hotdoc.py new file mode 100644 index 000000000..4dccd067a --- /dev/null +++ b/meson/mesonbuild/modules/hotdoc.py @@ -0,0 +1,432 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +'''This module provides helper functions for generating documentation using hotdoc''' + +import os +from collections import OrderedDict + +from mesonbuild import mesonlib +from mesonbuild import mlog, build +from mesonbuild.coredata import MesonException +from . import ModuleReturnValue +from . import ExtensionModule +from ..dependencies import Dependency, InternalDependency +from ..interpreterbase import FeatureNew, InvalidArguments, noPosargs, noKwargs +from ..interpreter import CustomTargetHolder +from ..programs import ExternalProgram + + +def ensure_list(value): + if not isinstance(value, list): + return [value] + return value + + +MIN_HOTDOC_VERSION = '0.8.100' + + +class HotdocTargetBuilder: + def __init__(self, name, state, hotdoc, interpreter, kwargs): + self.hotdoc = hotdoc + self.build_by_default = kwargs.pop('build_by_default', False) + self.kwargs = kwargs + self.name = name + self.state = state + self.interpreter = interpreter + self.include_paths = OrderedDict() + + self.builddir = state.environment.get_build_dir() + self.sourcedir = state.environment.get_source_dir() + self.subdir = state.subdir + self.build_command = state.environment.get_build_command() + + self.cmd = ['conf', '--project-name', name, "--disable-incremental-build", + '--output', os.path.join(self.builddir, self.subdir, self.name + '-doc')] + + self._extra_extension_paths = set() + self.extra_assets = set() + self._dependencies = [] + self._subprojects = [] + + def process_known_arg(self, option, types, argname=None, + value_processor=None, mandatory=False, + force_list=False): + if not argname: + argname = option.strip("-").replace("-", "_") + + value, _ = self.get_value( + types, argname, None, value_processor, mandatory, force_list) + + self.set_arg_value(option, value) + + def set_arg_value(self, option, value): + if value is None: + return + + if isinstance(value, bool): 
+ if value: + self.cmd.append(option) + elif isinstance(value, list): + # Do not do anything on empty lists + if value: + # https://bugs.python.org/issue9334 (from 2010 :( ) + # The syntax with nargs=+ is inherently ambiguous + # A workaround for this case is to simply prefix with a space + # every value starting with a dash + escaped_value = [] + for e in value: + if isinstance(e, str) and e.startswith('-'): + escaped_value += [' %s' % e] + else: + escaped_value += [e] + if option: + self.cmd.extend([option] + escaped_value) + else: + self.cmd.extend(escaped_value) + else: + # argparse gets confused if value(s) start with a dash. + # When an option expects a single value, the unambiguous way + # to specify it is with = + if isinstance(value, str): + self.cmd.extend([f'{option}={value}']) + else: + self.cmd.extend([option, value]) + + def check_extra_arg_type(self, arg, value): + if isinstance(value, list): + for v in value: + self.check_extra_arg_type(arg, v) + return + + valid_types = (str, bool, mesonlib.File, build.IncludeDirs, build.CustomTarget, build.BuildTarget) + if not isinstance(value, valid_types): + raise InvalidArguments('Argument "{}={}" should be of type: {}.'.format( + arg, value, [t.__name__ for t in valid_types])) + + def process_extra_args(self): + for arg, value in self.kwargs.items(): + option = "--" + arg.replace("_", "-") + self.check_extra_arg_type(arg, value) + self.set_arg_value(option, value) + + def get_value(self, types, argname, default=None, value_processor=None, + mandatory=False, force_list=False): + if not isinstance(types, list): + types = [types] + try: + uvalue = value = self.kwargs.pop(argname) + if value_processor: + value = value_processor(value) + + for t in types: + if isinstance(value, t): + if force_list and not isinstance(value, list): + return [value], uvalue + return value, uvalue + raise MesonException("%s field value %s is not valid," + " valid types are %s" % (argname, value, + types)) + except KeyError: + if 
mandatory: + raise MesonException("%s mandatory field not found" % argname) + + if default is not None: + return default, default + + return None, None + + def setup_extension_paths(self, paths): + if not isinstance(paths, list): + paths = [paths] + + for path in paths: + self.add_extension_paths([path]) + + return [] + + def add_extension_paths(self, paths): + for path in paths: + if path in self._extra_extension_paths: + continue + + self._extra_extension_paths.add(path) + self.cmd.extend(["--extra-extension-path", path]) + + def process_extra_extension_paths(self): + self.get_value([list, str], 'extra_extensions_paths', + default="", value_processor=self.setup_extension_paths) + + def replace_dirs_in_string(self, string): + return string.replace("@SOURCE_ROOT@", self.sourcedir).replace("@BUILD_ROOT@", self.builddir) + + def process_gi_c_source_roots(self): + if self.hotdoc.run_hotdoc(['--has-extension=gi-extension']) != 0: + return + + value, _ = self.get_value([list, str], 'gi_c_source_roots', default=[], force_list=True) + value.extend([ + os.path.join(self.state.environment.get_source_dir(), + self.interpreter.subproject_dir, self.state.subproject), + os.path.join(self.state.environment.get_build_dir(), self.interpreter.subproject_dir, self.state.subproject) + ]) + + self.cmd += ['--gi-c-source-roots'] + value + + def process_dependencies(self, deps): + cflags = set() + for dep in mesonlib.listify(ensure_list(deps)): + if isinstance(dep, InternalDependency): + inc_args = self.state.get_include_args(dep.include_directories) + cflags.update([self.replace_dirs_in_string(x) + for x in inc_args]) + cflags.update(self.process_dependencies(dep.libraries)) + cflags.update(self.process_dependencies(dep.sources)) + cflags.update(self.process_dependencies(dep.ext_deps)) + elif isinstance(dep, Dependency): + cflags.update(dep.get_compile_args()) + elif isinstance(dep, (build.StaticLibrary, build.SharedLibrary)): + self._dependencies.append(dep) + for incd in 
dep.get_include_dirs(): + cflags.update(incd.get_incdirs()) + elif isinstance(dep, HotdocTarget): + # Recurse in hotdoc target dependencies + self.process_dependencies(dep.get_target_dependencies()) + self._subprojects.extend(dep.subprojects) + self.process_dependencies(dep.subprojects) + self.add_include_path(os.path.join(self.builddir, dep.hotdoc_conf.subdir)) + self.cmd += ['--extra-assets=' + p for p in dep.extra_assets] + self.add_extension_paths(dep.extra_extension_paths) + elif isinstance(dep, build.CustomTarget) or isinstance(dep, build.BuildTarget): + self._dependencies.append(dep) + + return [f.strip('-I') for f in cflags] + + def process_extra_assets(self): + self._extra_assets, _ = self.get_value("--extra-assets", (str, list), default=[], + force_list=True) + for assets_path in self._extra_assets: + self.cmd.extend(["--extra-assets", assets_path]) + + def process_subprojects(self): + _, value = self.get_value([ + list, HotdocTarget], argname="subprojects", + force_list=True, value_processor=self.process_dependencies) + + if value is not None: + self._subprojects.extend(value) + + def flatten_config_command(self): + cmd = [] + for arg in mesonlib.listify(self.cmd, flatten=True): + if isinstance(arg, mesonlib.File): + arg = arg.absolute_path(self.state.environment.get_source_dir(), + self.state.environment.get_build_dir()) + elif isinstance(arg, build.IncludeDirs): + for inc_dir in arg.get_incdirs(): + cmd.append(os.path.join(self.sourcedir, arg.get_curdir(), inc_dir)) + cmd.append(os.path.join(self.builddir, arg.get_curdir(), inc_dir)) + + continue + elif isinstance(arg, build.CustomTarget) or isinstance(arg, build.BuildTarget): + self._dependencies.append(arg) + arg = self.interpreter.backend.get_target_filename_abs(arg) + + cmd.append(arg) + + return cmd + + def generate_hotdoc_config(self): + cwd = os.path.abspath(os.curdir) + ncwd = os.path.join(self.sourcedir, self.subdir) + mlog.log('Generating Hotdoc configuration for: ', mlog.bold(self.name)) + 
os.chdir(ncwd) + self.hotdoc.run_hotdoc(self.flatten_config_command()) + os.chdir(cwd) + + def ensure_file(self, value): + if isinstance(value, list): + res = [] + for val in value: + res.append(self.ensure_file(val)) + return res + + if not isinstance(value, mesonlib.File): + return mesonlib.File.from_source_file(self.sourcedir, self.subdir, value) + + return value + + def ensure_dir(self, value): + if os.path.isabs(value): + _dir = value + else: + _dir = os.path.join(self.sourcedir, self.subdir, value) + + if not os.path.isdir(_dir): + raise InvalidArguments('"%s" is not a directory.' % _dir) + + return os.path.relpath(_dir, os.path.join(self.builddir, self.subdir)) + + def check_forbidden_args(self): + for arg in ['conf_file']: + if arg in self.kwargs: + raise InvalidArguments('Argument "%s" is forbidden.' % arg) + + def add_include_path(self, path): + self.include_paths[path] = path + + def make_targets(self): + self.check_forbidden_args() + file_types = (str, mesonlib.File) + self.process_known_arg("--index", file_types, mandatory=True, value_processor=self.ensure_file) + self.process_known_arg("--project-version", str, mandatory=True) + self.process_known_arg("--sitemap", file_types, mandatory=True, value_processor=self.ensure_file) + self.process_known_arg("--html-extra-theme", str, value_processor=self.ensure_dir) + self.process_known_arg(None, list, "include_paths", force_list=True, + value_processor=lambda x: [self.add_include_path(self.ensure_dir(v)) for v in ensure_list(x)]) + self.process_known_arg('--c-include-directories', + [Dependency, build.StaticLibrary, build.SharedLibrary, list], argname="dependencies", + force_list=True, value_processor=self.process_dependencies) + self.process_gi_c_source_roots() + self.process_extra_assets() + self.process_extra_extension_paths() + self.process_subprojects() + + install, install = self.get_value(bool, "install", mandatory=False) + self.process_extra_args() + + fullname = self.name + '-doc' + 
hotdoc_config_name = fullname + '.json' + hotdoc_config_path = os.path.join( + self.builddir, self.subdir, hotdoc_config_name) + with open(hotdoc_config_path, 'w', encoding='utf-8') as f: + f.write('{}') + + self.cmd += ['--conf-file', hotdoc_config_path] + self.add_include_path(os.path.join(self.builddir, self.subdir)) + self.add_include_path(os.path.join(self.sourcedir, self.subdir)) + + depfile = os.path.join(self.builddir, self.subdir, self.name + '.deps') + self.cmd += ['--deps-file-dest', depfile] + + for path in self.include_paths.keys(): + self.cmd.extend(['--include-path', path]) + + if self.state.environment.coredata.get_option(mesonlib.OptionKey('werror', subproject=self.state.subproject)): + self.cmd.append('--fatal-warning') + self.generate_hotdoc_config() + + target_cmd = self.build_command + ["--internal", "hotdoc"] + \ + self.hotdoc.get_command() + ['run', '--conf-file', hotdoc_config_name] + \ + ['--builddir', os.path.join(self.builddir, self.subdir)] + + target = HotdocTarget(fullname, + subdir=self.subdir, + subproject=self.state.subproject, + hotdoc_conf=mesonlib.File.from_built_file( + self.subdir, hotdoc_config_name), + extra_extension_paths=self._extra_extension_paths, + extra_assets=self._extra_assets, + subprojects=self._subprojects, + command=target_cmd, + depends=self._dependencies, + output=fullname, + depfile=os.path.basename(depfile), + build_by_default=self.build_by_default) + + install_script = None + if install is True: + install_script = self.state.backend.get_executable_serialisation(self.build_command + [ + "--internal", "hotdoc", + "--install", os.path.join(fullname, 'html'), + '--name', self.name, + '--builddir', os.path.join(self.builddir, self.subdir)] + + self.hotdoc.get_command() + + ['run', '--conf-file', hotdoc_config_name]) + + return (target, install_script) + + +class HotdocTargetHolder(CustomTargetHolder): + def __init__(self, target, interp): + super().__init__(target, interp) + self.methods.update({'config_path': 
self.config_path_method}) + + @noPosargs + @noKwargs + def config_path_method(self, *args, **kwargs): + conf = self.held_object.hotdoc_conf.absolute_path(self.interpreter.environment.source_dir, + self.interpreter.environment.build_dir) + return conf + + +class HotdocTarget(build.CustomTarget): + def __init__(self, name, subdir, subproject, hotdoc_conf, extra_extension_paths, extra_assets, + subprojects, **kwargs): + super().__init__(name, subdir, subproject, kwargs, absolute_paths=True) + self.hotdoc_conf = hotdoc_conf + self.extra_extension_paths = extra_extension_paths + self.extra_assets = extra_assets + self.subprojects = subprojects + + def __getstate__(self): + # Make sure we do not try to pickle subprojects + res = self.__dict__.copy() + res['subprojects'] = [] + + return res + + +class HotDocModule(ExtensionModule): + @FeatureNew('Hotdoc Module', '0.48.0') + def __init__(self, interpreter): + super().__init__(interpreter) + self.hotdoc = ExternalProgram('hotdoc') + if not self.hotdoc.found(): + raise MesonException('hotdoc executable not found') + + try: + from hotdoc.run_hotdoc import run # noqa: F401 + self.hotdoc.run_hotdoc = run + except Exception as e: + raise MesonException('hotdoc {} required but not found. 
({})'.format( + MIN_HOTDOC_VERSION, e)) + self.methods.update({ + 'has_extensions': self.has_extensions, + 'generate_doc': self.generate_doc, + }) + + @noKwargs + def has_extensions(self, state, args, kwargs): + return self.hotdoc.run_hotdoc(['--has-extension=%s' % extension for extension in args]) == 0 + + def generate_doc(self, state, args, kwargs): + if len(args) != 1: + raise MesonException('One positional argument is' + ' required for the project name.') + + project_name = args[0] + builder = HotdocTargetBuilder(project_name, state, self.hotdoc, self.interpreter, kwargs) + target, install_script = builder.make_targets() + targets = [target] + if install_script: + targets.append(install_script) + + return ModuleReturnValue(targets[0], targets) + + +def initialize(interpreter): + mod = HotDocModule(interpreter) + mod.interpreter.append_holder_map(HotdocTarget, HotdocTargetHolder) + return mod diff --git a/meson/mesonbuild/modules/i18n.py b/meson/mesonbuild/modules/i18n.py new file mode 100644 index 000000000..a64838b57 --- /dev/null +++ b/meson/mesonbuild/modules/i18n.py @@ -0,0 +1,197 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import shutil + +from os import path +from .. import coredata, mesonlib, build, mlog +from ..mesonlib import MesonException +from . import ModuleReturnValue +from . 
import ExtensionModule +from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs + +PRESET_ARGS = { + 'glib': [ + '--from-code=UTF-8', + '--add-comments', + + # https://developer.gnome.org/glib/stable/glib-I18N.html + '--keyword=_', + '--keyword=N_', + '--keyword=C_:1c,2', + '--keyword=NC_:1c,2', + '--keyword=g_dcgettext:2', + '--keyword=g_dngettext:2,3', + '--keyword=g_dpgettext2:2c,3', + + '--flag=N_:1:pass-c-format', + '--flag=C_:2:pass-c-format', + '--flag=NC_:2:pass-c-format', + '--flag=g_dngettext:2:pass-c-format', + '--flag=g_strdup_printf:1:c-format', + '--flag=g_string_printf:2:c-format', + '--flag=g_string_append_printf:2:c-format', + '--flag=g_error_new:3:c-format', + '--flag=g_set_error:4:c-format', + '--flag=g_markup_printf_escaped:1:c-format', + '--flag=g_log:3:c-format', + '--flag=g_print:1:c-format', + '--flag=g_printerr:1:c-format', + '--flag=g_printf:1:c-format', + '--flag=g_fprintf:2:c-format', + '--flag=g_sprintf:2:c-format', + '--flag=g_snprintf:3:c-format', + ] +} + + +class I18nModule(ExtensionModule): + def __init__(self, interpreter): + super().__init__(interpreter) + self.methods.update({ + 'merge_file': self.merge_file, + 'gettext': self.gettext, + }) + + @staticmethod + def nogettext_warning(): + mlog.warning('Gettext not found, all translation targets will be ignored.', once=True) + + @staticmethod + def _get_data_dirs(state, dirs): + """Returns source directories of relative paths""" + src_dir = path.join(state.environment.get_source_dir(), state.subdir) + return [path.join(src_dir, d) for d in dirs] + + @FeatureNew('i18n.merge_file', '0.37.0') + @FeatureNewKwargs('i18n.merge_file', '0.51.0', ['args']) + @permittedKwargs(build.CustomTarget.known_kwargs | {'data_dirs', 'po_dir', 'type', 'args'}) + def merge_file(self, state, args, kwargs): + if not shutil.which('xgettext'): + self.nogettext_warning() + return + podir = kwargs.pop('po_dir', None) + if not podir: + raise MesonException('i18n: po_dir is a required 
kwarg') + podir = path.join(state.build_to_src, state.subdir, podir) + + file_type = kwargs.pop('type', 'xml') + VALID_TYPES = ('xml', 'desktop') + if file_type not in VALID_TYPES: + raise MesonException(f'i18n: "{file_type}" is not a valid type {VALID_TYPES}') + + datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.pop('data_dirs', []))) + datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None + + command = state.environment.get_build_command() + [ + '--internal', 'msgfmthelper', + '@INPUT@', '@OUTPUT@', file_type, podir + ] + if datadirs: + command.append(datadirs) + + if 'args' in kwargs: + command.append('--') + command.append(mesonlib.stringlistify(kwargs.pop('args', []))) + + kwargs['command'] = command + + # We only use this input file to create a name of the custom target. + # Thus we can ignore the other entries. + inputfile = mesonlib.extract_as_list(kwargs, 'input')[0] + if isinstance(inputfile, str): + inputfile = mesonlib.File.from_source_file(state.environment.source_dir, + state.subdir, inputfile) + if isinstance(inputfile, mesonlib.File): + # output could be '@BASENAME@' in which case we need to do substitutions + # to get a unique target name. 
+ output = kwargs['output'] + ifile_abs = inputfile.absolute_path(state.environment.source_dir, + state.environment.build_dir) + values = mesonlib.get_filenames_templates_dict([ifile_abs], None) + outputs = mesonlib.substitute_values([output], values) + output = outputs[0] + ct = build.CustomTarget(output + '_' + state.subdir.replace('/', '@').replace('\\', '@') + '_merge', state.subdir, state.subproject, kwargs) + else: + ct = build.CustomTarget(kwargs['output'] + '_merge', state.subdir, state.subproject, kwargs) + + return ModuleReturnValue(ct, [ct]) + + @FeatureNewKwargs('i18n.gettext', '0.37.0', ['preset']) + @FeatureNewKwargs('i18n.gettext', '0.50.0', ['install_dir']) + @permittedKwargs({'po_dir', 'data_dirs', 'type', 'languages', 'args', 'preset', 'install', 'install_dir'}) + def gettext(self, state, args, kwargs): + if len(args) != 1: + raise coredata.MesonException('Gettext requires one positional argument (package name).') + if not shutil.which('xgettext'): + self.nogettext_warning() + return + packagename = args[0] + languages = mesonlib.stringlistify(kwargs.get('languages', [])) + datadirs = self._get_data_dirs(state, mesonlib.stringlistify(kwargs.get('data_dirs', []))) + extra_args = mesonlib.stringlistify(kwargs.get('args', [])) + + preset = kwargs.pop('preset', None) + if preset: + preset_args = PRESET_ARGS.get(preset) + if not preset_args: + raise coredata.MesonException('i18n: Preset "{}" is not one of the valid options: {}'.format( + preset, list(PRESET_ARGS.keys()))) + extra_args = set(preset_args + extra_args) + + pkg_arg = '--pkgname=' + packagename + lang_arg = '--langs=' + '@@'.join(languages) if languages else None + datadirs = '--datadirs=' + ':'.join(datadirs) if datadirs else None + extra_args = '--extra-args=' + '@@'.join(extra_args) if extra_args else None + + potargs = state.environment.get_build_command() + ['--internal', 'gettext', 'pot', pkg_arg] + if datadirs: + potargs.append(datadirs) + if extra_args: + potargs.append(extra_args) 
+ pottarget = build.RunTarget(packagename + '-pot', potargs, [], state.subdir, state.subproject) + + gmoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'gen_gmo'] + if lang_arg: + gmoargs.append(lang_arg) + gmotarget = build.RunTarget(packagename + '-gmo', gmoargs, [], state.subdir, state.subproject) + + updatepoargs = state.environment.get_build_command() + ['--internal', 'gettext', 'update_po', pkg_arg] + if lang_arg: + updatepoargs.append(lang_arg) + if datadirs: + updatepoargs.append(datadirs) + if extra_args: + updatepoargs.append(extra_args) + updatepotarget = build.RunTarget(packagename + '-update-po', updatepoargs, [], state.subdir, state.subproject) + + targets = [pottarget, gmotarget, updatepotarget] + + install = kwargs.get('install', True) + if install: + install_dir = kwargs.get('install_dir', state.environment.coredata.get_option(mesonlib.OptionKey('localedir'))) + script = state.environment.get_build_command() + args = ['--internal', 'gettext', 'install', + '--subdir=' + state.subdir, + '--localedir=' + install_dir, + pkg_arg] + if lang_arg: + args.append(lang_arg) + iscript = state.backend.get_executable_serialisation(script + args) + targets.append(iscript) + + return ModuleReturnValue(None, targets) + +def initialize(*args, **kwargs): + return I18nModule(*args, **kwargs) diff --git a/meson/mesonbuild/modules/keyval.py b/meson/mesonbuild/modules/keyval.py new file mode 100644 index 000000000..b2d54db01 --- /dev/null +++ b/meson/mesonbuild/modules/keyval.py @@ -0,0 +1,72 @@ +# Copyright 2017, 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import ExtensionModule + +from .. import mesonlib +from ..mesonlib import typeslistify +from ..interpreterbase import FeatureNew, noKwargs, InvalidCode + +import os + +class KeyvalModule(ExtensionModule): + + @FeatureNew('Keyval Module', '0.55.0') + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.methods.update({ + 'load': self.load, + }) + + def _load_file(self, path_to_config): + result = dict() + try: + with open(path_to_config, encoding='utf-8') as f: + for line in f: + if '#' in line: + comment_idx = line.index('#') + line = line[:comment_idx] + line = line.strip() + try: + name, val = line.split('=', 1) + except ValueError: + continue + result[name.strip()] = val.strip() + except OSError as e: + raise mesonlib.MesonException(f'Failed to load {path_to_config}: {e}') + + return result + + @noKwargs + def load(self, state, args, kwargs): + sources = typeslistify(args, (str, mesonlib.File)) + if len(sources) != 1: + raise InvalidCode('load takes only one file input.') + + s = sources[0] + is_built = False + if isinstance(s, mesonlib.File): + is_built = is_built or s.is_built + s = s.absolute_path(self.interpreter.environment.source_dir, self.interpreter.environment.build_dir) + else: + s = os.path.join(self.interpreter.environment.source_dir, s) + + if s not in self.interpreter.build_def_files and not is_built: + self.interpreter.build_def_files.append(s) + + return self._load_file(s) + + +def initialize(*args, **kwargs): + return KeyvalModule(*args, **kwargs) diff --git a/meson/mesonbuild/modules/modtest.py 
b/meson/mesonbuild/modules/modtest.py new file mode 100644 index 000000000..dd2e2ff8d --- /dev/null +++ b/meson/mesonbuild/modules/modtest.py @@ -0,0 +1,30 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from . import ExtensionModule +from ..interpreterbase import noKwargs + +class TestModule(ExtensionModule): + def __init__(self, interpreter): + super().__init__(interpreter) + self.methods.update({ + 'print_hello': self.print_hello, + }) + + @noKwargs + def print_hello(self, state, args, kwargs): + print('Hello from a Meson module') + +def initialize(*args, **kwargs): + return TestModule(*args, **kwargs) diff --git a/meson/mesonbuild/modules/pkgconfig.py b/meson/mesonbuild/modules/pkgconfig.py new file mode 100644 index 000000000..c6eaedca5 --- /dev/null +++ b/meson/mesonbuild/modules/pkgconfig.py @@ -0,0 +1,591 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +from pathlib import PurePath + +from .. import build +from .. import dependencies +from ..dependencies import ThreadDependency +from .. import mesonlib +from .. import mlog +from . import ModuleReturnValue +from . import ExtensionModule +from ..interpreterbase import permittedKwargs, FeatureNew, FeatureNewKwargs + +already_warned_objs = set() + +class DependenciesHelper: + def __init__(self, state, name): + self.state = state + self.name = name + self.pub_libs = [] + self.pub_reqs = [] + self.priv_libs = [] + self.priv_reqs = [] + self.cflags = [] + self.version_reqs = {} + self.link_whole_targets = [] + + def add_pub_libs(self, libs): + libs, reqs, cflags = self._process_libs(libs, True) + self.pub_libs = libs + self.pub_libs # prepend to preserve dependencies + self.pub_reqs += reqs + self.cflags += cflags + + def add_priv_libs(self, libs): + libs, reqs, _ = self._process_libs(libs, False) + self.priv_libs = libs + self.priv_libs + self.priv_reqs += reqs + + def add_pub_reqs(self, reqs): + self.pub_reqs += self._process_reqs(reqs) + + def add_priv_reqs(self, reqs): + self.priv_reqs += self._process_reqs(reqs) + + def _check_generated_pc_deprecation(self, obj): + if not hasattr(obj, 'generated_pc_warn'): + return + name = obj.generated_pc_warn[0] + if (name, obj.name) in already_warned_objs: + return + mlog.deprecation('Library', mlog.bold(obj.name), 'was passed to the ' + '"libraries" keyword argument of a previous call ' + 'to generate() method instead of first positional ' + 'argument.', 'Adding', mlog.bold(obj.generated_pc), + 'to "Requires" field, but this is a deprecated ' + 'behaviour that will change in a future version ' + 'of Meson. 
Please report the issue if this ' + 'warning cannot be avoided in your case.', + location=obj.generated_pc_warn[1]) + already_warned_objs.add((name, obj.name)) + + def _process_reqs(self, reqs): + '''Returns string names of requirements''' + processed_reqs = [] + for obj in mesonlib.listify(reqs): + if not isinstance(obj, str): + FeatureNew.single_use('pkgconfig.generate requirement from non-string object', '0.46.0', self.state.subproject) + if hasattr(obj, 'generated_pc'): + self._check_generated_pc_deprecation(obj) + processed_reqs.append(obj.generated_pc) + elif hasattr(obj, 'pcdep'): + pcdeps = mesonlib.listify(obj.pcdep) + for d in pcdeps: + processed_reqs.append(d.name) + self.add_version_reqs(d.name, obj.version_reqs) + elif isinstance(obj, dependencies.PkgConfigDependency): + if obj.found(): + processed_reqs.append(obj.name) + self.add_version_reqs(obj.name, obj.version_reqs) + elif isinstance(obj, str): + name, version_req = self.split_version_req(obj) + processed_reqs.append(name) + self.add_version_reqs(name, version_req) + elif isinstance(obj, dependencies.Dependency) and not obj.found(): + pass + elif isinstance(obj, ThreadDependency): + pass + else: + raise mesonlib.MesonException('requires argument not a string, ' + 'library with pkgconfig-generated file ' + 'or pkgconfig-dependency object, ' + 'got {!r}'.format(obj)) + return processed_reqs + + def add_cflags(self, cflags): + self.cflags += mesonlib.stringlistify(cflags) + + def _process_libs(self, libs, public: bool): + libs = mesonlib.listify(libs) + processed_libs = [] + processed_reqs = [] + processed_cflags = [] + for obj in libs: + if hasattr(obj, 'pcdep'): + pcdeps = mesonlib.listify(obj.pcdep) + for d in pcdeps: + processed_reqs.append(d.name) + self.add_version_reqs(d.name, obj.version_reqs) + elif hasattr(obj, 'generated_pc'): + self._check_generated_pc_deprecation(obj) + processed_reqs.append(obj.generated_pc) + elif isinstance(obj, dependencies.PkgConfigDependency): + if obj.found(): + 
processed_reqs.append(obj.name) + self.add_version_reqs(obj.name, obj.version_reqs) + elif isinstance(obj, dependencies.InternalDependency): + if obj.found(): + processed_libs += obj.get_link_args() + processed_cflags += obj.get_compile_args() + self._add_lib_dependencies(obj.libraries, obj.whole_libraries, obj.ext_deps, public, private_external_deps=True) + elif isinstance(obj, dependencies.Dependency): + if obj.found(): + processed_libs += obj.get_link_args() + processed_cflags += obj.get_compile_args() + elif isinstance(obj, build.SharedLibrary) and obj.shared_library_only: + # Do not pull dependencies for shared libraries because they are + # only required for static linking. Adding private requires has + # the side effect of exposing their cflags, which is the + # intended behaviour of pkg-config but force Debian to add more + # than needed build deps. + # See https://bugs.freedesktop.org/show_bug.cgi?id=105572 + processed_libs.append(obj) + elif isinstance(obj, (build.SharedLibrary, build.StaticLibrary)): + processed_libs.append(obj) + # If there is a static library in `Libs:` all its deps must be + # public too, otherwise the generated pc file will never be + # usable without --static. 
+ self._add_lib_dependencies(obj.link_targets, + obj.link_whole_targets, + obj.external_deps, + isinstance(obj, build.StaticLibrary) and public) + elif isinstance(obj, (build.CustomTarget, build.CustomTargetIndex)): + if not obj.is_linkable_target(): + raise mesonlib.MesonException('library argument contains a not linkable custom_target.') + FeatureNew.single_use('custom_target in pkgconfig.generate libraries', '0.58.0', self.state.subproject) + processed_libs.append(obj) + elif isinstance(obj, str): + processed_libs.append(obj) + else: + raise mesonlib.MesonException(f'library argument of type {type(obj).__name__} not a string, library or dependency object.') + + return processed_libs, processed_reqs, processed_cflags + + def _add_lib_dependencies(self, link_targets, link_whole_targets, external_deps, public, private_external_deps=False): + add_libs = self.add_pub_libs if public else self.add_priv_libs + # Recursively add all linked libraries + for t in link_targets: + # Internal libraries (uninstalled static library) will be promoted + # to link_whole, treat them as such here. + if t.is_internal(): + self._add_link_whole(t, public) + else: + add_libs([t]) + for t in link_whole_targets: + self._add_link_whole(t, public) + # And finally its external dependencies + if private_external_deps: + self.add_priv_libs(external_deps) + else: + add_libs(external_deps) + + def _add_link_whole(self, t, public): + # Don't include static libraries that we link_whole. But we still need to + # include their dependencies: a static library we link_whole + # could itself link to a shared library or an installed static library. + # Keep track of link_whole_targets so we can remove them from our + # lists in case a library is link_with and link_whole at the same time. + # See remove_dups() below. 
+ self.link_whole_targets.append(t) + self._add_lib_dependencies(t.link_targets, t.link_whole_targets, t.external_deps, public) + + def add_version_reqs(self, name, version_reqs): + if version_reqs: + if name not in self.version_reqs: + self.version_reqs[name] = set() + # Note that pkg-config is picky about whitespace. + # 'foo > 1.2' is ok but 'foo>1.2' is not. + # foo, bar' is ok, but 'foo,bar' is not. + new_vreqs = [s for s in mesonlib.stringlistify(version_reqs)] + self.version_reqs[name].update(new_vreqs) + + def split_version_req(self, s): + for op in ['>=', '<=', '!=', '==', '=', '>', '<']: + pos = s.find(op) + if pos > 0: + return s[0:pos].strip(), s[pos:].strip() + return s, None + + def format_vreq(self, vreq): + # vreq are '>=1.0' and pkgconfig wants '>= 1.0' + for op in ['>=', '<=', '!=', '==', '=', '>', '<']: + if vreq.startswith(op): + return op + ' ' + vreq[len(op):] + return vreq + + def format_reqs(self, reqs): + result = [] + for name in reqs: + vreqs = self.version_reqs.get(name, None) + if vreqs: + result += [name + ' ' + self.format_vreq(vreq) for vreq in vreqs] + else: + result += [name] + return ', '.join(result) + + def remove_dups(self): + # Set of ids that have already been handled and should not be added any more + exclude = set() + + # We can't just check if 'x' is excluded because we could have copies of + # the same SharedLibrary object for example. + def _ids(x): + if hasattr(x, 'generated_pc'): + yield x.generated_pc + if isinstance(x, build.Target): + yield x.get_id() + yield x + + # Exclude 'x' in all its forms and return if it was already excluded + def _add_exclude(x): + was_excluded = False + for i in _ids(x): + if i in exclude: + was_excluded = True + else: + exclude.add(i) + return was_excluded + + # link_whole targets are already part of other targets, exclude them all. 
+ for t in self.link_whole_targets: + _add_exclude(t) + + def _fn(xs, libs=False): + # Remove duplicates whilst preserving original order + result = [] + for x in xs: + # Don't de-dup unknown strings to avoid messing up arguments like: + # ['-framework', 'CoreAudio', '-framework', 'CoreMedia'] + known_flags = ['-pthread'] + cannot_dedup = libs and isinstance(x, str) and \ + not x.startswith(('-l', '-L')) and \ + x not in known_flags + if not cannot_dedup and _add_exclude(x): + continue + result.append(x) + return result + + # Handle lists in priority order: public items can be excluded from + # private and Requires can excluded from Libs. + self.pub_reqs = _fn(self.pub_reqs) + self.pub_libs = _fn(self.pub_libs, True) + self.priv_reqs = _fn(self.priv_reqs) + self.priv_libs = _fn(self.priv_libs, True) + # Reset exclude list just in case some values can be both cflags and libs. + exclude = set() + self.cflags = _fn(self.cflags) + +class PkgConfigModule(ExtensionModule): + def __init__(self, interpreter): + super().__init__(interpreter) + self.methods.update({ + 'generate': self.generate, + }) + + def _get_lname(self, l, msg, pcfile, is_custom_target): + if is_custom_target: + basename = os.path.basename(l.get_filename()) + name = os.path.splitext(basename)[0] + if name.startswith('lib'): + name = name[3:] + return name + # Nothing special + if not l.name_prefix_set: + return l.name + # Sometimes people want the library to start with 'lib' everywhere, + # which is achieved by setting name_prefix to '' and the target name to + # 'libfoo'. In that case, try to get the pkg-config '-lfoo' arg correct. + if l.prefix == '' and l.name.startswith('lib'): + return l.name[3:] + # If the library is imported via an import library which is always + # named after the target name, '-lfoo' is correct. 
+ if isinstance(l, build.SharedLibrary) and l.import_filename: + return l.name + # In other cases, we can't guarantee that the compiler will be able to + # find the library via '-lfoo', so tell the user that. + mlog.warning(msg.format(l.name, 'name_prefix', l.name, pcfile)) + return l.name + + def _escape(self, value): + ''' + We cannot use quote_arg because it quotes with ' and " which does not + work with pkg-config and pkgconf at all. + ''' + # We should always write out paths with / because pkg-config requires + # spaces to be quoted with \ and that messes up on Windows: + # https://bugs.freedesktop.org/show_bug.cgi?id=103203 + if isinstance(value, PurePath): + value = value.as_posix() + return value.replace(' ', r'\ ') + + def _make_relative(self, prefix, subdir): + prefix = PurePath(prefix) + subdir = PurePath(subdir) + try: + return subdir.relative_to(prefix).as_posix() + except ValueError: + return subdir.as_posix() + + def _generate_pkgconfig_file(self, state, deps, subdirs, name, description, + url, version, pcfile, conflicts, variables, + unescaped_variables, uninstalled=False, dataonly=False): + coredata = state.environment.get_coredata() + if uninstalled: + outdir = os.path.join(state.environment.build_dir, 'meson-uninstalled') + if not os.path.exists(outdir): + os.mkdir(outdir) + prefix = PurePath(state.environment.get_build_dir()) + srcdir = PurePath(state.environment.get_source_dir()) + else: + outdir = state.environment.scratch_dir + prefix = PurePath(coredata.get_option(mesonlib.OptionKey('prefix'))) + # These always return paths relative to prefix + libdir = PurePath(coredata.get_option(mesonlib.OptionKey('libdir'))) + incdir = PurePath(coredata.get_option(mesonlib.OptionKey('includedir'))) + fname = os.path.join(outdir, pcfile) + with open(fname, 'w', encoding='utf-8') as ofile: + if not dataonly: + ofile.write('prefix={}\n'.format(self._escape(prefix))) + if uninstalled: + ofile.write('srcdir={}\n'.format(self._escape(srcdir))) + 
ofile.write('libdir={}\n'.format(self._escape('${prefix}' / libdir))) + ofile.write('includedir={}\n'.format(self._escape('${prefix}' / incdir))) + if variables or unescaped_variables: + ofile.write('\n') + for k, v in variables: + ofile.write('{}={}\n'.format(k, self._escape(v))) + for k, v in unescaped_variables: + ofile.write(f'{k}={v}\n') + ofile.write('\n') + ofile.write('Name: %s\n' % name) + if len(description) > 0: + ofile.write('Description: %s\n' % description) + if len(url) > 0: + ofile.write('URL: %s\n' % url) + ofile.write('Version: %s\n' % version) + reqs_str = deps.format_reqs(deps.pub_reqs) + if len(reqs_str) > 0: + ofile.write(f'Requires: {reqs_str}\n') + reqs_str = deps.format_reqs(deps.priv_reqs) + if len(reqs_str) > 0: + ofile.write(f'Requires.private: {reqs_str}\n') + if len(conflicts) > 0: + ofile.write('Conflicts: {}\n'.format(' '.join(conflicts))) + + def generate_libs_flags(libs): + msg = 'Library target {0!r} has {1!r} set. Compilers ' \ + 'may not find it from its \'-l{2}\' linker flag in the ' \ + '{3!r} pkg-config file.' 
+ Lflags = [] + for l in libs: + if isinstance(l, str): + yield l + else: + if uninstalled: + install_dir = os.path.dirname(state.backend.get_target_filename_abs(l)) + else: + install_dir = l.get_custom_install_dir()[0] + if install_dir is False: + continue + is_custom_target = isinstance(l, (build.CustomTarget, build.CustomTargetIndex)) + if not is_custom_target and 'cs' in l.compilers: + if isinstance(install_dir, str): + Lflag = '-r${{prefix}}/{}/{}'.format(self._escape(self._make_relative(prefix, install_dir)), l.filename) + else: # install_dir is True + Lflag = '-r${libdir}/%s' % l.filename + else: + if isinstance(install_dir, str): + Lflag = '-L${prefix}/%s' % self._escape(self._make_relative(prefix, install_dir)) + else: # install_dir is True + Lflag = '-L${libdir}' + if Lflag not in Lflags: + Lflags.append(Lflag) + yield Lflag + lname = self._get_lname(l, msg, pcfile, is_custom_target) + # If using a custom suffix, the compiler may not be able to + # find the library + if not is_custom_target and l.name_suffix_set: + mlog.warning(msg.format(l.name, 'name_suffix', lname, pcfile)) + if is_custom_target or 'cs' not in l.compilers: + yield '-l%s' % lname + + def get_uninstalled_include_dirs(libs): + result = [] + for l in libs: + if isinstance(l, (str, build.CustomTarget, build.CustomTargetIndex)): + continue + if l.get_subdir() not in result: + result.append(l.get_subdir()) + for i in l.get_include_dirs(): + curdir = i.get_curdir() + for d in i.get_incdirs(): + path = os.path.join(curdir, d) + if path not in result: + result.append(path) + return result + + def generate_uninstalled_cflags(libs): + for d in get_uninstalled_include_dirs(libs): + for basedir in ['${prefix}', '${srcdir}']: + path = PurePath(basedir, d) + yield '-I%s' % self._escape(path.as_posix()) + + if len(deps.pub_libs) > 0: + ofile.write('Libs: {}\n'.format(' '.join(generate_libs_flags(deps.pub_libs)))) + if len(deps.priv_libs) > 0: + ofile.write('Libs.private: {}\n'.format(' 
'.join(generate_libs_flags(deps.priv_libs)))) + + cflags = [] + if uninstalled: + cflags += generate_uninstalled_cflags(deps.pub_libs + deps.priv_libs) + else: + for d in subdirs: + if d == '.': + cflags.append('-I${includedir}') + else: + cflags.append(self._escape(PurePath('-I${includedir}') / d)) + cflags += [self._escape(f) for f in deps.cflags] + if cflags and not dataonly: + ofile.write('Cflags: {}\n'.format(' '.join(cflags))) + + @FeatureNewKwargs('pkgconfig.generate', '0.59.0', ['unescaped_variables', 'unescaped_uninstalled_variables']) + @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['uninstalled_variables']) + @FeatureNewKwargs('pkgconfig.generate', '0.42.0', ['extra_cflags']) + @FeatureNewKwargs('pkgconfig.generate', '0.41.0', ['variables']) + @FeatureNewKwargs('pkgconfig.generate', '0.54.0', ['dataonly']) + @permittedKwargs({'libraries', 'version', 'name', 'description', 'filebase', + 'subdirs', 'requires', 'requires_private', 'libraries_private', + 'install_dir', 'extra_cflags', 'variables', 'url', 'd_module_versions', + 'dataonly', 'conflicts', 'uninstalled_variables', + 'unescaped_variables', 'unescaped_uninstalled_variables'}) + def generate(self, state, args, kwargs): + default_version = state.project_version['version'] + default_install_dir = None + default_description = None + default_name = None + mainlib = None + default_subdirs = ['.'] + if not args and 'version' not in kwargs: + FeatureNew.single_use('pkgconfig.generate implicit version keyword', '0.46.0', state.subproject) + elif len(args) == 1: + FeatureNew.single_use('pkgconfig.generate optional positional argument', '0.46.0', state.subproject) + mainlib = args[0] + if not isinstance(mainlib, (build.StaticLibrary, build.SharedLibrary)): + raise mesonlib.MesonException('Pkgconfig_gen first positional argument must be a library object') + default_name = mainlib.name + default_description = state.project_name + ': ' + mainlib.name + install_dir = mainlib.get_custom_install_dir()[0] + if 
isinstance(install_dir, str): + default_install_dir = os.path.join(install_dir, 'pkgconfig') + elif len(args) > 1: + raise mesonlib.MesonException('Too many positional arguments passed to Pkgconfig_gen.') + + dataonly = kwargs.get('dataonly', False) + if not isinstance(dataonly, bool): + raise mesonlib.MesonException('dataonly must be boolean.') + if dataonly: + default_subdirs = [] + blocked_vars = ['libraries', 'libraries_private', 'require_private', 'extra_cflags', 'subdirs'] + if any(k in kwargs for k in blocked_vars): + raise mesonlib.MesonException(f'Cannot combine dataonly with any of {blocked_vars}') + + subdirs = mesonlib.stringlistify(kwargs.get('subdirs', default_subdirs)) + version = kwargs.get('version', default_version) + if not isinstance(version, str): + raise mesonlib.MesonException('Version must be specified.') + name = kwargs.get('name', default_name) + if not isinstance(name, str): + raise mesonlib.MesonException('Name not specified.') + filebase = kwargs.get('filebase', name) + if not isinstance(filebase, str): + raise mesonlib.MesonException('Filebase must be a string.') + description = kwargs.get('description', default_description) + if not isinstance(description, str): + raise mesonlib.MesonException('Description is not a string.') + url = kwargs.get('url', '') + if not isinstance(url, str): + raise mesonlib.MesonException('URL is not a string.') + conflicts = mesonlib.stringlistify(kwargs.get('conflicts', [])) + + # Prepend the main library to public libraries list. This is required + # so dep.add_pub_libs() can handle dependency ordering correctly and put + # extra libraries after the main library. 
+ libraries = mesonlib.extract_as_list(kwargs, 'libraries') + if mainlib: + libraries = [mainlib] + libraries + + deps = DependenciesHelper(state, filebase) + deps.add_pub_libs(libraries) + deps.add_priv_libs(kwargs.get('libraries_private', [])) + deps.add_pub_reqs(kwargs.get('requires', [])) + deps.add_priv_reqs(kwargs.get('requires_private', [])) + deps.add_cflags(kwargs.get('extra_cflags', [])) + + dversions = kwargs.get('d_module_versions', None) + if dversions: + compiler = state.environment.coredata.compilers.host.get('d') + if compiler: + deps.add_cflags(compiler.get_feature_args({'versions': dversions}, None)) + + deps.remove_dups() + + def parse_variable_list(vardict): + reserved = ['prefix', 'libdir', 'includedir'] + variables = [] + for name, value in vardict.items(): + if not dataonly and name in reserved: + raise mesonlib.MesonException(f'Variable "{name}" is reserved') + variables.append((name, value)) + return variables + + variables = self.interpreter.extract_variables(kwargs, dict_new=True) + variables = parse_variable_list(variables) + unescaped_variables = self.interpreter.extract_variables(kwargs, argname='unescaped_variables') + unescaped_variables = parse_variable_list(unescaped_variables) + + pcfile = filebase + '.pc' + pkgroot = kwargs.get('install_dir', default_install_dir) + if pkgroot is None: + if mesonlib.is_freebsd(): + pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('prefix')), 'libdata', 'pkgconfig') + else: + pkgroot = os.path.join(state.environment.coredata.get_option(mesonlib.OptionKey('libdir')), 'pkgconfig') + if not isinstance(pkgroot, str): + raise mesonlib.MesonException('Install_dir must be a string.') + self._generate_pkgconfig_file(state, deps, subdirs, name, description, url, + version, pcfile, conflicts, variables, + unescaped_variables, False, dataonly) + res = build.Data([mesonlib.File(True, state.environment.get_scratch_dir(), pcfile)], pkgroot, None, state.subproject) + variables = 
self.interpreter.extract_variables(kwargs, argname='uninstalled_variables', dict_new=True) + variables = parse_variable_list(variables) + unescaped_variables = self.interpreter.extract_variables(kwargs, argname='unescaped_uninstalled_variables') + unescaped_variables = parse_variable_list(unescaped_variables) + + pcfile = filebase + '-uninstalled.pc' + self._generate_pkgconfig_file(state, deps, subdirs, name, description, url, + version, pcfile, conflicts, variables, + unescaped_variables, uninstalled=True, dataonly=dataonly) + # Associate the main library with this generated pc file. If the library + # is used in any subsequent call to the generated, it will generate a + # 'Requires:' or 'Requires.private:'. + # Backward compatibility: We used to set 'generated_pc' on all public + # libraries instead of just the main one. Keep doing that but warn if + # anyone is relying on that deprecated behaviour. + if mainlib: + if not hasattr(mainlib, 'generated_pc'): + mainlib.generated_pc = filebase + else: + mlog.warning('Already generated a pkg-config file for', mlog.bold(mainlib.name)) + else: + for lib in deps.pub_libs: + if not isinstance(lib, str) and not hasattr(lib, 'generated_pc'): + lib.generated_pc = filebase + location = state.current_node + lib.generated_pc_warn = [name, location] + return ModuleReturnValue(res, [res]) + +def initialize(*args, **kwargs): + return PkgConfigModule(*args, **kwargs) diff --git a/meson/mesonbuild/modules/python.py b/meson/mesonbuild/modules/python.py new file mode 100644 index 000000000..c4ebbc77b --- /dev/null +++ b/meson/mesonbuild/modules/python.py @@ -0,0 +1,661 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import json +import shutil +import typing as T + +from pathlib import Path +from .. import mesonlib +from ..mesonlib import MachineChoice, MesonException +from . import ExtensionModule +from ..interpreterbase import ( + noPosargs, noKwargs, permittedKwargs, + InvalidArguments, + FeatureNew, FeatureNewKwargs, disablerIfNotFound +) +from ..interpreter import ExternalProgramHolder, extract_required_kwarg, permitted_dependency_kwargs +from ..build import known_shmod_kwargs +from .. import mlog +from ..environment import detect_cpu_family +from ..dependencies import DependencyMethods, PkgConfigDependency, NotFoundDependency, SystemDependency +from ..programs import ExternalProgram, NonExistingExternalProgram + +mod_kwargs = {'subdir'} +mod_kwargs.update(known_shmod_kwargs) +mod_kwargs -= {'name_prefix', 'name_suffix'} + +class PythonDependency(SystemDependency): + + def __init__(self, python_holder, environment, kwargs): + super().__init__('python', environment, kwargs) + self.name = 'python' + self.static = kwargs.get('static', False) + self.embed = kwargs.get('embed', False) + self.version = python_holder.version + self.platform = python_holder.platform + self.pkgdep = None + self.variables = python_holder.variables + self.paths = python_holder.paths + self.link_libpython = python_holder.link_libpython + self.info: T.Optional[T.Dict[str, str]] = None + if mesonlib.version_compare(self.version, '>= 3.0'): + self.major_version = 3 + else: + self.major_version = 2 + + # We first try to find the necessary python variables using pkgconfig + if 
DependencyMethods.PKGCONFIG in self.methods and not python_holder.is_pypy: + pkg_version = self.variables.get('LDVERSION') or self.version + pkg_libdir = self.variables.get('LIBPC') + pkg_embed = '-embed' if self.embed and mesonlib.version_compare(self.version, '>=3.8') else '' + pkg_name = f'python-{pkg_version}{pkg_embed}' + + # If python-X.Y.pc exists in LIBPC, we will try to use it + if pkg_libdir is not None and Path(os.path.join(pkg_libdir, f'{pkg_name}.pc')).is_file(): + old_pkg_libdir = os.environ.get('PKG_CONFIG_LIBDIR') + old_pkg_path = os.environ.get('PKG_CONFIG_PATH') + + os.environ.pop('PKG_CONFIG_PATH', None) + + if pkg_libdir: + os.environ['PKG_CONFIG_LIBDIR'] = pkg_libdir + + try: + self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs) + mlog.debug(f'Found "{pkg_name}" via pkgconfig lookup in LIBPC ({pkg_libdir})') + py_lookup_method = 'pkgconfig' + except MesonException as e: + mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir})') + mlog.debug(e) + + if old_pkg_path is not None: + os.environ['PKG_CONFIG_PATH'] = old_pkg_path + + if old_pkg_libdir is not None: + os.environ['PKG_CONFIG_LIBDIR'] = old_pkg_libdir + else: + os.environ.pop('PKG_CONFIG_LIBDIR', None) + else: + mlog.debug(f'"{pkg_name}" could not be found in LIBPC ({pkg_libdir}), this is likely due to a relocated python installation') + + # If lookup via LIBPC failed, try to use fallback PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH mechanisms + if self.pkgdep is None or not self.pkgdep.found(): + try: + self.pkgdep = PkgConfigDependency(pkg_name, environment, kwargs) + mlog.debug(f'Found "{pkg_name}" via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH') + py_lookup_method = 'pkgconfig-fallback' + except MesonException as e: + mlog.debug(f'"{pkg_name}" could not be found via fallback pkgconfig lookup in PKG_CONFIG_LIBDIR/PKG_CONFIG_PATH') + mlog.debug(e) + + if self.pkgdep and self.pkgdep.found(): + self.compile_args = self.pkgdep.get_compile_args() + 
self.link_args = self.pkgdep.get_link_args() + self.is_found = True + self.pcdep = self.pkgdep + else: + self.pkgdep = None + + # Finally, try to find python via SYSCONFIG as a final measure + if DependencyMethods.SYSCONFIG in self.methods: + if mesonlib.is_windows(): + self._find_libpy_windows(environment) + else: + self._find_libpy(python_holder, environment) + if self.is_found: + mlog.debug(f'Found "python-{self.version}" via SYSCONFIG module') + py_lookup_method = 'sysconfig' + + if self.is_found: + mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.green(f'YES ({py_lookup_method})')) + else: + mlog.log('Dependency', mlog.bold(self.name), 'found:', mlog.red('NO')) + + def _find_libpy(self, python_holder, environment): + if python_holder.is_pypy: + if self.major_version == 3: + libname = 'pypy3-c' + else: + libname = 'pypy-c' + libdir = os.path.join(self.variables.get('base'), 'bin') + libdirs = [libdir] + else: + libname = f'python{self.version}' + if 'DEBUG_EXT' in self.variables: + libname += self.variables['DEBUG_EXT'] + if 'ABIFLAGS' in self.variables: + libname += self.variables['ABIFLAGS'] + libdirs = [] + + largs = self.clib_compiler.find_library(libname, environment, libdirs) + if largs is not None: + self.link_args = largs + + self.is_found = largs is not None or self.link_libpython + + inc_paths = mesonlib.OrderedSet([ + self.variables.get('INCLUDEPY'), + self.paths.get('include'), + self.paths.get('platinclude')]) + + self.compile_args += ['-I' + path for path in inc_paths if path] + + def get_windows_python_arch(self): + if self.platform == 'mingw': + pycc = self.variables.get('CC') + if pycc.startswith('x86_64'): + return '64' + elif pycc.startswith(('i686', 'i386')): + return '32' + else: + mlog.log('MinGW Python built with unknown CC {!r}, please file' + 'a bug'.format(pycc)) + return None + elif self.platform == 'win32': + return '32' + elif self.platform in ('win64', 'win-amd64'): + return '64' + mlog.log(f'Unknown Windows Python 
platform {self.platform!r}') + return None + + def get_windows_link_args(self): + if self.platform.startswith('win'): + vernum = self.variables.get('py_version_nodot') + if self.static: + libpath = Path('libs') / f'libpython{vernum}.a' + else: + comp = self.get_compiler() + if comp.id == "gcc": + libpath = f'python{vernum}.dll' + else: + libpath = Path('libs') / f'python{vernum}.lib' + lib = Path(self.variables.get('base')) / libpath + elif self.platform == 'mingw': + if self.static: + libname = self.variables.get('LIBRARY') + else: + libname = self.variables.get('LDLIBRARY') + lib = Path(self.variables.get('LIBDIR')) / libname + if not lib.exists(): + mlog.log('Could not find Python3 library {!r}'.format(str(lib))) + return None + return [str(lib)] + + def _find_libpy_windows(self, env): + ''' + Find python3 libraries on Windows and also verify that the arch matches + what we are building for. + ''' + pyarch = self.get_windows_python_arch() + if pyarch is None: + self.is_found = False + return + arch = detect_cpu_family(env.coredata.compilers.host) + if arch == 'x86': + arch = '32' + elif arch == 'x86_64': + arch = '64' + else: + # We can't cross-compile Python 3 dependencies on Windows yet + mlog.log(f'Unknown architecture {arch!r} for', + mlog.bold(self.name)) + self.is_found = False + return + # Pyarch ends in '32' or '64' + if arch != pyarch: + mlog.log('Need', mlog.bold(self.name), 'for {}-bit, but ' + 'found {}-bit'.format(arch, pyarch)) + self.is_found = False + return + # This can fail if the library is not found + largs = self.get_windows_link_args() + if largs is None: + self.is_found = False + return + self.link_args = largs + # Compile args + inc_paths = mesonlib.OrderedSet([ + self.variables.get('INCLUDEPY'), + self.paths.get('include'), + self.paths.get('platinclude')]) + + self.compile_args += ['-I' + path for path in inc_paths if path] + + # https://sourceforge.net/p/mingw-w64/mailman/message/30504611/ + if pyarch == '64' and self.major_version == 
2: + self.compile_args += ['-DMS_WIN64'] + + self.is_found = True + + @staticmethod + def get_methods(): + if mesonlib.is_windows(): + return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG] + elif mesonlib.is_osx(): + return [DependencyMethods.PKGCONFIG, DependencyMethods.EXTRAFRAMEWORK] + else: + return [DependencyMethods.PKGCONFIG, DependencyMethods.SYSCONFIG] + + def get_pkgconfig_variable(self, variable_name, kwargs): + if self.pkgdep: + return self.pkgdep.get_pkgconfig_variable(variable_name, kwargs) + else: + return super().get_pkgconfig_variable(variable_name, kwargs) + + +INTROSPECT_COMMAND = '''import sysconfig +import json +import sys + +install_paths = sysconfig.get_paths(scheme='posix_prefix', vars={'base': '', 'platbase': '', 'installed_base': ''}) + +def links_against_libpython(): + from distutils.core import Distribution, Extension + cmd = Distribution().get_command_obj('build_ext') + cmd.ensure_finalized() + return bool(cmd.get_libraries(Extension('dummy', []))) + +print (json.dumps ({ + 'variables': sysconfig.get_config_vars(), + 'paths': sysconfig.get_paths(), + 'install_paths': install_paths, + 'sys_paths': sys.path, + 'version': sysconfig.get_python_version(), + 'platform': sysconfig.get_platform(), + 'is_pypy': '__pypy__' in sys.builtin_module_names, + 'link_libpython': links_against_libpython(), +})) +''' + +if T.TYPE_CHECKING: + class PythonIntrospectionDict(TypedDict): + + install_paths: T.Dict[str, str] + is_pypy: bool + link_libpython: bool + paths: T.Dict[str, str] + platform: str + suffix : str + variables: T.Dict[str, str] + version: str + +class PythonExternalProgram(ExternalProgram): + def __init__(self, name: str, command: T.Optional[T.List[str]] = None, ext_prog: T.Optional[ExternalProgram] = None): + if ext_prog is None: + super().__init__(name, command=command, silent=True) + else: + self.name = name + self.command = ext_prog.command + self.path = ext_prog.path + self.info: 'PythonIntrospectionDict' = { + 
'install_paths': {}, + 'is_pypy': False, + 'link_libpython': False, + 'paths': {}, + 'platform': 'sentinal', + 'variables': {}, + 'version': '0.0', + } + + def _check_version(self, version: str) -> bool: + if self.name == 'python2': + return mesonlib.version_compare(version, '< 3.0') + elif self.name == 'python3': + return mesonlib.version_compare(version, '>= 3.0') + return True + + def sanity(self) -> bool: + # Sanity check, we expect to have something that at least quacks in tune + cmd = self.get_command() + ['-c', INTROSPECT_COMMAND] + p, stdout, stderr = mesonlib.Popen_safe(cmd) + try: + info = json.loads(stdout) + except json.JSONDecodeError: + info = None + mlog.debug('Could not introspect Python (%s): exit code %d' % (str(p.args), p.returncode)) + mlog.debug('Program stdout:\n') + mlog.debug(stdout) + mlog.debug('Program stderr:\n') + mlog.debug(stderr) + + if info is not None and self._check_version(info['version']): + variables = info['variables'] + info['suffix'] = variables.get('EXT_SUFFIX') or variables.get('SO') or variables.get('.so') + self.info = T.cast('PythonIntrospectionDict', info) + self.platlib = self._get_path('platlib') + self.purelib = self._get_path('purelib') + return True + else: + return False + + def _get_path(self, key: str) -> None: + user_dir = str(Path.home()) + sys_paths = self.info['sys_paths'] + rel_path = self.info['install_paths'][key][1:] + if not any(p.endswith(rel_path) for p in sys_paths if not p.startswith(user_dir)): + # On Debian derivatives sysconfig install path is broken and is not + # included in the locations python actually lookup. + # See https://github.com/mesonbuild/meson/issues/8739. + mlog.warning('Broken python installation detected. 
Python files', + 'installed by Meson might not be found by python interpreter.', + once=True) + if mesonlib.is_debianlike(): + rel_path = 'lib/python3/dist-packages' + return rel_path + + +class PythonInstallation(ExternalProgramHolder): + def __init__(self, python, interpreter): + ExternalProgramHolder.__init__(self, python, interpreter) + info = python.info + prefix = self.interpreter.environment.coredata.get_option(mesonlib.OptionKey('prefix')) + self.variables = info['variables'] + self.suffix = info['suffix'] + self.paths = info['paths'] + self.platlib_install_path = os.path.join(prefix, python.platlib) + self.purelib_install_path = os.path.join(prefix, python.purelib) + self.version = info['version'] + self.platform = info['platform'] + self.is_pypy = info['is_pypy'] + self.link_libpython = info['link_libpython'] + self.methods.update({ + 'extension_module': self.extension_module_method, + 'dependency': self.dependency_method, + 'install_sources': self.install_sources_method, + 'get_install_dir': self.get_install_dir_method, + 'language_version': self.language_version_method, + 'found': self.found_method, + 'has_path': self.has_path_method, + 'get_path': self.get_path_method, + 'has_variable': self.has_variable_method, + 'get_variable': self.get_variable_method, + 'path': self.path_method, + }) + + @permittedKwargs(mod_kwargs) + def extension_module_method(self, args, kwargs): + if 'install_dir' in kwargs: + if 'subdir' in kwargs: + raise InvalidArguments('"subdir" and "install_dir" are mutually exclusive') + else: + subdir = kwargs.pop('subdir', '') + if not isinstance(subdir, str): + raise InvalidArguments('"subdir" argument must be a string.') + + kwargs['install_dir'] = os.path.join(self.platlib_install_path, subdir) + + # On macOS and some Linux distros (Debian) distutils doesn't link + # extensions against libpython. We call into distutils and mirror its + # behavior. 
See https://github.com/mesonbuild/meson/issues/4117 + if not self.link_libpython: + new_deps = [] + for dep in mesonlib.extract_as_list(kwargs, 'dependencies'): + if isinstance(dep, PythonDependency): + dep = dep.get_partial_dependency(compile_args=True) + new_deps.append(dep) + kwargs['dependencies'] = new_deps + + # msys2's python3 has "-cpython-36m.dll", we have to be clever + # FIXME: explain what the specific cleverness is here + split, suffix = self.suffix.rsplit('.', 1) + args[0] += split + + kwargs['name_prefix'] = '' + kwargs['name_suffix'] = suffix + + return self.interpreter.func_shared_module(None, args, kwargs) + + @permittedKwargs(permitted_dependency_kwargs | {'embed'}) + @FeatureNewKwargs('python_installation.dependency', '0.53.0', ['embed']) + def dependency_method(self, args, kwargs): + if args: + mlog.warning('python_installation.dependency() does not take any ' + 'positional arguments. It always returns a Python ' + 'dependency. This will become an error in the future.', + location=self.interpreter.current_node) + disabled, required, feature = extract_required_kwarg(kwargs, self.subproject) + if disabled: + mlog.log('Dependency', mlog.bold('python'), 'skipped: feature', mlog.bold(feature), 'disabled') + dep = NotFoundDependency(self.interpreter.environment) + else: + dep = PythonDependency(self, self.interpreter.environment, kwargs) + if required and not dep.found(): + raise mesonlib.MesonException('Python dependency not found') + return dep + + @permittedKwargs(['pure', 'subdir']) + def install_sources_method(self, args, kwargs): + pure = kwargs.pop('pure', True) + if not isinstance(pure, bool): + raise InvalidArguments('"pure" argument must be a boolean.') + + subdir = kwargs.pop('subdir', '') + if not isinstance(subdir, str): + raise InvalidArguments('"subdir" argument must be a string.') + + if pure: + kwargs['install_dir'] = os.path.join(self.purelib_install_path, subdir) + else: + kwargs['install_dir'] = 
os.path.join(self.platlib_install_path, subdir) + + return self.interpreter.func_install_data(None, args, kwargs) + + @noPosargs + @permittedKwargs(['pure', 'subdir']) + def get_install_dir_method(self, args, kwargs): + pure = kwargs.pop('pure', True) + if not isinstance(pure, bool): + raise InvalidArguments('"pure" argument must be a boolean.') + + subdir = kwargs.pop('subdir', '') + if not isinstance(subdir, str): + raise InvalidArguments('"subdir" argument must be a string.') + + if pure: + res = os.path.join(self.purelib_install_path, subdir) + else: + res = os.path.join(self.platlib_install_path, subdir) + + return res + + @noPosargs + @noKwargs + def language_version_method(self, args, kwargs): + return self.version + + @noKwargs + def has_path_method(self, args, kwargs): + if len(args) != 1: + raise InvalidArguments('has_path takes exactly one positional argument.') + path_name = args[0] + if not isinstance(path_name, str): + raise InvalidArguments('has_path argument must be a string.') + + return path_name in self.paths + + @noKwargs + def get_path_method(self, args, kwargs): + if len(args) not in (1, 2): + raise InvalidArguments('get_path must have one or two arguments.') + path_name = args[0] + if not isinstance(path_name, str): + raise InvalidArguments('get_path argument must be a string.') + + try: + path = self.paths[path_name] + except KeyError: + if len(args) == 2: + path = args[1] + else: + raise InvalidArguments(f'{path_name} is not a valid path name') + + return path + + @noKwargs + def has_variable_method(self, args, kwargs): + if len(args) != 1: + raise InvalidArguments('has_variable takes exactly one positional argument.') + var_name = args[0] + if not isinstance(var_name, str): + raise InvalidArguments('has_variable argument must be a string.') + + return var_name in self.variables + + @noKwargs + def get_variable_method(self, args, kwargs): + if len(args) not in (1, 2): + raise InvalidArguments('get_variable must have one or two arguments.') 
+ var_name = args[0] + if not isinstance(var_name, str): + raise InvalidArguments('get_variable argument must be a string.') + + try: + var = self.variables[var_name] + except KeyError: + if len(args) == 2: + var = args[1] + else: + raise InvalidArguments(f'{var_name} is not a valid variable name') + + return var + + @noPosargs + @noKwargs + @FeatureNew('Python module path method', '0.50.0') + def path_method(self, args, kwargs): + return super().path_method(args, kwargs) + + +class PythonModule(ExtensionModule): + + @FeatureNew('Python Module', '0.46.0') + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.methods.update({ + 'find_installation': self.find_installation, + }) + + # https://www.python.org/dev/peps/pep-0397/ + def _get_win_pythonpath(self, name_or_path): + if name_or_path not in ['python2', 'python3']: + return None + if not shutil.which('py'): + # program not installed, return without an exception + return None + ver = {'python2': '-2', 'python3': '-3'}[name_or_path] + cmd = ['py', ver, '-c', "import sysconfig; print(sysconfig.get_config_var('BINDIR'))"] + _, stdout, _ = mesonlib.Popen_safe(cmd) + directory = stdout.strip() + if os.path.exists(directory): + return os.path.join(directory, 'python') + else: + return None + + + @FeatureNewKwargs('python.find_installation', '0.49.0', ['disabler']) + @FeatureNewKwargs('python.find_installation', '0.51.0', ['modules']) + @disablerIfNotFound + @permittedKwargs({'required', 'modules'}) + def find_installation(self, state, args, kwargs): + feature_check = FeatureNew('Passing "feature" option to find_installation', '0.48.0') + disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, feature_check) + want_modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str] + found_modules = [] # type: T.List[str] + missing_modules = [] # type: T.List[str] + fallback = args[0] if args else '' + display_name = fallback or 'python' + + if len(args) > 1: + 
raise InvalidArguments('find_installation takes zero or one positional argument.') + + name_or_path = state.environment.lookup_binary_entry(MachineChoice.HOST, 'python') + if name_or_path is None and args: + name_or_path = fallback + if not isinstance(name_or_path, str): + raise InvalidArguments('find_installation argument must be a string.') + + if disabled: + mlog.log('Program', name_or_path or 'python', 'found:', mlog.red('NO'), '(disabled by:', mlog.bold(feature), ')') + return NonExistingExternalProgram() + + if not name_or_path: + python = PythonExternalProgram('python3', mesonlib.python_command) + else: + tmp_python = ExternalProgram.from_entry(display_name, name_or_path) + python = PythonExternalProgram(display_name, ext_prog=tmp_python) + + if not python.found() and mesonlib.is_windows(): + pythonpath = self._get_win_pythonpath(name_or_path) + if pythonpath is not None: + name_or_path = pythonpath + python = PythonExternalProgram(name_or_path) + + # Last ditch effort, python2 or python3 can be named python + # on various platforms, let's not give up just yet, if an executable + # named python is available and has a compatible version, let's use + # it + if not python.found() and name_or_path in ['python2', 'python3']: + python = PythonExternalProgram('python') + + if python.found() and want_modules: + for mod in want_modules: + p, out, err = mesonlib.Popen_safe( + python.command + + ['-c', f'import {mod}']) + if p.returncode != 0: + missing_modules.append(mod) + else: + found_modules.append(mod) + + msg = ['Program', python.name] + if want_modules: + msg.append('({})'.format(', '.join(want_modules))) + msg.append('found:') + if python.found() and not missing_modules: + msg.extend([mlog.green('YES'), '({})'.format(' '.join(python.command))]) + else: + msg.append(mlog.red('NO')) + if found_modules: + msg.append('modules:') + msg.append(', '.join(found_modules)) + + mlog.log(*msg) + + if not python.found(): + if required: + raise mesonlib.MesonException('{} 
not found'.format(name_or_path or 'python')) + return NonExistingExternalProgram() + elif missing_modules: + if required: + raise mesonlib.MesonException('{} is missing modules: {}'.format(name_or_path or 'python', ', '.join(missing_modules))) + return NonExistingExternalProgram() + else: + sane = python.sanity() + + if sane: + return python + else: + if required: + raise mesonlib.MesonException(f'{python} is not a valid python or it is missing distutils') + return NonExistingExternalProgram() + + raise mesonlib.MesonBugException('Unreachable code was reached (PythonModule.find_installation).') + + +def initialize(*args, **kwargs): + mod = PythonModule(*args, **kwargs) + mod.interpreter.append_holder_map(PythonExternalProgram, PythonInstallation) + return mod diff --git a/meson/mesonbuild/modules/python3.py b/meson/mesonbuild/modules/python3.py new file mode 100644 index 000000000..dc1f7c775 --- /dev/null +++ b/meson/mesonbuild/modules/python3.py @@ -0,0 +1,81 @@ +# Copyright 2016-2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sysconfig +from .. import mesonlib + +from . 
from . import ExtensionModule
from ..interpreterbase import noKwargs, permittedKwargs, FeatureDeprecated
from ..build import known_shmod_kwargs
from ..programs import ExternalProgram


class Python3Module(ExtensionModule):
    """Legacy `python3` Meson module (deprecated in favour of `python`)."""

    @FeatureDeprecated('python3 module', '0.48.0')
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.methods.update({
            'extension_module': self.extension_module,
            'find_python': self.find_python,
            'language_version': self.language_version,
            'sysconfig_path': self.sysconfig_path,
        })

    @permittedKwargs(known_shmod_kwargs)
    def extension_module(self, state, args, kwargs):
        """Build a Python 3 extension as a shared_module with the right suffix."""
        # Both naming kwargs are chosen by this module; reject user overrides.
        for forbidden in ('Name_prefix', 'Name_suffix'):
            if forbidden.lower() in kwargs:
                raise mesonlib.MesonException(f'{forbidden} is set automatically, specifying it is forbidden.')
        # Pick the platform-appropriate extension suffix; the empty-list
        # fallback tells the target to keep its native default.
        suffixes = {
            # Default suffix is 'dylib' but Python does not use it for extensions.
            'darwin': 'so',
            # On Windows the extension is pyd for some unexplainable reason.
            'windows': 'pyd',
        }
        kwargs['name_prefix'] = ''
        kwargs['name_suffix'] = suffixes.get(state.host_machine.system, [])
        return self.interpreter.func_shared_module(None, args, kwargs)

    @noKwargs
    def find_python(self, state, args, kwargs):
        """Return the python3 interpreter, preferring a machine-file entry."""
        entry = state.environment.lookup_binary_entry(mesonlib.MachineChoice.HOST, 'python3')
        if entry is None:
            return ExternalProgram('python3', mesonlib.python_command, silent=True)
        return ExternalProgram.from_entry('python3', entry)

    @noKwargs
    def language_version(self, state, args, kwargs):
        """Return the running interpreter's X.Y version string."""
        return sysconfig.get_python_version()

    @noKwargs
    def sysconfig_path(self, state, args, kwargs):
        """Return a prefix-relative sysconfig path (e.g. lib/python3.6/site-packages)."""
        if len(args) != 1:
            raise mesonlib.MesonException('sysconfig_path() requires passing the name of path to get.')
        requested = args[0]
        known = sysconfig.get_path_names()
        if requested not in known:
            raise mesonlib.MesonException(f'{requested} is not a valid path name {known}.')
        # Empty base vars give an absolute-looking path under '/'; drop the
        # leading slash to make it prefix-relative.
        return sysconfig.get_path(requested, vars={'base': '', 'platbase': '', 'installed_base': ''})[1:]


def initialize(*args, **kwargs):
    return Python3Module(*args, **kwargs)
+# See the License for the specific language governing permissions and +# limitations under the License. + +from mesonbuild import coredata +import os +import shutil +import typing as T +import xml.etree.ElementTree as ET + +from . import ModuleReturnValue, ExtensionModule +from .. import build +from .. import mlog +from ..dependencies import find_external_dependency, Dependency, ExternalLibrary +from ..mesonlib import MesonException, File, FileOrString, version_compare, Popen_safe +from . import ModuleReturnValue, ExtensionModule +from ..interpreter import extract_required_kwarg +from ..interpreterbase import ContainerTypeInfo, FeatureDeprecated, KwargInfo, noPosargs, FeatureNew, typed_kwargs +from ..programs import ExternalProgram, NonExistingExternalProgram + +if T.TYPE_CHECKING: + from . import ModuleState + from ..dependencies.qt import QtPkgConfigDependency, QmakeQtDependency + from ..interpreter import Interpreter + from ..interpreter import kwargs + + QtDependencyType = T.Union[QtPkgConfigDependency, QmakeQtDependency] + + from typing_extensions import TypedDict + + class ResourceCompilerKwArgs(TypedDict): + + """Keyword arguments for the Resource Compiler method.""" + + name: T.Optional[str] + sources: T.List[FileOrString] + extra_args: T.List[str] + method: str + + class UICompilerKwArgs(TypedDict): + + """Keyword arguments for the Ui Compiler method.""" + + sources: T.Sequence[T.Union[FileOrString, build.CustomTarget]] + extra_args: T.List[str] + method: str + + class MocCompilerKwArgs(TypedDict): + + """Keyword arguments for the Moc Compiler method.""" + + sources: T.List[T.Union[FileOrString, build.CustomTarget]] + headers: T.List[T.Union[FileOrString, build.CustomTarget]] + extra_args: T.List[str] + method: str + include_directories: T.List[T.Union[str, build.IncludeDirs]] + dependencies: T.List[T.Union[Dependency, ExternalLibrary]] + + class PreprocessKwArgs(TypedDict): + + sources: T.List[FileOrString] + moc_sources: T.List[T.Union[FileOrString, 
build.CustomTarget]] + moc_headers: T.List[T.Union[FileOrString, build.CustomTarget]] + qresources: T.List[FileOrString] + ui_files: T.List[T.Union[FileOrString, build.CustomTarget]] + moc_extra_arguments: T.List[str] + rcc_extra_arguments: T.List[str] + uic_extra_arguments: T.List[str] + include_directories: T.List[T.Union[str, build.IncludeDirs]] + dependencies: T.List[T.Union[Dependency, ExternalLibrary]] + method: str + + class HasToolKwArgs(kwargs.ExtractRequired): + + method: str + + class CompileTranslationsKwArgs(TypedDict): + + build_by_default: bool + install: bool + install_dir: T.Optional[str] + method: str + qresource: T.Optional[str] + rcc_extra_arguments: T.List[str] + ts_files: T.List[str] + + +class QtBaseModule(ExtensionModule): + _tools_detected = False + _rcc_supports_depfiles = False + + def __init__(self, interpreter: 'Interpreter', qt_version: int = 5): + ExtensionModule.__init__(self, interpreter) + self.qt_version = qt_version + self.tools: T.Dict[str, ExternalProgram] = { + 'moc': NonExistingExternalProgram('moc'), + 'uic': NonExistingExternalProgram('uic'), + 'rcc': NonExistingExternalProgram('rcc'), + 'lrelease': NonExistingExternalProgram('lrelease'), + } + self.methods.update({ + 'has_tools': self.has_tools, + 'preprocess': self.preprocess, + 'compile_translations': self.compile_translations, + 'compile_resources': self.compile_resources, + 'compile_ui': self.compile_ui, + 'compile_moc': self.compile_moc, + }) + + def compilers_detect(self, state: 'ModuleState', qt_dep: 'QtDependencyType') -> None: + """Detect Qt (4 or 5) moc, uic, rcc in the specified bindir or in PATH""" + # It is important that this list does not change order as the order of + # the returned ExternalPrograms will change as well + wanted = f'== {qt_dep.version}' + + def gen_bins() -> T.Generator[T.Tuple[str, str], None, None]: + for b in self.tools: + if qt_dep.bindir: + yield os.path.join(qt_dep.bindir, b), b + # prefer the <tool>-qt<version> of the tool to the 
plain one, as we + # don't know what the unsuffixed one points to without calling it. + yield f'{b}-qt{qt_dep.qtver}', b + yield b, b + + for b, name in gen_bins(): + if self.tools[name].found(): + continue + + if name == 'lrelease': + arg = ['-version'] + elif version_compare(qt_dep.version, '>= 5'): + arg = ['--version'] + else: + arg = ['-v'] + + # Ensure that the version of qt and each tool are the same + def get_version(p: ExternalProgram) -> str: + _, out, err = Popen_safe(p.get_command() + arg) + if b.startswith('lrelease') or not qt_dep.version.startswith('4'): + care = out + else: + care = err + return care.split(' ')[-1].replace(')', '').strip() + + p = state.find_program(b, required=False, + version_func=get_version, + wanted=wanted) + if p.found(): + self.tools[name] = p + + def _detect_tools(self, state: 'ModuleState', method: str, required: bool = True) -> None: + if self._tools_detected: + return + self._tools_detected = True + mlog.log(f'Detecting Qt{self.qt_version} tools') + kwargs = {'required': required, 'modules': 'Core', 'method': method} + # Just pick one to make mypy happy + qt = T.cast('QtPkgConfigDependency', find_external_dependency(f'qt{self.qt_version}', state.environment, kwargs)) + if qt.found(): + # Get all tools and then make sure that they are the right version + self.compilers_detect(state, qt) + if version_compare(qt.version, '>=5.14.0'): + self._rcc_supports_depfiles = True + else: + mlog.warning('rcc dependencies will not work properly until you move to Qt >= 5.14:', + mlog.bold('https://bugreports.qt.io/browse/QTBUG-45460'), fatal=False) + else: + suffix = f'-qt{self.qt_version}' + self.tools['moc'] = NonExistingExternalProgram(name='moc' + suffix) + self.tools['uic'] = NonExistingExternalProgram(name='uic' + suffix) + self.tools['rcc'] = NonExistingExternalProgram(name='rcc' + suffix) + self.tools['lrelease'] = NonExistingExternalProgram(name='lrelease' + suffix) + + @staticmethod + def _qrc_nodes(state: 'ModuleState', 
rcc_file: 'FileOrString') -> T.Tuple[str, T.List[str]]: + abspath: str + if isinstance(rcc_file, str): + abspath = os.path.join(state.environment.source_dir, state.subdir, rcc_file) + rcc_dirname = os.path.dirname(abspath) + else: + abspath = rcc_file.absolute_path(state.environment.source_dir, state.environment.build_dir) + rcc_dirname = os.path.dirname(abspath) + + # FIXME: what error are we actually tring to check here? + try: + tree = ET.parse(abspath) + root = tree.getroot() + result: T.List[str] = [] + for child in root[0]: + if child.tag != 'file': + mlog.warning("malformed rcc file: ", os.path.join(state.subdir, str(rcc_file))) + break + else: + result.append(child.text) + + return rcc_dirname, result + except Exception: + raise MesonException(f'Unable to parse resource file {abspath}') + + def _parse_qrc_deps(self, state: 'ModuleState', rcc_file: 'FileOrString') -> T.List[File]: + rcc_dirname, nodes = self._qrc_nodes(state, rcc_file) + result: T.List[File] = [] + for resource_path in nodes: + # We need to guess if the pointed resource is: + # a) in build directory -> implies a generated file + # b) in source directory + # c) somewhere else external dependency file to bundle + # + # Also from qrc documentation: relative path are always from qrc file + # So relative path must always be computed from qrc file ! 
+ if os.path.isabs(resource_path): + # a) + if resource_path.startswith(os.path.abspath(state.environment.build_dir)): + resource_relpath = os.path.relpath(resource_path, state.environment.build_dir) + result.append(File(is_built=True, subdir='', fname=resource_relpath)) + # either b) or c) + else: + result.append(File(is_built=False, subdir=state.subdir, fname=resource_path)) + else: + path_from_rcc = os.path.normpath(os.path.join(rcc_dirname, resource_path)) + # a) + if path_from_rcc.startswith(state.environment.build_dir): + result.append(File(is_built=True, subdir=state.subdir, fname=resource_path)) + # b) + else: + result.append(File(is_built=False, subdir=state.subdir, fname=path_from_rcc)) + return result + + @FeatureNew('qt.has_tools', '0.54.0') + @noPosargs + @typed_kwargs( + 'qt.has_tools', + KwargInfo('required', (bool, coredata.UserFeatureOption), default=False), + KwargInfo('method', str, default='auto'), + ) + def has_tools(self, state: 'ModuleState', args: T.Tuple, kwargs: 'HasToolKwArgs') -> bool: + method = kwargs.get('method', 'auto') + # We have to cast here because TypedDicts are invariant, even though + # ExtractRequiredKwArgs is a subset of HasToolKwArgs, type checkers + # will insist this is wrong + disabled, required, feature = extract_required_kwarg(kwargs, state.subproject, default=False) + if disabled: + mlog.log('qt.has_tools skipped: feature', mlog.bold(feature), 'disabled') + return False + self._detect_tools(state, method, required=False) + for tool in self.tools.values(): + if not tool.found(): + if required: + raise MesonException('Qt tools not found') + return False + return True + + @FeatureNew('qt.compile_resources', '0.59.0') + @noPosargs + @typed_kwargs( + 'qt.compile_resources', + KwargInfo('name', str), + KwargInfo('sources', ContainerTypeInfo(list, (File, str), allow_empty=False), listify=True, required=True), + KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('method', str, 
default='auto') + ) + def compile_resources(self, state: 'ModuleState', args: T.Tuple, kwargs: 'ResourceCompilerKwArgs') -> ModuleReturnValue: + """Compile Qt resources files. + + Uses CustomTargets to generate .cpp files from .qrc files. + """ + out = self._compile_resources_impl(state, kwargs) + return ModuleReturnValue(out, [out]) + + def _compile_resources_impl(self, state: 'ModuleState', kwargs: 'ResourceCompilerKwArgs') -> T.List[build.CustomTarget]: + # Avoid the FeatureNew when dispatching from preprocess + self._detect_tools(state, kwargs['method']) + if not self.tools['rcc'].found(): + err_msg = ("{0} sources specified and couldn't find {1}, " + "please check your qt{2} installation") + raise MesonException(err_msg.format('RCC', f'rcc-qt{self.qt_version}', self.qt_version)) + + # List of generated CustomTargets + targets: T.List[build.CustomTarget] = [] + + # depfile arguments + DEPFILE_ARGS: T.List[str] = ['--depfile', '@DEPFILE@'] if self._rcc_supports_depfiles else [] + + name = kwargs['name'] + sources = kwargs['sources'] + extra_args = kwargs['extra_args'] + + # If a name was set generate a single .cpp file from all of the qrc + # files, otherwise generate one .cpp file per qrc file. + if name: + qrc_deps: T.List[File] = [] + for s in sources: + qrc_deps.extend(self._parse_qrc_deps(state, s)) + + rcc_kwargs: T.Dict[str, T.Any] = { # TODO: if CustomTarget had typing information we could use that here... 
+ 'input': sources, + 'output': name + '.cpp', + 'command': self.tools['rcc'].get_command() + ['-name', name, '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS, + 'depend_files': qrc_deps, + 'depfile': f'{name}.d', + } + res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) + targets.append(res_target) + else: + for rcc_file in sources: + qrc_deps = self._parse_qrc_deps(state, rcc_file) + if isinstance(rcc_file, str): + basename = os.path.basename(rcc_file) + else: + basename = os.path.basename(rcc_file.fname) + name = f'qt{self.qt_version}-{basename.replace(".", "_")}' + rcc_kwargs = { + 'input': rcc_file, + 'output': f'{name}.cpp', + 'command': self.tools['rcc'].get_command() + ['-name', '@BASENAME@', '-o', '@OUTPUT@'] + extra_args + ['@INPUT@'] + DEPFILE_ARGS, + 'depend_files': qrc_deps, + 'depfile': f'{name}.d', + } + res_target = build.CustomTarget(name, state.subdir, state.subproject, rcc_kwargs) + targets.append(res_target) + + return targets + + @FeatureNew('qt.compile_ui', '0.59.0') + @noPosargs + @typed_kwargs( + 'qt.compile_ui', + KwargInfo('sources', ContainerTypeInfo(list, (File, str), allow_empty=False), listify=True, required=True), + KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('method', str, default='auto') + ) + def compile_ui(self, state: 'ModuleState', args: T.Tuple, kwargs: 'UICompilerKwArgs') -> ModuleReturnValue: + """Compile UI resources into cpp headers.""" + out = self._compile_ui_impl(state, kwargs) + return ModuleReturnValue(out, [out]) + + def _compile_ui_impl(self, state: 'ModuleState', kwargs: 'UICompilerKwArgs') -> build.GeneratedList: + # Avoid the FeatureNew when dispatching from preprocess + self._detect_tools(state, kwargs['method']) + if not self.tools['uic'].found(): + err_msg = ("{0} sources specified and couldn't find {1}, " + "please check your qt{2} installation") + raise MesonException(err_msg.format('UIC', f'uic-qt{self.qt_version}', 
self.qt_version)) + + # TODO: This generator isn't added to the generator list in the Interpreter + gen = build.Generator( + self.tools['uic'], + kwargs['extra_args'] + ['-o', '@OUTPUT@', '@INPUT@'], + ['ui_@BASENAME@.h'], + name=f'Qt{self.qt_version} ui') + return gen.process_files(kwargs['sources'], state) + + @FeatureNew('qt.compile_moc', '0.59.0') + @noPosargs + @typed_kwargs( + 'qt.compile_moc', + KwargInfo('sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('extra_args', ContainerTypeInfo(list, str), listify=True, default=[]), + KwargInfo('method', str, default='auto'), + KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]), + KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]), + ) + def compile_moc(self, state: 'ModuleState', args: T.Tuple, kwargs: 'MocCompilerKwArgs') -> ModuleReturnValue: + out = self._compile_moc_impl(state, kwargs) + return ModuleReturnValue(out, [out]) + + def _compile_moc_impl(self, state: 'ModuleState', kwargs: 'MocCompilerKwArgs') -> T.List[build.GeneratedList]: + # Avoid the FeatureNew when dispatching from preprocess + self._detect_tools(state, kwargs['method']) + if not self.tools['moc'].found(): + err_msg = ("{0} sources specified and couldn't find {1}, " + "please check your qt{2} installation") + raise MesonException(err_msg.format('MOC', f'uic-qt{self.qt_version}', self.qt_version)) + + if not (kwargs['headers'] or kwargs['sources']): + raise build.InvalidArguments('At least one of the "headers" or "sources" keyword arguments must be provied and not empty') + + inc = state.get_include_args(include_dirs=kwargs['include_directories']) + compile_args: T.List[str] = [] + for dep in kwargs['dependencies']: + compile_args.extend([a for a in 
dep.get_all_compile_args() if a.startswith(('-I', '-D'))]) + + output: T.List[build.GeneratedList] = [] + + arguments = kwargs['extra_args'] + inc + compile_args + ['@INPUT@', '-o', '@OUTPUT@'] + if kwargs['headers']: + moc_gen = build.Generator( + self.tools['moc'], arguments, ['moc_@BASENAME@.cpp'], + name=f'Qt{self.qt_version} moc header') + output.append(moc_gen.process_files(kwargs['headers'], state)) + if kwargs['sources']: + moc_gen = build.Generator( + self.tools['moc'], arguments, ['@BASENAME@.moc'], + name=f'Qt{self.qt_version} moc source') + output.append(moc_gen.process_files(kwargs['sources'], state)) + + return output + + # We can't use typed_pos_args here, the signature is ambiguious + @typed_kwargs( + 'qt.preprocess', + KwargInfo('sources', ContainerTypeInfo(list, (File, str)), listify=True, default=[], deprecated='0.59.0'), + KwargInfo('qresources', ContainerTypeInfo(list, (File, str)), listify=True, default=[]), + KwargInfo('ui_files', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('moc_sources', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('moc_headers', ContainerTypeInfo(list, (File, str, build.CustomTarget)), listify=True, default=[]), + KwargInfo('moc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.44.0'), + KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'), + KwargInfo('uic_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.49.0'), + KwargInfo('method', str, default='auto'), + KwargInfo('include_directories', ContainerTypeInfo(list, (build.IncludeDirs, str)), listify=True, default=[]), + KwargInfo('dependencies', ContainerTypeInfo(list, (Dependency, ExternalLibrary)), listify=True, default=[]), + ) + def preprocess(self, state: 'ModuleState', args: T.List[T.Union[str, File]], kwargs: 'PreprocessKwArgs') -> 
ModuleReturnValue: + _sources = args[1:] + if _sources: + FeatureDeprecated.single_use('qt.preprocess positional sources', '0.59', state.subproject) + # List is invariant, os we have to cast... + sources = T.cast(T.List[T.Union[str, File, build.GeneratedList, build.CustomTarget]], + _sources + kwargs['sources']) + for s in sources: + if not isinstance(s, (str, File)): + raise build.InvalidArguments('Variadic arguments to qt.preprocess must be Strings or Files') + method = kwargs['method'] + + if kwargs['qresources']: + # custom output name set? -> one output file, multiple otherwise + rcc_kwargs: 'ResourceCompilerKwArgs' = {'name': '', 'sources': kwargs['qresources'], 'extra_args': kwargs['rcc_extra_arguments'], 'method': method} + if args: + name = args[0] + if not isinstance(name, str): + raise build.InvalidArguments('First argument to qt.preprocess must be a string') + rcc_kwargs['name'] = name + sources.extend(self._compile_resources_impl(state, rcc_kwargs)) + + if kwargs['ui_files']: + ui_kwargs: 'UICompilerKwArgs' = {'sources': kwargs['ui_files'], 'extra_args': kwargs['uic_extra_arguments'], 'method': method} + sources.append(self._compile_ui_impl(state, ui_kwargs)) + + if kwargs['moc_headers'] or kwargs['moc_sources']: + moc_kwargs: 'MocCompilerKwArgs' = { + 'extra_args': kwargs['moc_extra_arguments'], + 'sources': kwargs['moc_sources'], + 'headers': kwargs['moc_headers'], + 'include_directories': kwargs['include_directories'], + 'dependencies': kwargs['dependencies'], + 'method': method, + } + sources.extend(self._compile_moc_impl(state, moc_kwargs)) + + return ModuleReturnValue(sources, [sources]) + + @FeatureNew('qt.compile_translations', '0.44.0') + @noPosargs + @typed_kwargs( + 'qt.compile_translations', + KwargInfo('build_by_default', bool, default=False), + KwargInfo('install', bool, default=False), + KwargInfo('install_dir', str), + KwargInfo('method', str, default='auto'), + KwargInfo('qresource', str, since='0.56.0'), + 
KwargInfo('rcc_extra_arguments', ContainerTypeInfo(list, str), listify=True, default=[], since='0.56.0'), + KwargInfo('ts_files', ContainerTypeInfo(list, (str, File)), listify=True, default=[]), + ) + def compile_translations(self, state: 'ModuleState', args: T.Tuple, kwargs: 'CompileTranslationsKwArgs') -> ModuleReturnValue: + ts_files = kwargs['ts_files'] + install_dir = kwargs['install_dir'] + qresource = kwargs['qresource'] + if qresource: + if ts_files: + raise MesonException('qt.compile_translations: Cannot specify both ts_files and qresource') + if os.path.dirname(qresource) != '': + raise MesonException('qt.compile_translations: qresource file name must not contain a subdirectory.') + qresource_file = File.from_built_file(state.subdir, qresource) + infile_abs = os.path.join(state.environment.source_dir, qresource_file.relative_name()) + outfile_abs = os.path.join(state.environment.build_dir, qresource_file.relative_name()) + os.makedirs(os.path.dirname(outfile_abs), exist_ok=True) + shutil.copy2(infile_abs, outfile_abs) + self.interpreter.add_build_def_file(infile_abs) + + _, nodes = self._qrc_nodes(state, qresource_file) + for c in nodes: + if c.endswith('.qm'): + ts_files.append(c.rstrip('.qm') + '.ts') + else: + raise MesonException(f'qt.compile_translations: qresource can only contain qm files, found {c}') + results = self.preprocess(state, [], {'qresources': qresource_file, 'rcc_extra_arguments': kwargs['rcc_extra_arguments']}) + self._detect_tools(state, kwargs['method']) + translations: T.List[build.CustomTarget] = [] + for ts in ts_files: + if not self.tools['lrelease'].found(): + raise MesonException('qt.compile_translations: ' + + self.tools['lrelease'].name + ' not found') + if qresource: + outdir = os.path.dirname(os.path.normpath(os.path.join(state.subdir, ts))) + ts = os.path.basename(ts) + else: + outdir = state.subdir + cmd = [self.tools['lrelease'], '@INPUT@', '-qm', '@OUTPUT@'] + lrelease_kwargs = {'output': '@BASENAME@.qm', + 'input': ts, 
+ 'install': kwargs.get('install', False), + 'build_by_default': kwargs.get('build_by_default', False), + 'command': cmd} + if install_dir is not None: + lrelease_kwargs['install_dir'] = install_dir + lrelease_target = build.CustomTarget(f'qt{self.qt_version}-compile-{ts}', outdir, state.subproject, lrelease_kwargs) + translations.append(lrelease_target) + if qresource: + return ModuleReturnValue(results.return_value[0], [results.new_objects, translations]) + else: + return ModuleReturnValue(translations, [translations]) diff --git a/meson/mesonbuild/modules/qt4.py b/meson/mesonbuild/modules/qt4.py new file mode 100644 index 000000000..e85a1506f --- /dev/null +++ b/meson/mesonbuild/modules/qt4.py @@ -0,0 +1,25 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .qt import QtBaseModule + + +class Qt4Module(QtBaseModule): + + def __init__(self, interpreter): + QtBaseModule.__init__(self, interpreter, qt_version=4) + + +def initialize(*args, **kwargs): + return Qt4Module(*args, **kwargs) diff --git a/meson/mesonbuild/modules/qt5.py b/meson/mesonbuild/modules/qt5.py new file mode 100644 index 000000000..873c2dbeb --- /dev/null +++ b/meson/mesonbuild/modules/qt5.py @@ -0,0 +1,25 @@ +# Copyright 2015 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .qt import QtBaseModule


class Qt5Module(QtBaseModule):
    """Qt 5 flavour of the shared Qt module implementation."""

    def __init__(self, interpreter):
        super().__init__(interpreter, qt_version=5)


def initialize(*args, **kwargs):
    """Module entry point called by the Meson module loader."""
    return Qt5Module(*args, **kwargs)

# ---- meson/mesonbuild/modules/qt6.py ----
# Copyright 2020 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .qt import QtBaseModule


class Qt6Module(QtBaseModule):
    """Qt 6 flavour of the shared Qt module implementation."""

    def __init__(self, interpreter):
        super().__init__(interpreter, qt_version=6)


def initialize(*args, **kwargs):
    """Module entry point called by the Meson module loader."""
    return Qt6Module(*args, **kwargs)

# ---- meson/mesonbuild/modules/rpm.py ----
# Copyright 2015 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

'''This module provides helper functions for RPM related
functionality such as generating template RPM spec file.'''

from .. import build
from .. import compilers
import datetime
from .. import mlog
from . import GirTarget, TypelibTarget
from . import ExtensionModule
from ..interpreterbase import noKwargs

import os

class RPMModule(ExtensionModule):
    """Meson module that emits a skeleton RPM .spec file for the project."""

    def __init__(self, interpreter):
        super().__init__(interpreter)
        self.methods.update({
            'generate_spec_template': self.generate_spec_template,
        })

    @noKwargs
    def generate_spec_template(self, state, args, kwargs):
        """Write '<project>.spec' into the build directory.

        Walks the project's install targets, headers and man pages to
        populate %files / %files devel sections; FIXME markers are left
        where the packager must fill in real values.
        """
        required_compilers = self.__get_required_compilers(state)
        proj = state.project_name.replace(' ', '_').replace('\t', '_')
        so_installed = False
        devel_subpkg = False
        files = set()
        files_devel = set()
        to_delete = set()
        for target in state.targets.values():
            if isinstance(target, build.Executable) and target.need_install:
                files.add('%%{_bindir}/%s' % target.get_filename())
            elif isinstance(target, build.SharedLibrary) and target.need_install:
                files.add('%%{_libdir}/%s' % target.get_filename())
                for alias in target.get_aliases():
                    if alias.endswith('.so'):
                        # Unversioned .so symlink belongs in the -devel package.
                        files_devel.add('%%{_libdir}/%s' % alias)
                    else:
                        files.add('%%{_libdir}/%s' % alias)
                so_installed = True
            elif isinstance(target, build.StaticLibrary) and target.need_install:
                to_delete.add('%%{buildroot}%%{_libdir}/%s' % target.get_filename())
                mlog.warning('removing', mlog.bold(target.get_filename()),
                             'from package because packaging static libs not recommended')
            elif isinstance(target, GirTarget) and target.should_install():
                files_devel.add('%%{_datadir}/gir-1.0/%s' % target.get_filename()[0])
            elif isinstance(target, TypelibTarget) and target.should_install():
                files.add('%%{_libdir}/girepository-1.0/%s' % target.get_filename()[0])
        for header in state.headers:
            if header.get_install_subdir():
                files_devel.add('%%{_includedir}/%s/' % header.get_install_subdir())
            else:
                for hdr_src in header.get_sources():
                    files_devel.add('%%{_includedir}/%s' % hdr_src)
        for man in state.man:
            for man_file in man.get_sources():
                if man.locale:
                    files.add('%%{_mandir}/%s/man%u/%s.*' % (man.locale, int(man_file.split('.')[-1]), man_file))
                else:
                    files.add('%%{_mandir}/man%u/%s.*' % (int(man_file.split('.')[-1]), man_file))
        if files_devel:
            devel_subpkg = True

        filename = os.path.join(state.environment.get_build_dir(),
                                '%s.spec' % proj)
        with open(filename, 'w+', encoding='utf-8') as fn:
            fn.write('Name: %s\n' % proj)
            fn.write('Version: # FIXME\n')
            fn.write('Release: 1%{?dist}\n')
            fn.write('Summary: # FIXME\n')
            fn.write('License: # FIXME\n')
            fn.write('\n')
            fn.write('Source0: %{name}-%{version}.tar.xz # FIXME\n')
            fn.write('\n')
            fn.write('BuildRequires: meson\n')
            for compiler in required_compilers:
                fn.write('BuildRequires: %s\n' % compiler)
            for dep in state.environment.coredata.deps.host:
                fn.write('BuildRequires: pkgconfig(%s)\n' % dep[0])
# ext_libs and ext_progs have been removed from coredata so the following code
# no longer works. It is kept as a reminder of the idea should anyone wish
# to re-implement it.
#
#            for lib in state.environment.coredata.ext_libs.values():
#                name = lib.get_name()
#                fn.write('BuildRequires: {} # FIXME\n'.format(name))
#                mlog.warning('replace', mlog.bold(name), 'with the real package.',
#                             'You can use following command to find package which '
#                             'contains this lib:',
#                             mlog.bold("dnf provides '*/lib{}.so'".format(name)))
#            for prog in state.environment.coredata.ext_progs.values():
#                if not prog.found():
#                    fn.write('BuildRequires: %%{_bindir}/%s # FIXME\n' %
#                             prog.get_name())
#                else:
#                    fn.write('BuildRequires: {}\n'.format(prog.get_path()))
            fn.write('\n')
            fn.write('%description\n')
            fn.write('\n')
            if devel_subpkg:
                fn.write('%package devel\n')
                fn.write('Summary: Development files for %{name}\n')
                # Fixed: the original template wrote '...%{?epoch:%{epoch}:}{version}...'
                # which is an invalid Requires line; 'version' needs its own
                # '%' to expand as an RPM macro.
                fn.write('Requires: %{name}%{?_isa} = %{?epoch:%{epoch}:}%{version}-%{release}\n')
                fn.write('\n')
                fn.write('%description devel\n')
                fn.write('Development files for %{name}.\n')
                fn.write('\n')
            fn.write('%prep\n')
            fn.write('%autosetup\n')
            fn.write('\n')
            fn.write('%build\n')
            fn.write('%meson\n')
            fn.write('%meson_build\n')
            fn.write('\n')
            fn.write('%install\n')
            fn.write('%meson_install\n')
            if to_delete:
                fn.write('rm -vf %s\n' % ' '.join(to_delete))
            fn.write('\n')
            fn.write('%check\n')
            fn.write('%meson_test\n')
            fn.write('\n')
            fn.write('%files\n')
            for f in files:
                fn.write('%s\n' % f)
            fn.write('\n')
            if devel_subpkg:
                fn.write('%files devel\n')
                for f in files_devel:
                    fn.write('%s\n' % f)
                fn.write('\n')
            if so_installed:
                fn.write('%post -p /sbin/ldconfig\n')
                fn.write('%postun -p /sbin/ldconfig\n')
            fn.write('\n')
            fn.write('%changelog\n')
            fn.write('* %s meson <meson@example.com> - \n' %
                     datetime.date.today().strftime('%a %b %d %Y'))
            fn.write('- \n')
            fn.write('\n')
        mlog.log('RPM spec template written to %s.spec.\n' % proj)

    def __get_required_compilers(self, state):
        """Map the project's host compilers to Fedora-style package names."""
        required_compilers = set()
        for compiler in state.environment.coredata.compilers.host.values():
            # Elbrus has one 'lcc' package for every compiler
            if isinstance(compiler, compilers.GnuCCompiler):
                required_compilers.add('gcc')
            elif isinstance(compiler, compilers.GnuCPPCompiler):
                required_compilers.add('gcc-c++')
            elif isinstance(compiler, compilers.ElbrusCCompiler):
                required_compilers.add('lcc')
            elif isinstance(compiler, compilers.ElbrusCPPCompiler):
                required_compilers.add('lcc')
            elif isinstance(compiler, compilers.ElbrusFortranCompiler):
                required_compilers.add('lcc')
            elif isinstance(compiler, compilers.ValaCompiler):
                required_compilers.add('vala')
            elif isinstance(compiler, compilers.GnuFortranCompiler):
                required_compilers.add('gcc-gfortran')
            elif isinstance(compiler, compilers.GnuObjCCompiler):
                required_compilers.add('gcc-objc')
            # Fixed: the original compared the *instance* to the *class* with
            # '==' ('compiler == compilers.GnuObjCPPCompiler'), which is never
            # true; use isinstance like every other branch.
            elif isinstance(compiler, compilers.GnuObjCPPCompiler):
                required_compilers.add('gcc-objc++')
            else:
                mlog.log('RPM spec file not created, generation not allowed for:',
                         mlog.bold(compiler.get_id()))
        return required_compilers


def initialize(*args, **kwargs):
    return RPMModule(*args, **kwargs)
# ---- meson/mesonbuild/modules/sourceset.py ----
# Copyright 2019 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from collections import namedtuple
from .. import mesonlib
from .. import build
from ..mesonlib import listify, OrderedSet
from . import ExtensionModule, ModuleObject, MutableModuleObject
from ..interpreterbase import (
    noPosargs, noKwargs, permittedKwargs,
    InterpreterException, InvalidArguments, InvalidCode, FeatureNew,
)

# One conditional rule: 'sources' are used when all 'keys' are enabled and
# all 'dependencies' are found; 'if_false' otherwise; 'sourcesets' are
# nested sets pulled in under the same condition.
SourceSetRule = namedtuple('SourceSetRule', 'keys sources if_false sourcesets dependencies extra_deps')
SourceFiles = namedtuple('SourceFiles', 'sources dependencies')

class SourceSet(MutableModuleObject):
    """A set of sources gated on configuration keys and dependencies.

    Rules are accumulated with add()/add_all() and resolved by the query
    methods; the first query freezes the set against further mutation.
    """

    def __init__(self, interpreter):
        super().__init__()
        self.rules = []
        self.subproject = interpreter.subproject
        self.environment = interpreter.environment
        self.subdir = interpreter.subdir
        # Set on first query; mutation afterwards is an error.
        self.frozen = False
        self.methods.update({
            'add': self.add_method,
            'add_all': self.add_all_method,
            'all_sources': self.all_sources_method,
            'all_dependencies': self.all_dependencies_method,
            'apply': self.apply_method,
        })

    def check_source_files(self, arg, allow_deps):
        """Split *arg* into (sources, dependencies), validating types.

        Dependency-like objects (anything with a found() method) are only
        accepted when allow_deps is true.
        """
        sources = []
        deps = []
        for x in arg:
            if isinstance(x, (str, mesonlib.File,
                              build.GeneratedList, build.CustomTarget,
                              build.CustomTargetIndex)):
                sources.append(x)
            elif hasattr(x, 'found'):
                if not allow_deps:
                    msg = 'Dependencies are not allowed in the if_false argument.'
                    raise InvalidArguments(msg)
                deps.append(x)
            else:
                msg = 'Sources must be strings or file-like objects.'
                raise InvalidArguments(msg)
        mesonlib.check_direntry_issues(sources)
        return sources, deps

    def check_conditions(self, arg):
        """Split a 'when' list into (config keys, dependency objects)."""
        keys = []
        deps = []
        for x in listify(arg):
            if isinstance(x, str):
                keys.append(x)
            elif hasattr(x, 'found'):
                deps.append(x)
            else:
                raise InvalidArguments('Conditions must be strings or dependency object')
        return keys, deps

    @permittedKwargs(['when', 'if_false', 'if_true'])
    def add_method(self, state, args, kwargs):
        """sourceset.add(): append one conditional source rule.

        Bare positional arguments are shorthand for if_true; mixing them
        with keyword arguments is rejected.
        """
        if self.frozen:
            raise InvalidCode('Tried to use \'add\' after querying the source set')
        when = listify(kwargs.get('when', []))
        if_true = listify(kwargs.get('if_true', []))
        if_false = listify(kwargs.get('if_false', []))
        if not when and not if_true and not if_false:
            if_true = args
        elif args:
            raise InterpreterException('add called with both positional and keyword arguments')
        keys, dependencies = self.check_conditions(when)
        sources, extra_deps = self.check_source_files(if_true, True)
        if_false, _ = self.check_source_files(if_false, False)
        self.rules.append(SourceSetRule(keys, sources, if_false, [], dependencies, extra_deps))

    @permittedKwargs(['when', 'if_true'])
    def add_all_method(self, state, args, kwargs):
        """sourceset.add_all(): nest other source sets under a condition.

        Nested sets are frozen immediately since their contents are now
        owned by this set's resolution.
        """
        if self.frozen:
            raise InvalidCode('Tried to use \'add_all\' after querying the source set')
        when = listify(kwargs.get('when', []))
        if_true = listify(kwargs.get('if_true', []))
        if not when and not if_true:
            if_true = args
        elif args:
            raise InterpreterException('add_all called with both positional and keyword arguments')
        keys, dependencies = self.check_conditions(when)
        for s in if_true:
            if not isinstance(s, SourceSet):
                raise InvalidCode('Arguments to \'add_all\' after the first must be source sets')
            s.frozen = True
        self.rules.append(SourceSetRule(keys, [], [], if_true, dependencies, []))

    def collect(self, enabled_fn, all_sources, into=None):
        """Resolve the rules into a SourceFiles result.

        enabled_fn maps a config key to a truthy/falsy value; when
        all_sources is true, if_false sources are included as well
        (used by the all_* query methods). Recurses into nested sets,
        accumulating into *into*.
        """
        if not into:
            into = SourceFiles(OrderedSet(), OrderedSet())
        for entry in self.rules:
            if all(x.found() for x in entry.dependencies) and \
               all(enabled_fn(key) for key in entry.keys):
                into.sources.update(entry.sources)
                into.dependencies.update(entry.dependencies)
                into.dependencies.update(entry.extra_deps)
                for ss in entry.sourcesets:
                    ss.collect(enabled_fn, all_sources, into)
                if not all_sources:
                    continue
            into.sources.update(entry.if_false)
        return into

    @noKwargs
    @noPosargs
    def all_sources_method(self, state, args, kwargs):
        """Return every source regardless of conditions; freezes the set."""
        self.frozen = True
        files = self.collect(lambda x: True, True)
        return list(files.sources)

    @noKwargs
    @noPosargs
    @FeatureNew('source_set.all_dependencies() method', '0.52.0')
    def all_dependencies_method(self, state, args, kwargs):
        """Return every dependency regardless of conditions; freezes the set."""
        self.frozen = True
        files = self.collect(lambda x: True, True)
        return list(files.dependencies)

    @permittedKwargs(['strict'])
    def apply_method(self, state, args, kwargs):
        """sourceset.apply(config): resolve against configuration data.

        *config* may be a plain dict or a configuration_data object; with
        strict=True (default) a missing key is an error rather than False.
        """
        if len(args) != 1:
            raise InterpreterException('Apply takes exactly one argument')
        config_data = args[0]
        self.frozen = True
        strict = kwargs.get('strict', True)
        if isinstance(config_data, dict):
            def _get_from_config_data(key):
                if strict and key not in config_data:
                    raise InterpreterException(f'Entry {key} not in configuration dictionary.')
                return config_data.get(key, False)
        else:
            # configuration_data object: cache lookups, since get_method
            # goes through the interpreter machinery.
            config_cache = dict()

            def _get_from_config_data(key):
                nonlocal config_cache
                if key not in config_cache:
                    args = [key] if strict else [key, False]
                    config_cache[key] = config_data.get_method(args, {})
                return config_cache[key]

        files = self.collect(_get_from_config_data, False)
        res = SourceFilesObject(files)
        return res

class SourceFilesObject(ModuleObject):
    """Read-only result of SourceSet.apply(): resolved sources + deps."""

    def __init__(self, files):
        super().__init__()
        self.files = files
        self.methods.update({
            'sources': self.sources_method,
            'dependencies': self.dependencies_method,
        })

    @noPosargs
    @noKwargs
    def sources_method(self, state, args, kwargs):
        return list(self.files.sources)

    @noPosargs
    @noKwargs
    def dependencies_method(self, state, args, kwargs):
        return list(self.files.dependencies)

class SourceSetModule(ExtensionModule):
    """Module exposing source_set() to build files."""

    @FeatureNew('SourceSet module', '0.51.0')
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.methods.update({
            'source_set': self.source_set,
        })

    @noKwargs
    @noPosargs
    def source_set(self, state, args, kwargs):
        return SourceSet(self.interpreter)

def initialize(*args, **kwargs):
    return SourceSetModule(*args, **kwargs)

# ---- meson/mesonbuild/modules/unstable_cuda.py ----
# Copyright 2017 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import typing as T
import re

from ..mesonlib import version_compare
from ..compilers import CudaCompiler, Compiler
from . import NewExtensionModule

from ..interpreterbase import (
    flatten, permittedKwargs, noKwargs,
    InvalidArguments, FeatureNew
)

class CudaModule(NewExtensionModule):
    """Helpers for CUDA projects: minimum driver lookup and NVCC
    -gencode architecture flag computation.
    """

    @FeatureNew('CUDA module', '0.50.0')
    def __init__(self, *args, **kwargs):
        super().__init__()
        self.methods.update({
            "min_driver_version": self.min_driver_version,
            "nvcc_arch_flags": self.nvcc_arch_flags,
            "nvcc_arch_readable": self.nvcc_arch_readable,
        })

    @noKwargs
    def min_driver_version(self, state: 'ModuleState',
                           args: T.Tuple[str],
                           kwargs: T.Dict[str, T.Any]) -> str:
        """Return the minimum NVIDIA driver version required by the given
        CUDA Toolkit version, per the host OS ('unknown' if unmatched).
        """
        argerror = InvalidArguments('min_driver_version must have exactly one positional argument: ' +
                                    'a CUDA Toolkit version string. Beware that, since CUDA 11.0, ' +
                                    'the CUDA Toolkit\'s components (including NVCC) are versioned ' +
                                    'independently from each other (and the CUDA Toolkit as a whole).')

        if len(args) != 1 or not isinstance(args[0], str):
            raise argerror

        cuda_version = args[0]
        # Table rows ordered newest-first; the first matching constraint wins.
        driver_version_table = [
            {'cuda_version': '>=11.5.0',   'windows': '496.04', 'linux': '495.29.05'},
            {'cuda_version': '>=11.4.1',   'windows': '471.41', 'linux': '470.57.02'},
            {'cuda_version': '>=11.4.0',   'windows': '471.11', 'linux': '470.42.01'},
            {'cuda_version': '>=11.3.0',   'windows': '465.89', 'linux': '465.19.01'},
            {'cuda_version': '>=11.2.2',   'windows': '461.33', 'linux': '460.32.03'},
            {'cuda_version': '>=11.2.1',   'windows': '461.09', 'linux': '460.32.03'},
            {'cuda_version': '>=11.2.0',   'windows': '460.82', 'linux': '460.27.03'},
            {'cuda_version': '>=11.1.1',   'windows': '456.81', 'linux': '455.32'},
            {'cuda_version': '>=11.1.0',   'windows': '456.38', 'linux': '455.23'},
            {'cuda_version': '>=11.0.3',   'windows': '451.82', 'linux': '450.51.06'},
            {'cuda_version': '>=11.0.2',   'windows': '451.48', 'linux': '450.51.05'},
            {'cuda_version': '>=11.0.1',   'windows': '451.22', 'linux': '450.36.06'},
            {'cuda_version': '>=10.2.89',  'windows': '441.22', 'linux': '440.33'},
            {'cuda_version': '>=10.1.105', 'windows': '418.96', 'linux': '418.39'},
            {'cuda_version': '>=10.0.130', 'windows': '411.31', 'linux': '410.48'},
            {'cuda_version': '>=9.2.148',  'windows': '398.26', 'linux': '396.37'},
            {'cuda_version': '>=9.2.88',   'windows': '397.44', 'linux': '396.26'},
            {'cuda_version': '>=9.1.85',   'windows': '391.29', 'linux': '390.46'},
            {'cuda_version': '>=9.0.76',   'windows': '385.54', 'linux': '384.81'},
            {'cuda_version': '>=8.0.61',   'windows': '376.51', 'linux': '375.26'},
            {'cuda_version': '>=8.0.44',   'windows': '369.30', 'linux': '367.48'},
            {'cuda_version': '>=7.5.16',   'windows': '353.66', 'linux': '352.31'},
            {'cuda_version': '>=7.0.28',   'windows': '347.62', 'linux': '346.46'},
        ]

        driver_version = 'unknown'
        for d in driver_version_table:
            if version_compare(cuda_version, d['cuda_version']):
                # Any system other than 'windows' falls back to the linux value.
                driver_version = d.get(state.host_machine.system, d['linux'])
                break

        return driver_version

    @permittedKwargs(['detected'])
    def nvcc_arch_flags(self, state: 'ModuleState',
                        args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
                        kwargs: T.Dict[str, T.Any]) -> T.List[str]:
        """Return the list of -gencode flags for the requested architectures."""
        nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
        ret = self._nvcc_arch_flags(*nvcc_arch_args)[0]
        return ret

    @permittedKwargs(['detected'])
    def nvcc_arch_readable(self, state: 'ModuleState',
                           args: T.Tuple[T.Union[Compiler, CudaCompiler, str]],
                           kwargs: T.Dict[str, T.Any]) -> T.List[str]:
        """Return human-readable names (sm_XX / compute_XX) for the same
        architecture selection as nvcc_arch_flags().
        """
        nvcc_arch_args = self._validate_nvcc_arch_args(args, kwargs)
        ret = self._nvcc_arch_flags(*nvcc_arch_args)[1]
        return ret

    @staticmethod
    def _break_arch_string(s):
        # Split on any mix of whitespace, commas and semicolons.
        s = re.sub('[ \t\r\n,;]+', ';', s)
        s = s.strip(';').split(';')
        return s

    @staticmethod
    def _detected_cc_from_compiler(c):
        # Only a CudaCompiler knows the detected compute capability.
        if isinstance(c, CudaCompiler):
            return c.detected_cc
        return ''

    @staticmethod
    def _version_from_compiler(c):
        if isinstance(c, CudaCompiler):
            return c.version
        if isinstance(c, str):
            return c
        return 'unknown'

    def _validate_nvcc_arch_args(self, args, kwargs):
        """Normalize (compiler-or-version, arch list..., detected=) into
        the (cuda_version, arch_list, detected) triple used internally.
        """
        argerror = InvalidArguments('The first argument must be an NVCC compiler object, or its version string!')

        if len(args) < 1:
            raise argerror
        else:
            compiler = args[0]
            cuda_version = self._version_from_compiler(compiler)
            if cuda_version == 'unknown':
                raise argerror

        arch_list = [] if len(args) <= 1 else flatten(args[1:])
        arch_list = [self._break_arch_string(a) for a in arch_list]
        arch_list = flatten(arch_list)
        if len(arch_list) > 1 and not set(arch_list).isdisjoint({'All', 'Common', 'Auto'}):
            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')
        arch_list = arch_list[0] if len(arch_list) == 1 else arch_list

        detected = kwargs.get('detected', self._detected_cc_from_compiler(compiler))
        detected = flatten([detected])
        detected = [self._break_arch_string(a) for a in detected]
        detected = flatten(detected)
        if not set(detected).isdisjoint({'All', 'Common', 'Auto'}):
            raise InvalidArguments('''The special architectures 'All', 'Common' and 'Auto' must appear alone, as a positional argument!''')

        return cuda_version, arch_list, detected

    def _filter_cuda_arch_list(self, cuda_arch_list, lo=None, hi=None, saturate=None):
        """
        Filter CUDA arch list (no codenames) for >= low and < hi architecture
        bounds, and deduplicate.
        If saturate is provided, architectures >= hi are replaced with saturate.
        """

        filtered_cuda_arch_list = []
        for arch in cuda_arch_list:
            if arch:
                if lo and version_compare(arch, '<' + lo):
                    continue
                if hi and version_compare(arch, '>=' + hi):
                    if not saturate:
                        continue
                    arch = saturate
                if arch not in filtered_cuda_arch_list:
                    filtered_cuda_arch_list.append(arch)
        return filtered_cuda_arch_list

    def _nvcc_arch_flags(self, cuda_version, cuda_arch_list='Auto', detected=''):
        """
        Using the CUDA Toolkit version and the target architectures, compute
        the NVCC architecture flags.

        Returns (nvcc_flags, nvcc_archs_readable).
        """

        # Replicates much of the logic of
        #   https://github.com/Kitware/CMake/blob/master/Modules/FindCUDA/select_compute_arch.cmake
        # except that a bug with cuda_arch_list="All" is worked around by
        # tracking both lower and upper limits on GPU architectures.

        cuda_known_gpu_architectures  = ['Fermi', 'Kepler', 'Maxwell']  # noqa: E221
        cuda_common_gpu_architectures = ['3.0', '3.5', '5.0']           # noqa: E221
        cuda_hi_limit_gpu_architecture = None                           # noqa: E221
        cuda_lo_limit_gpu_architecture = '2.0'                          # noqa: E221
        cuda_all_gpu_architectures    = ['3.0', '3.2', '3.5', '5.0']    # noqa: E221

        # Each toolkit release widens the known/common/all lists and narrows
        # the supported [lo, hi) compute-capability window.
        if version_compare(cuda_version, '<7.0'):
            cuda_hi_limit_gpu_architecture = '5.2'

        if version_compare(cuda_version, '>=7.0'):
            cuda_known_gpu_architectures  += ['Kepler+Tegra', 'Kepler+Tesla', 'Maxwell+Tegra']  # noqa: E221
            cuda_common_gpu_architectures += ['5.2']                                            # noqa: E221

            if version_compare(cuda_version, '<8.0'):
                cuda_common_gpu_architectures += ['5.2+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '6.0'        # noqa: E221

        if version_compare(cuda_version, '>=8.0'):
            cuda_known_gpu_architectures  += ['Pascal', 'Pascal+Tegra']  # noqa: E221
            cuda_common_gpu_architectures += ['6.0', '6.1']              # noqa: E221
            cuda_all_gpu_architectures    += ['6.0', '6.1', '6.2']       # noqa: E221

            if version_compare(cuda_version, '<9.0'):
                cuda_common_gpu_architectures += ['6.1+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '7.0'        # noqa: E221

        if version_compare(cuda_version, '>=9.0'):
            cuda_known_gpu_architectures  += ['Volta', 'Xavier']  # noqa: E221
            cuda_common_gpu_architectures += ['7.0']              # noqa: E221
            cuda_all_gpu_architectures    += ['7.0', '7.2']       # noqa: E221
            # https://docs.nvidia.com/cuda/archive/9.0/cuda-toolkit-release-notes/index.html#unsupported-features
            cuda_lo_limit_gpu_architecture = '3.0'                # noqa: E221

            if version_compare(cuda_version, '<10.0'):
                cuda_common_gpu_architectures += ['7.2+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221

        if version_compare(cuda_version, '>=10.0'):
            cuda_known_gpu_architectures  += ['Turing']  # noqa: E221
            cuda_common_gpu_architectures += ['7.5']     # noqa: E221
            cuda_all_gpu_architectures    += ['7.5']     # noqa: E221

            if version_compare(cuda_version, '<11.0'):
                cuda_common_gpu_architectures += ['7.5+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '8.0'        # noqa: E221

        if version_compare(cuda_version, '>=11.0'):
            cuda_known_gpu_architectures  += ['Ampere']  # noqa: E221
            cuda_common_gpu_architectures += ['8.0']     # noqa: E221
            cuda_all_gpu_architectures    += ['8.0']     # noqa: E221
            # https://docs.nvidia.com/cuda/archive/11.0/cuda-toolkit-release-notes/index.html#deprecated-features
            cuda_lo_limit_gpu_architecture = '3.5'       # noqa: E221

            if version_compare(cuda_version, '<11.1'):
                cuda_common_gpu_architectures += ['8.0+PTX']  # noqa: E221
                cuda_hi_limit_gpu_architecture = '8.6'        # noqa: E221

        if version_compare(cuda_version, '>=11.1'):
            cuda_common_gpu_architectures += ['8.6', '8.6+PTX']  # noqa: E221
            cuda_all_gpu_architectures    += ['8.6']             # noqa: E221

            if version_compare(cuda_version, '<12.0'):
                cuda_hi_limit_gpu_architecture = '9.0'  # noqa: E221

        if not cuda_arch_list:
            cuda_arch_list = 'Auto'

        if   cuda_arch_list == 'All':     # noqa: E271
            cuda_arch_list = cuda_known_gpu_architectures
        elif cuda_arch_list == 'Common':  # noqa: E271
            cuda_arch_list = cuda_common_gpu_architectures
        elif cuda_arch_list == 'Auto':    # noqa: E271
            if detected:
                if isinstance(detected, list):
                    cuda_arch_list = detected
                else:
                    cuda_arch_list = self._break_arch_string(detected)
                # Clamp detected archs to what this toolkit supports,
                # saturating too-new GPUs to the newest common arch.
                cuda_arch_list = self._filter_cuda_arch_list(cuda_arch_list,
                                                             cuda_lo_limit_gpu_architecture,
                                                             cuda_hi_limit_gpu_architecture,
                                                             cuda_common_gpu_architectures[-1])
            else:
                cuda_arch_list = cuda_common_gpu_architectures
        elif isinstance(cuda_arch_list, str):
            cuda_arch_list = self._break_arch_string(cuda_arch_list)

        cuda_arch_list = sorted([x for x in set(cuda_arch_list) if x])

        cuda_arch_bin = []
        cuda_arch_ptx = []
        for arch_name in cuda_arch_list:
            arch_bin = []
            arch_ptx = []
            add_ptx = arch_name.endswith('+PTX')
            if add_ptx:
                arch_name = arch_name[:-len('+PTX')]

            # Either a numeric arch like '6.1' or '6.1(6.0)' (code(compute)),
            # or a codename resolved via the table below.
            if re.fullmatch('[0-9]+\\.[0-9](\\([0-9]+\\.[0-9]\\))?', arch_name):
                arch_bin, arch_ptx = [arch_name], [arch_name]
            else:
                arch_bin, arch_ptx = {
                    'Fermi':         (['2.0', '2.1(2.0)'], []),
                    'Kepler+Tegra':  (['3.2'],             []),
                    'Kepler+Tesla':  (['3.7'],             []),
                    'Kepler':        (['3.0', '3.5'],      ['3.5']),
                    'Maxwell+Tegra': (['5.3'],             []),
                    'Maxwell':       (['5.0', '5.2'],      ['5.2']),
                    'Pascal':        (['6.0', '6.1'],      ['6.1']),
                    'Pascal+Tegra':  (['6.2'],             []),
                    'Volta':         (['7.0'],             ['7.0']),
                    'Xavier':        (['7.2'],             []),
                    'Turing':        (['7.5'],             ['7.5']),
                    'Ampere':        (['8.0'],             ['8.0']),
                }.get(arch_name, (None, None))

            if arch_bin is None:
                raise InvalidArguments('Unknown CUDA Architecture Name {}!'
                                       .format(arch_name))

            cuda_arch_bin += arch_bin

            if add_ptx:
                if not arch_ptx:
                    arch_ptx = arch_bin
                cuda_arch_ptx += arch_ptx

        cuda_arch_bin = sorted(list(set(cuda_arch_bin)))
        cuda_arch_ptx = sorted(list(set(cuda_arch_ptx)))

        nvcc_flags = []
        nvcc_archs_readable = []

        for arch in cuda_arch_bin:
            # 'X.Y' or 'X.Y(Z.W)' -> SASS code X.Y, optionally for virtual
            # compute Z.W.
            arch, codev = re.fullmatch(
                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()

            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
                continue
            if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
                continue

            if codev:
                arch = arch.replace('.', '')
                codev = codev.replace('.', '')
                nvcc_flags += ['-gencode', 'arch=compute_' + codev + ',code=sm_' + arch]
                nvcc_archs_readable += ['sm_' + arch]
            else:
                arch = arch.replace('.', '')
                nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=sm_' + arch]
                nvcc_archs_readable += ['sm_' + arch]

        for arch in cuda_arch_ptx:
            arch, codev = re.fullmatch(
                '([0-9]+\\.[0-9])(?:\\(([0-9]+\\.[0-9])\\))?', arch).groups()

            if codev:
                arch = codev

            if version_compare(arch, '<' + cuda_lo_limit_gpu_architecture):
                continue
            if version_compare(arch, '>=' + cuda_hi_limit_gpu_architecture):
                continue

            arch = arch.replace('.', '')
            nvcc_flags += ['-gencode', 'arch=compute_' + arch + ',code=compute_' + arch]
            nvcc_archs_readable += ['compute_' + arch]

        return nvcc_flags, nvcc_archs_readable

def initialize(*args, **kwargs):
    return CudaModule(*args, **kwargs)

# ---- meson/mesonbuild/modules/unstable_external_project.py ----
# Copyright 2020 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
class ExternalProject(NewExtensionModule):
    """Drive an external (configure/make style) build system as Meson targets.

    The project is configured eagerly at construction time; the resulting
    build/install work is wrapped in a CustomTarget + InstallDir pair
    (``self.targets``), and ``dependency()`` exposes the installed artifacts
    to the rest of the Meson project.
    """

    def __init__(self,
                 state: ModuleState,
                 configure_command: str,
                 configure_options: T.List[str],
                 cross_configure_options: T.List[str],
                 env: build.EnvironmentVariables,
                 verbose: bool):
        super().__init__()
        self.methods.update({'dependency': self.dependency_method,
                             })

        self.subdir = Path(state.subdir)
        self.project_version = state.project_version
        self.subproject = state.subproject
        self.env = state.environment
        self.build_machine = state.build_machine
        self.host_machine = state.host_machine
        self.configure_command = configure_command
        self.configure_options = configure_options
        self.cross_configure_options = cross_configure_options
        self.verbose = verbose
        self.user_env = env

        # Layout: sources in-tree, build and DESTDIR-style install staging
        # under the build directory.
        self.src_dir = Path(self.env.get_source_dir(), self.subdir)
        self.build_dir = Path(self.env.get_build_dir(), self.subdir, 'build')
        self.install_dir = Path(self.env.get_build_dir(), self.subdir, 'dist')
        self.prefix = Path(self.env.coredata.get_option(OptionKey('prefix')))
        self.libdir = Path(self.env.coredata.get_option(OptionKey('libdir')))
        self.includedir = Path(self.env.coredata.get_option(OptionKey('includedir')))
        self.name = self.src_dir.name

        # On Windows if the prefix is "c:/foo" and DESTDIR is "c:/bar", `make`
        # will install files into "c:/bar/c:/foo" which is an invalid path.
        # Work around that issue by removing the drive from prefix.
        if self.prefix.drive:
            self.prefix = self.prefix.relative_to(self.prefix.drive)

        # self.prefix is an absolute path, so we cannot append it to another path.
        self.rel_prefix = self.prefix.relative_to(self.prefix.root)

        self.make = state.find_program('make')
        self.make = self.make.get_command()[0]

        self._configure(state)

        self.targets = self._create_targets()

    def _configure(self, state: ModuleState):
        """Build the configure command line, set up the env, and run configure."""
        # Assume it's the name of a script in source dir, like 'configure',
        # 'autogen.sh', etc).
        configure_path = Path(self.src_dir, self.configure_command)
        configure_prog = state.find_program(configure_path.as_posix())
        configure_cmd = configure_prog.get_command()

        # (key, default option template, value) triples substituted into the
        # user-supplied configure_options.
        d = [('PREFIX', '--prefix=@PREFIX@', self.prefix.as_posix()),
             ('LIBDIR', '--libdir=@PREFIX@/@LIBDIR@', self.libdir.as_posix()),
             ('INCLUDEDIR', None, self.includedir.as_posix()),
             ]
        self._validate_configure_options(d)

        configure_cmd += self._format_options(self.configure_options, d)

        if self.env.is_cross_build():
            host = '{}-{}-{}'.format(self.host_machine.cpu_family,
                                     self.build_machine.system,
                                     self.host_machine.system)
            d = [('HOST', None, host)]
            configure_cmd += self._format_options(self.cross_configure_options, d)

        # Set common env variables like CFLAGS, CC, etc.
        link_exelist = []
        link_args = []
        self.run_env = os.environ.copy()
        for lang, compiler in self.env.coredata.compilers[MachineChoice.HOST].items():
            if any(lang not in i for i in (CEXE_MAPPING, CFLAGS_MAPPING)):
                continue
            cargs = self.env.coredata.get_external_args(MachineChoice.HOST, lang)
            self.run_env[CEXE_MAPPING[lang]] = self._quote_and_join(compiler.get_exelist())
            self.run_env[CFLAGS_MAPPING[lang]] = self._quote_and_join(cargs)
            if not link_exelist:
                link_exelist = compiler.get_linker_exelist()
                link_args = self.env.coredata.get_external_link_args(MachineChoice.HOST, lang)
        if link_exelist:
            # FIXME: Do not pass linker because Meson uses CC as linker wrapper,
            # but autotools often expects the real linker (e.h. GNU ld).
            # self.run_env['LD'] = self._quote_and_join(link_exelist)
            pass
        self.run_env['LDFLAGS'] = self._quote_and_join(link_args)

        self.run_env = self.user_env.get_env(self.run_env)

        PkgConfigDependency.setup_env(self.run_env, self.env, MachineChoice.HOST,
                                      Path(self.env.get_build_dir(), 'meson-uninstalled').as_posix())

        self.build_dir.mkdir(parents=True, exist_ok=True)
        self._run('configure', configure_cmd)

    def _quote_and_join(self, array: T.List[str]) -> str:
        """Shell-quote each element and join into one env-variable string."""
        return ' '.join([shlex.quote(i) for i in array])

    def _validate_configure_options(self, variables: T.List[T.Tuple[str, str, str]]):
        # Ensure the user at least try to pass basic info to the build system,
        # like the prefix, libdir, etc.
        for key, default, val in variables:
            if default is None:
                continue
            key_format = f'@{key}@'
            for option in self.configure_options:
                if key_format in option:
                    break
            else:
                # User did not reference @KEY@ anywhere; append the default
                # option so the external project gets the expected layout.
                FeatureNew('Default configure_option', '0.57.0').use(self.subproject)
                self.configure_options.append(default)

    def _format_options(self, options: T.List[str], variables: T.List[T.Tuple[str, str, str]]) -> T.List[str]:
        """Substitute @KEY@ placeholders in options; raise if any are unknown."""
        out = []
        missing = set()
        regex = get_variable_regex('meson')
        confdata = {k: (v, None) for k, d, v in variables}
        for o in options:
            arg, missing_vars = do_replacement(regex, o, 'meson', confdata)
            missing.update(missing_vars)
            out.append(arg)
        if missing:
            var_list = ", ".join(map(repr, sorted(missing)))
            # Fix: this used to raise EnvironmentException, which is not
            # imported in this module and produced a NameError instead of the
            # intended error message. MesonException is imported and is the
            # base of EnvironmentException anyway.
            raise MesonException(
                f"Variables {var_list} in configure options are missing.")
        return out

    def _run(self, step: str, command: T.List[str]):
        """Run one external-project step, logging to a per-step file unless verbose."""
        mlog.log(f'External project {self.name}:', mlog.bold(step))
        m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n'
        log_filename = Path(mlog.log_dir, f'{self.name}-{step}.log')
        output = None
        try:
            if not self.verbose:
                output = open(log_filename, 'w', encoding='utf-8')
                output.write(m + '\n')
                output.flush()
            else:
                mlog.log(m)
            p, o, e = Popen_safe(command, cwd=str(self.build_dir), env=self.run_env,
                                 stderr=subprocess.STDOUT,
                                 stdout=output)
        finally:
            # Fix: the log file handle was previously leaked; close it
            # deterministically even if Popen_safe raises.
            if output is not None:
                output.close()
        if p.returncode != 0:
            m = f'{step} step returned error code {p.returncode}.'
            if not self.verbose:
                m += '\nSee logs: ' + str(log_filename)
            raise MesonException(m)

    def _create_targets(self):
        """Create the stamp CustomTarget driving the build plus the InstallDir."""
        cmd = self.env.get_build_command()
        cmd += ['--internal', 'externalproject',
                '--name', self.name,
                '--srcdir', self.src_dir.as_posix(),
                '--builddir', self.build_dir.as_posix(),
                '--installdir', self.install_dir.as_posix(),
                '--logdir', mlog.log_dir,
                '--make', self.make,
                ]
        if self.verbose:
            cmd.append('--verbose')

        target_kwargs = {'output': f'{self.name}.stamp',
                         'depfile': f'{self.name}.d',
                         'command': cmd + ['@OUTPUT@', '@DEPFILE@'],
                         'console': True,
                         }
        self.target = build.CustomTarget(self.name,
                                         self.subdir.as_posix(),
                                         self.subproject,
                                         target_kwargs)

        # Everything installed into the staging dir is re-installed into the
        # real prefix at meson install time.
        idir = build.InstallDir(self.subdir.as_posix(),
                                Path('dist', self.rel_prefix).as_posix(),
                                install_dir='.',
                                install_mode=None,
                                exclude=None,
                                strip_directory=True,
                                from_source_dir=False,
                                subproject=self.subproject)

        return [self.target, idir]

    @permittedKwargs({'subdir'})
    @typed_pos_args('external_project.dependency', str)
    def dependency_method(self, state, args: T.Tuple[str], kwargs):
        """Return an InternalDependency for library *libname* built by this project."""
        libname = args[0]

        subdir = kwargs.get('subdir', '')
        if not isinstance(subdir, str):
            m = 'ExternalProject.dependency subdir keyword argument must be string.'
            raise InterpreterException(m)

        abs_includedir = Path(self.install_dir, self.rel_prefix, self.includedir)
        if subdir:
            abs_includedir = Path(abs_includedir, subdir)
        abs_libdir = Path(self.install_dir, self.rel_prefix, self.libdir)

        version = self.project_version['version']
        incdir = []
        compile_args = [f'-I{abs_includedir}']
        link_args = [f'-L{abs_libdir}', f'-l{libname}']
        libs = []
        libs_whole = []
        # Depending on the stamp target guarantees the external build ran
        # before anything consuming this dependency is compiled.
        sources = self.target
        final_deps = []
        variables = {}
        dep = InternalDependency(version, incdir, compile_args, link_args, libs,
                                 libs_whole, sources, final_deps, variables)
        return dep


class ExternalProjectModule(ExtensionModule):
    """Module entry point: wraps ExternalProject behind `add_project()`."""

    @FeatureNew('External build system Module', '0.56.0')
    def __init__(self, interpreter):
        super().__init__(interpreter)
        self.methods.update({'add_project': self.add_project,
                             })

    @permittedKwargs({'configure_options', 'cross_configure_options', 'verbose', 'env'})
    @typed_pos_args('external_project_mod.add_project', str)
    def add_project(self, state: ModuleState, args: T.Tuple[str], kwargs: T.Dict[str, T.Any]):
        """Configure an external project and return it plus its build targets."""
        configure_command = args[0]
        configure_options = extract_as_list(kwargs, 'configure_options')
        cross_configure_options = extract_as_list(kwargs, 'cross_configure_options')
        if not cross_configure_options:
            cross_configure_options = ['--host=@HOST@']
        verbose = kwargs.get('verbose', False)
        env = self.interpreter.unpack_env_kwarg(kwargs)
        project = ExternalProject(state,
                                  configure_command,
                                  configure_options,
                                  cross_configure_options,
                                  env, verbose)
        return ModuleReturnValue(project, project.targets)


def initialize(*args, **kwargs):
    return ExternalProjectModule(*args, **kwargs)
class IceStormModule(ExtensionModule):
    """Builds iCE40 FPGA bitstreams with the IceStorm flow.

    The flow is: yosys (synthesis, .blif) -> arachne-pnr (place & route,
    .asc) -> icepack (bitstream, .bin), plus run targets for timing
    analysis (icetime) and device upload (iceprog).
    """

    @FeatureNew('FPGA/Icestorm Module', '0.45.0')
    def __init__(self, interpreter):
        super().__init__(interpreter)
        self.yosys_bin = None
        self.methods.update({
            'project': self.project,
        })

    def detect_binaries(self, state):
        """Locate all IceStorm tools once, caching them on the module."""
        for attr, tool in (('yosys_bin', 'yosys'),
                           ('arachne_bin', 'arachne-pnr'),
                           ('icepack_bin', 'icepack'),
                           ('iceprog_bin', 'iceprog'),
                           ('icetime_bin', 'icetime')):
            setattr(self, attr, state.find_program(tool))

    def project(self, state, args, kwargs):
        """Create the synthesis, P&R, packing, upload and timing targets."""
        if not self.yosys_bin:
            self.detect_binaries(state)
        if not args:
            raise mesonlib.MesonException('Project requires at least one argument, which is the project name.')
        proj_name, *arg_sources = args
        if not isinstance(proj_name, str):
            raise mesonlib.MesonException('Argument must be a string.')
        extra_sources = kwargs.get('sources', [])
        if not isinstance(extra_sources, list):
            extra_sources = [extra_sources]
        all_sources = self.interpreter.source_strings_to_files(
            flatten(arg_sources + extra_sources))
        if 'constraint_file' not in kwargs:
            raise mesonlib.MesonException('Constraint file not specified.')

        constraint_file = self.interpreter.source_strings_to_files(kwargs['constraint_file'])
        if len(constraint_file) != 1:
            raise mesonlib.MesonException('Constraint file must contain one and only one entry.')

        # Synthesis: HDL sources -> BLIF netlist.
        blif_target = self.interpreter.func_custom_target(None, [f'{proj_name}_blif'], {
            'input': all_sources,
            'output': f'{proj_name}.blif',
            'command': [self.yosys_bin, '-q', '-p', 'synth_ice40 -blif @OUTPUT@', '@INPUT@']})

        # Place & route: BLIF -> ASCII bitstream description.
        asc_target = self.interpreter.func_custom_target(None, [f'{proj_name}_asc'], {
            'input': blif_target,
            'output': f'{proj_name}.asc',
            'command': [self.arachne_bin, '-q', '-d', '1k', '-p', constraint_file, '@INPUT@', '-o', '@OUTPUT@']})

        # Packing: ASCII description -> binary bitstream; built by default.
        bin_target = self.interpreter.func_custom_target(None, [f'{proj_name}_bin'], {
            'input': asc_target,
            'output': f'{proj_name}.bin',
            'command': [self.icepack_bin, '@INPUT@', '@OUTPUT@'],
            'build_by_default': True})

        # Convenience run targets: flash the device and report timing.
        self.interpreter.func_run_target(None, [f'{proj_name}-upload'], {
            'command': [self.iceprog_bin, bin_target]})

        self.interpreter.func_run_target(None, [f'{proj_name}-time'], {
            'command': [self.icetime_bin, bin_target]})


def initialize(*args, **kwargs):
    return IceStormModule(*args, **kwargs)
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import typing as T + +from . import ExtensionModule, ModuleReturnValue +from .. import mlog +from ..build import BuildTarget, CustomTargetIndex, Executable, GeneratedList, InvalidArguments, IncludeDirs, CustomTarget +from ..interpreter.interpreter import TEST_KWARGS +from ..interpreterbase import ContainerTypeInfo, InterpreterException, KwargInfo, permittedKwargs, FeatureNew, typed_kwargs, typed_pos_args, noPosargs +from ..mesonlib import stringlistify, listify, typeslistify, File +from ..dependencies import Dependency, ExternalLibrary +from ..interpreterbase import InterpreterException, permittedKwargs, FeatureNew, typed_pos_args, noPosargs +from ..mesonlib import stringlistify, listify, typeslistify, File + +if T.TYPE_CHECKING: + from . 
class RustModule(ExtensionModule):

    """A module that holds helper functions for rust."""

    @FeatureNew('rust module', '0.57.0')
    def __init__(self, interpreter: 'Interpreter') -> None:
        super().__init__(interpreter)
        # Resolved lazily on the first bindgen() call so projects that never
        # use bindgen don't need it installed.
        self._bindgen_bin: T.Optional['ExternalProgram'] = None
        self.methods.update({
            'test': self.test,
            'bindgen': self.bindgen,
        })

    @typed_pos_args('rust.test', str, BuildTarget)
    @typed_kwargs(
        'rust.test',
        *TEST_KWARGS,
        KwargInfo('is_parallel', bool, default=False),
        KwargInfo(
            'dependencies',
            ContainerTypeInfo(list, (Dependency, ExternalLibrary)),
            listify=True,
            default=[]),
    )
    def test(self, state: 'ModuleState', args: T.Tuple[str, BuildTarget], kwargs: 'FuncTest') -> ModuleReturnValue:
        """Generate a rust test target from a given rust target.

        Rust puts its unit tests inside its main source files, unlike most
        languages that put them in external files. This means that normally
        you have to define two separate targets with basically the same
        arguments to get tests:

        ```meson
        rust_lib_sources = [...]
        rust_lib = static_library(
            'rust_lib',
            rust_lib_sources,
        )

        rust_lib_test = executable(
            'rust_lib_test',
            rust_lib_sources,
            rust_args : ['--test'],
        )

        test(
            'rust_lib_test',
            rust_lib_test,
            protocol : 'rust',
        )
        ```

        This is all fine, but not very DRY. This method makes it much easier
        to define rust tests:

        ```meson
        rust = import('unstable-rust')

        rust_lib = static_library(
            'rust_lib',
            [sources],
        )

        rust.test('rust_lib_test', rust_lib)
        ```
        """
        name = args[0]
        base_target: BuildTarget = args[1]
        if not base_target.uses_rust():
            raise InterpreterException('Second positional argument to rustmod.test() must be a rust based target')
        extra_args = kwargs['args']

        # Delete any arguments we don't want passed: this method adds
        # --test/--format itself below, so user copies would conflict.
        if '--test' in extra_args:
            mlog.warning('Do not add --test to rustmod.test arguments')
            extra_args.remove('--test')
        if '--format' in extra_args:
            mlog.warning('Do not add --format to rustmod.test arguments')
            i = extra_args.index('--format')
            # Also delete the argument to --format
            del extra_args[i + 1]
            del extra_args[i]
        # Handle the fused '--format=...' spelling as well; break right after
        # the deletion so the enumerate iterator is never advanced on a
        # mutated list.
        for i, a in enumerate(extra_args):
            if isinstance(a, str) and a.startswith('--format='):
                del extra_args[i]
                break

        # Shallow copy so the caller's dependency list is not aliased below.
        dependencies = [d for d in kwargs['dependencies']]

        # We need to cast here, as currently these don't have protocol in
        # them, but test itself does.
        tkwargs = T.cast('_kwargs.FuncTest', kwargs.copy())

        tkwargs['args'] = extra_args + ['--test', '--format', 'pretty']
        tkwargs['protocol'] = 'rust'

        new_target_kwargs = base_target.kwargs.copy()
        # Don't mutate the shallow copied list, instead replace it with a new
        # one
        new_target_kwargs['rust_args'] = new_target_kwargs.get('rust_args', []) + ['--test']
        new_target_kwargs['install'] = False
        new_target_kwargs['dependencies'] = new_target_kwargs.get('dependencies', []) + dependencies

        # Clone the base target as an executable with --test appended, which
        # makes rustc emit the test harness binary.
        new_target = Executable(
            name, base_target.subdir, state.subproject,
            base_target.for_machine, base_target.sources,
            base_target.objects, base_target.environment,
            new_target_kwargs
        )

        test = self.interpreter.make_test(
            self.interpreter.current_node, (name, new_target), tkwargs)

        return ModuleReturnValue(None, [new_target, test])

    @noPosargs
    @permittedKwargs({'input', 'output', 'include_directories', 'c_args', 'args'})
    def bindgen(self, state: 'ModuleState', args: T.List, kwargs: T.Dict[str, T.Any]) -> ModuleReturnValue:
        """Wrapper around bindgen to simplify its use.

        The main thing this simplifies is the use of `include_directory`
        objects, instead of having to pass a plethora of `-I` arguments.
        """
        header: 'SourceOutputs'
        _deps: T.Sequence['SourceOutputs']
        # First element of `input` is the header bindgen runs on; the rest
        # are extra dependencies the generated target must track.
        try:
            header, *_deps = self.interpreter.source_strings_to_files(listify(kwargs['input']))
        except KeyError:
            raise InvalidArguments('rustmod.bindgen() `input` argument must have at least one element.')

        try:
            output: str = kwargs['output']
        except KeyError:
            raise InvalidArguments('rustmod.bindgen() `output` must be provided')
        if not isinstance(output, str):
            raise InvalidArguments('rustmod.bindgen() `output` argument must be a string.')

        include_dirs: T.List[IncludeDirs] = typeslistify(listify(kwargs.get('include_directories', [])), IncludeDirs)
        c_args: T.List[str] = stringlistify(listify(kwargs.get('c_args', [])))
        bind_args: T.List[str] = stringlistify(listify(kwargs.get('args', [])))

        # Split File and Target dependencies to add pass to CustomTarget
        depends: T.List[T.Union[GeneratedList, BuildTarget, CustomTargetIndex, CustomTarget]] = []
        depend_files: T.List[File] = []
        for d in _deps:
            if isinstance(d, File):
                depend_files.append(d)
            else:
                depends.append(d)

        inc_strs: T.List[str] = []
        for i in include_dirs:
            # bindgen always uses clang, so it's safe to hardcode -I here
            inc_strs.extend([f'-I{x}' for x in i.to_string_list(state.environment.get_source_dir())])

        if self._bindgen_bin is None:
            self._bindgen_bin = state.find_program('bindgen')

        name: str
        if isinstance(header, File):
            name = header.fname
        else:
            name = header.get_outputs()[0]

        # '/' in the name would create nested target directories; flatten it.
        target = CustomTarget(
            f'rustmod-bindgen-{name}'.replace('/', '_'),
            state.subdir,
            state.subproject,
            {
                'input': header,
                'output': output,
                'command': self._bindgen_bin.get_command() + [
                    '@INPUT@', '--output',
                    os.path.join(state.environment.build_dir, '@OUTPUT@')] +
                    bind_args + ['--'] + c_args + inc_strs +
                    ['-MD', '-MQ', '@INPUT@', '-MF', '@DEPFILE@'],
                'depfile': '@PLAINNAME@.d',
                'depends': depends,
                'depend_files': depend_files,
            },
            backend=state.backend,
        )

        return ModuleReturnValue([target], [target])


def initialize(*args: T.List, **kwargs: T.Dict) -> RustModule:
    return RustModule(*args, **kwargs)  # type: ignore
class SimdModule(ExtensionModule):
    """Helpers for building one static library per SIMD instruction set."""

    @FeatureNew('SIMD module', '0.42.0')
    def __init__(self, interpreter):
        super().__init__(interpreter)
        # FIXME add Altivec and AVX512.
        self.isets = ('mmx', 'sse', 'sse2', 'sse3', 'ssse3', 'sse41',
                      'sse42', 'avx', 'avx2', 'neon')
        self.methods.update({
            'check': self.check,
        })

    def check(self, state, args, kwargs):
        """Probe compiler support for each requested instruction set.

        For every iset keyword the compiler accepts, builds a static library
        from that iset's sources with the right flags and records a
        HAVE_<ISET> entry. Returns [list_of_libraries, configuration_data].
        """
        if len(args) != 1:
            raise mesonlib.MesonException('Check requires one argument, a name prefix for checks.')
        prefix = args[0]
        if not isinstance(prefix, str):
            raise mesonlib.MesonException('Argument must be a string.')
        if 'compiler' not in kwargs:
            raise mesonlib.MesonException('Must specify compiler keyword')
        if 'sources' in kwargs:
            raise mesonlib.MesonException('SIMD module does not support the "sources" keyword')
        compiler = kwargs['compiler']
        if not isinstance(compiler, compilers.compilers.Compiler):
            raise mesonlib.MesonException('Compiler argument must be a compiler object.')

        # Everything that is neither an iset source list nor the compiler is
        # forwarded verbatim to each static_library() call.
        shared_kwargs = {key: value for key, value in kwargs.items()
                         if key != 'compiler' and key not in self.isets}

        cdata = self.interpreter.func_configuration_data(None, [], {})
        conf = cdata.conf_data
        libraries = []
        for iset in self.isets:
            if iset not in kwargs:
                continue
            iset_args = compiler.get_instruction_set_args(iset)
            if iset_args is None:
                mlog.log(f'Compiler supports {iset}:', mlog.red('NO'))
                continue
            # An empty flag list means the iset is supported unconditionally;
            # otherwise verify the compiler actually accepts the flags.
            if iset_args and not compiler.has_multi_arguments(iset_args, state.environment)[0]:
                mlog.log(f'Compiler supports {iset}:', mlog.red('NO'))
                continue
            mlog.log(f'Compiler supports {iset}:', mlog.green('YES'))
            conf.values['HAVE_' + iset.upper()] = ('1', f'Compiler supports {iset}.')
            # Might also be an array or Files. static_library will validate.
            lib_kwargs = {'sources': kwargs[iset]}
            lib_kwargs.update(shared_kwargs)
            langarg_key = compiler.get_language() + '_args'
            existing_lang_args = mesonlib.extract_as_list(lib_kwargs, langarg_key)
            lib_kwargs[langarg_key] = existing_lang_args + iset_args
            libraries.append(self.interpreter.func_static_lib(None, [prefix + '_' + iset], lib_kwargs))
        return [libraries, cdata]


def initialize(*args, **kwargs):
    return SimdModule(*args, **kwargs)
class ResourceCompilerType(enum.Enum):
    # The three flavours of Windows resource compiler we know how to drive;
    # each takes different arguments and emits a different output format.
    windres = 1
    rc = 2
    wrc = 3

class WindowsModule(ExtensionModule):
    """Windows-specific helpers, currently resource (.rc) compilation."""

    def __init__(self, interpreter):
        super().__init__(interpreter)
        self.methods.update({
            'compile_resources': self.compile_resources,
        })

    def detect_compiler(self, compilers):
        # Prefer C, then C++: resources are compiled with whichever toolchain
        # the project already configured.
        for l in ('c', 'cpp'):
            if l in compilers:
                return compilers[l]
        raise MesonException('Resource compilation requires a C or C++ compiler.')

    def _find_resource_compiler(self, state):
        """Locate the resource compiler and classify it; cached on the module.

        Returns a (ExternalProgram, ResourceCompilerType) tuple.
        """
        # FIXME: Does not handle `native: true` executables, see
        # See https://github.com/mesonbuild/meson/issues/1531
        # Take a parameter instead of the hardcoded definition below
        for_machine = MachineChoice.HOST

        # Result cached from a previous call.
        if hasattr(self, '_rescomp'):
            return self._rescomp

        # Will try cross / native file and then env var
        rescomp = ExternalProgram.from_bin_list(state.environment, for_machine, 'windres')

        if not rescomp or not rescomp.found():
            # Fall back based on the detected C/C++ compiler family:
            # MSVC-like compilers ship rc.exe, GNU-like toolchains use windres.
            comp = self.detect_compiler(state.environment.coredata.compilers[for_machine])
            if comp.id in {'msvc', 'clang-cl', 'intel-cl'}:
                rescomp = ExternalProgram('rc', silent=True)
            else:
                rescomp = ExternalProgram('windres', silent=True)

        if not rescomp.found():
            raise MesonException('Could not find Windows resource compiler')

        # Classify the binary by probing its help/version output; for-else
        # raises when no pattern matched.
        for (arg, match, rc_type) in [
                ('/?', '^.*Microsoft.*Resource Compiler.*$', ResourceCompilerType.rc),
                ('--version', '^.*GNU windres.*$', ResourceCompilerType.windres),
                ('--version', '^.*Wine Resource Compiler.*$', ResourceCompilerType.wrc),
        ]:
            p, o, e = mesonlib.Popen_safe(rescomp.get_command() + [arg])
            m = re.search(match, o, re.MULTILINE)
            if m:
                mlog.log('Windows resource compiler: %s' % m.group())
                self._rescomp = (rescomp, rc_type)
                break
        else:
            raise MesonException('Could not determine type of Windows resource compiler')

        return self._rescomp
@FeatureNewKwargs('windows.compile_resources', '0.47.0', ['depend_files', 'depends']) + @permittedKwargs({'args', 'include_directories', 'depend_files', 'depends'}) + def compile_resources(self, state, args, kwargs): + extra_args = mesonlib.stringlistify(flatten(kwargs.get('args', []))) + wrc_depend_files = extract_as_list(kwargs, 'depend_files', pop = True) + wrc_depends = extract_as_list(kwargs, 'depends', pop = True) + for d in wrc_depends: + if isinstance(d, build.CustomTarget): + extra_args += state.get_include_args([ + build.IncludeDirs('', [], False, [os.path.join('@BUILD_ROOT@', self.interpreter.backend.get_target_dir(d))]) + ]) + inc_dirs = extract_as_list(kwargs, 'include_directories', pop = True) + for incd in inc_dirs: + if not isinstance(incd, (str, build.IncludeDirs)): + raise MesonException('Resource include dirs should be include_directories().') + extra_args += state.get_include_args(inc_dirs) + + rescomp, rescomp_type = self._find_resource_compiler(state) + if rescomp_type == ResourceCompilerType.rc: + # RC is used to generate .res files, a special binary resource + # format, which can be passed directly to LINK (apparently LINK uses + # CVTRES internally to convert this to a COFF object) + suffix = 'res' + res_args = extra_args + ['/nologo', '/fo@OUTPUT@', '@INPUT@'] + elif rescomp_type == ResourceCompilerType.windres: + # ld only supports object files, so windres is used to generate a + # COFF object + suffix = 'o' + res_args = extra_args + ['@INPUT@', '@OUTPUT@'] + + m = 'Argument {!r} has a space which may not work with windres due to ' \ + 'a MinGW bug: https://sourceware.org/bugzilla/show_bug.cgi?id=4933' + for arg in extra_args: + if ' ' in arg: + mlog.warning(m.format(arg), fatal=False) + else: + suffix = 'o' + res_args = extra_args + ['@INPUT@', '-o', '@OUTPUT@'] + + res_targets = [] + + def add_target(src): + if isinstance(src, list): + for subsrc in src: + add_target(subsrc) + return + + if isinstance(src, str): + name_formatted = src + 
name = os.path.join(state.subdir, src) + elif isinstance(src, mesonlib.File): + name_formatted = src.fname + name = src.relative_name() + elif isinstance(src, build.CustomTarget): + if len(src.get_outputs()) > 1: + raise MesonException('windows.compile_resources does not accept custom targets with more than 1 output.') + + # Chances are that src.get_filename() is already the name of that + # target, add a prefix to avoid name clash. + name_formatted = 'windows_compile_resources_' + src.get_filename() + name = src.get_id() + else: + raise MesonException(f'Unexpected source type {src!r}. windows.compile_resources accepts only strings, files, custom targets, and lists thereof.') + + # Path separators are not allowed in target names + name = name.replace('/', '_').replace('\\', '_') + name_formatted = name_formatted.replace('/', '_').replace('\\', '_') + + res_kwargs = { + 'output': name + '_@BASENAME@.' + suffix, + 'input': [src], + 'command': [rescomp] + res_args, + 'depend_files': wrc_depend_files, + 'depends': wrc_depends, + } + + # instruct binutils windres to generate a preprocessor depfile + if rescomp_type == ResourceCompilerType.windres: + res_kwargs['depfile'] = res_kwargs['output'] + '.d' + res_kwargs['command'] += ['--preprocessor-arg=-MD', '--preprocessor-arg=-MQ@OUTPUT@', '--preprocessor-arg=-MF@DEPFILE@'] + + res_targets.append(build.CustomTarget(name_formatted, state.subdir, state.subproject, res_kwargs)) + + add_target(args) + + return ModuleReturnValue(res_targets, [res_targets]) + +def initialize(*args, **kwargs): + return WindowsModule(*args, **kwargs) |