| author | Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com> | 2023-10-10 14:33:42 +0000 |
|---|---|---|
| committer | Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com> | 2023-10-10 14:33:42 +0000 |
| commit | af1a266670d040d2f4083ff309d732d648afba2a (patch) | |
| tree | 2fc46203448ddcc6f81546d379abfaeb323575e9 /meson/mesonbuild/mintro.py | |
| parent | e02cda008591317b1625707ff8e115a4841aa889 (diff) | |
Change-Id: Iaf8d18082d3991dec7c0ebbea540f092188eb4ec
Diffstat (limited to 'meson/mesonbuild/mintro.py')
-rw-r--r-- | meson/mesonbuild/mintro.py | 543
1 file changed, 543 insertions, 0 deletions
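
The file added below is Meson's introspection helper: it implements the JSON dumps behind the `meson introspect` command (targets, build options, build system files, tests, benchmarks, dependencies, installed files and project info). As an illustration only, assuming an already-configured build directory named `builddir`, the flags registered in `add_arguments()` are used like this:

    meson introspect --targets --indent builddir
    meson introspect --buildoptions builddir
    meson introspect --all builddir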
diff --git a/meson/mesonbuild/mintro.py b/meson/mesonbuild/mintro.py
new file mode 100644
index 000000000..a79219b3d
--- /dev/null
+++ b/meson/mesonbuild/mintro.py
@@ -0,0 +1,543 @@
+# Copyright 2014-2016 The Meson development team
+
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+
+#     http://www.apache.org/licenses/LICENSE-2.0
+
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""This is a helper script for IDE developers. It allows you to
+extract information such as list of targets, files, compiler flags,
+tests and so on. All output is in JSON for simple parsing.
+
+Currently only works for the Ninja backend. Others use generated
+project files and don't need this info."""
+
+import collections
+import json
+from . import build, coredata as cdata
+from . import mesonlib
+from .ast import IntrospectionInterpreter, build_target_functions, AstConditionLevel, AstIDGenerator, AstIndentationGenerator, AstJSONPrinter
+from . import mlog
+from .backend import backends
+from .mparser import BaseNode, FunctionNode, ArrayNode, ArgumentNode, StringNode
+from .interpreter import Interpreter
+from pathlib import Path, PurePath
+import typing as T
+import os
+import argparse
+
+from .mesonlib import OptionKey
+
+def get_meson_info_file(info_dir: str) -> str:
+    return os.path.join(info_dir, 'meson-info.json')
+
+def get_meson_introspection_version() -> str:
+    return '1.0.0'
+
+def get_meson_introspection_required_version() -> T.List[str]:
+    return ['>=1.0', '<2.0']
+
+class IntroCommand:
+    def __init__(self,
+                 desc: str,
+                 func: T.Optional[T.Callable[[], T.Union[dict, list]]] = None,
+                 no_bd: T.Optional[T.Callable[[IntrospectionInterpreter], T.Union[dict, list]]] = None) -> None:
+        self.desc = desc + '.'
+        self.func = func
+        self.no_bd = no_bd
+
+def get_meson_introspection_types(coredata: T.Optional[cdata.CoreData] = None,
+                                  builddata: T.Optional[build.Build] = None,
+                                  backend: T.Optional[backends.Backend] = None,
+                                  sourcedir: T.Optional[str] = None) -> 'T.Mapping[str, IntroCommand]':
+    if backend and builddata:
+        benchmarkdata = backend.create_test_serialisation(builddata.get_benchmarks())
+        testdata = backend.create_test_serialisation(builddata.get_tests())
+        installdata = backend.create_install_data()
+        interpreter = backend.interpreter
+    else:
+        benchmarkdata = testdata = installdata = None
+
+    # Enforce key order for argparse
+    return collections.OrderedDict([
+        ('ast', IntroCommand('Dump the AST of the meson file', no_bd=dump_ast)),
+        ('benchmarks', IntroCommand('List all benchmarks', func=lambda: list_benchmarks(benchmarkdata))),
+        ('buildoptions', IntroCommand('List all build options', func=lambda: list_buildoptions(coredata), no_bd=list_buildoptions_from_source)),
+        ('buildsystem_files', IntroCommand('List files that make up the build system', func=lambda: list_buildsystem_files(builddata, interpreter))),
+        ('dependencies', IntroCommand('List external dependencies', func=lambda: list_deps(coredata), no_bd=list_deps_from_source)),
+        ('scan_dependencies', IntroCommand('Scan for dependencies used in the meson.build file', no_bd=list_deps_from_source)),
+        ('installed', IntroCommand('List all installed files and directories', func=lambda: list_installed(installdata))),
+        ('projectinfo', IntroCommand('Information about projects', func=lambda: list_projinfo(builddata), no_bd=list_projinfo_from_source)),
+        ('targets', IntroCommand('List top level targets', func=lambda: list_targets(builddata, installdata, backend), no_bd=list_targets_from_source)),
+        ('tests', IntroCommand('List all unit tests', func=lambda: list_tests(testdata))),
+    ])
+
+def add_arguments(parser: argparse.ArgumentParser) -> None:
+    intro_types = get_meson_introspection_types()
+    for key, val in intro_types.items():
+        flag = '--' + key.replace('_', '-')
+        parser.add_argument(flag, action='store_true', dest=key, default=False, help=val.desc)
+
+    parser.add_argument('--backend', choices=sorted(cdata.backendlist), dest='backend', default='ninja',
+                        help='The backend to use for the --buildoptions introspection.')
+    parser.add_argument('-a', '--all', action='store_true', dest='all', default=False,
+                        help='Print all available information.')
+    parser.add_argument('-i', '--indent', action='store_true', dest='indent', default=False,
+                        help='Enable pretty printed JSON.')
+    parser.add_argument('-f', '--force-object-output', action='store_true', dest='force_dict', default=False,
+                        help='Always use the new JSON format for multiple entries (even for 0 and 1 introspection commands)')
+    parser.add_argument('builddir', nargs='?', default='.', help='The build directory')
+
+def dump_ast(intr: IntrospectionInterpreter) -> T.Dict[str, T.Any]:
+    printer = AstJSONPrinter()
+    intr.ast.accept(printer)
+    return printer.result
+
+def list_installed(installdata: backends.InstallData) -> T.Dict[str, str]:
+    res = {}
+    if installdata is not None:
+        for t in installdata.targets:
+            res[os.path.join(installdata.build_dir, t.fname)] = \
+                os.path.join(installdata.prefix, t.outdir, os.path.basename(t.fname))
+            for alias in t.aliases.keys():
+                res[os.path.join(installdata.build_dir, alias)] = \
+                    os.path.join(installdata.prefix, t.outdir, os.path.basename(alias))
+        for i in installdata.data:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+        for i in installdata.headers:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path, os.path.basename(i.path))
+        for i in installdata.man:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+        for i in installdata.install_subdirs:
+            res[i.path] = os.path.join(installdata.prefix, i.install_path)
+    return res
+
+def get_target_dir(coredata: cdata.CoreData, subdir: str) -> str:
+    if coredata.get_option(OptionKey('layout')) == 'flat':
+        return 'meson-out'
+    else:
+        return subdir
+
+def list_targets_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]:
+    tlist = [] # type: T.List[T.Dict[str, T.Union[bool, str, T.List[T.Union[str, T.Dict[str, T.Union[str, T.List[str], bool]]]]]]]
+    root_dir = Path(intr.source_root)
+
+    def nodes_to_paths(node_list: T.List[BaseNode]) -> T.List[Path]:
+        res = [] # type: T.List[Path]
+        for n in node_list:
+            args = [] # type: T.List[BaseNode]
+            if isinstance(n, FunctionNode):
+                args = list(n.args.arguments)
+                if n.func_name in build_target_functions:
+                    args.pop(0)
+            elif isinstance(n, ArrayNode):
+                args = n.args.arguments
+            elif isinstance(n, ArgumentNode):
+                args = n.arguments
+            for j in args:
+                if isinstance(j, StringNode):
+                    assert isinstance(j.value, str)
+                    res += [Path(j.value)]
+                elif isinstance(j, str):
+                    res += [Path(j)]
+        res = [root_dir / i['subdir'] / x for x in res]
+        res = [x.resolve() for x in res]
+        return res
+
+    for i in intr.targets:
+        sources = nodes_to_paths(i['sources'])
+        extra_f = nodes_to_paths(i['extra_files'])
+        outdir = get_target_dir(intr.coredata, i['subdir'])
+
+        tlist += [{
+            'name': i['name'],
+            'id': i['id'],
+            'type': i['type'],
+            'defined_in': i['defined_in'],
+            'filename': [os.path.join(outdir, x) for x in i['outputs']],
+            'build_by_default': i['build_by_default'],
+            'target_sources': [{
+                'language': 'unknown',
+                'compiler': [],
+                'parameters': [],
+                'sources': [str(x) for x in sources],
+                'generated_sources': []
+            }],
+            'extra_files': [str(x) for x in extra_f],
+            'subproject': None, # Subprojects are not supported
+            'installed': i['installed']
+        }]
+
+    return tlist
+
+def list_targets(builddata: build.Build, installdata: backends.InstallData, backend: backends.Backend) -> T.List[T.Any]:
+    tlist = [] # type: T.List[T.Any]
+    build_dir = builddata.environment.get_build_dir()
+    src_dir = builddata.environment.get_source_dir()
+
+    # Fast lookup table for installation files
+    install_lookuptable = {}
+    for i in installdata.targets:
+        out = [os.path.join(installdata.prefix, i.outdir, os.path.basename(i.fname))]
+        out += [os.path.join(installdata.prefix, i.outdir, os.path.basename(x)) for x in i.aliases]
+        install_lookuptable[os.path.basename(i.fname)] = [str(PurePath(x)) for x in out]
+
+    for (idname, target) in builddata.get_targets().items():
+        if not isinstance(target, build.Target):
+            raise RuntimeError('The target object in `builddata.get_targets()` is not of type `build.Target`. Please file a bug with this error message.')
+
+        outdir = get_target_dir(builddata.environment.coredata, target.subdir)
+        t = {
+            'name': target.get_basename(),
+            'id': idname,
+            'type': target.get_typename(),
+            'defined_in': os.path.normpath(os.path.join(src_dir, target.subdir, 'meson.build')),
+            'filename': [os.path.join(build_dir, outdir, x) for x in target.get_outputs()],
+            'build_by_default': target.build_by_default,
+            'target_sources': backend.get_introspection_data(idname, target),
+            'extra_files': [os.path.normpath(os.path.join(src_dir, x.subdir, x.fname)) for x in target.extra_files],
+            'subproject': target.subproject or None
+        }
+
+        if installdata and target.should_install():
+            t['installed'] = True
+            ifn = [install_lookuptable.get(x, [None]) for x in target.get_outputs()]
+            t['install_filename'] = [x for sublist in ifn for x in sublist] # flatten the list
+        else:
+            t['installed'] = False
+        tlist.append(t)
+    return tlist
+
+def list_buildoptions_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+    subprojects = [i['name'] for i in intr.project_data['subprojects']]
+    return list_buildoptions(intr.coredata, subprojects)
+
+def list_buildoptions(coredata: cdata.CoreData, subprojects: T.Optional[T.List[str]] = None) -> T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]:
+    optlist = [] # type: T.List[T.Dict[str, T.Union[str, bool, int, T.List[str]]]]
+    subprojects = subprojects or []
+
+    dir_option_names = set(cdata.BUILTIN_DIR_OPTIONS)
+    test_option_names = {OptionKey('errorlogs'),
+                         OptionKey('stdsplit')}
+
+    dir_options: 'cdata.KeyedOptionDictType' = {}
+    test_options: 'cdata.KeyedOptionDictType' = {}
+    core_options: 'cdata.KeyedOptionDictType' = {}
+    for k, v in coredata.options.items():
+        if k in dir_option_names:
+            dir_options[k] = v
+        elif k in test_option_names:
+            test_options[k] = v
+        elif k.is_builtin():
+            core_options[k] = v
+            if not v.yielding:
+                for s in subprojects:
+                    core_options[k.evolve(subproject=s)] = v
+
+    def add_keys(options: 'cdata.KeyedOptionDictType', section: str) -> None:
+        for key, opt in sorted(options.items()):
+            optdict = {'name': str(key), 'value': opt.value, 'section': section,
+                       'machine': key.machine.get_lower_case_name() if coredata.is_per_machine_option(key) else 'any'}
+            if isinstance(opt, cdata.UserStringOption):
+                typestr = 'string'
+            elif isinstance(opt, cdata.UserBooleanOption):
+                typestr = 'boolean'
+            elif isinstance(opt, cdata.UserComboOption):
+                optdict['choices'] = opt.choices
+                typestr = 'combo'
+            elif isinstance(opt, cdata.UserIntegerOption):
+                typestr = 'integer'
+            elif isinstance(opt, cdata.UserArrayOption):
+                typestr = 'array'
+                if opt.choices:
+                    optdict['choices'] = opt.choices
+            else:
+                raise RuntimeError("Unknown option type")
+            optdict['type'] = typestr
+            optdict['description'] = opt.description
+            optlist.append(optdict)
+
+    add_keys(core_options, 'core')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_backend()}, 'backend')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_base()}, 'base')
+    add_keys(
+        {k: v for k, v in sorted(coredata.options.items(), key=lambda i: i[0].machine) if k.is_compiler()},
+        'compiler',
+    )
+    add_keys(dir_options, 'directory')
+    add_keys({k: v for k, v in coredata.options.items() if k.is_project()}, 'user')
+    add_keys(test_options, 'test')
+    return optlist
+
+def find_buildsystem_files_list(src_dir: str) -> T.List[str]:
+    # I feel dirty about this. But only slightly.
+    filelist = [] # type: T.List[str]
+    for root, _, files in os.walk(src_dir):
+        for f in files:
+            if f == 'meson.build' or f == 'meson_options.txt':
+                filelist.append(os.path.relpath(os.path.join(root, f), src_dir))
+    return filelist
+
+def list_buildsystem_files(builddata: build.Build, interpreter: Interpreter) -> T.List[str]:
+    src_dir = builddata.environment.get_source_dir()
+    filelist = interpreter.get_build_def_files() # type: T.List[str]
+    filelist = [PurePath(src_dir, x).as_posix() for x in filelist]
+    return filelist
+
+def list_deps_from_source(intr: IntrospectionInterpreter) -> T.List[T.Dict[str, T.Union[str, bool]]]:
+    result = [] # type: T.List[T.Dict[str, T.Union[str, bool]]]
+    for i in intr.dependencies:
+        keys = [
+            'name',
+            'required',
+            'version',
+            'has_fallback',
+            'conditional',
+        ]
+        result += [{k: v for k, v in i.items() if k in keys}]
+    return result
+
+def list_deps(coredata: cdata.CoreData) -> T.List[T.Dict[str, T.Union[str, T.List[str]]]]:
+    result = [] # type: T.List[T.Dict[str, T.Union[str, T.List[str]]]]
+    for d in coredata.deps.host.values():
+        if d.found():
+            result += [{'name': d.name,
+                        'version': d.get_version(),
+                        'compile_args': d.get_compile_args(),
+                        'link_args': d.get_link_args()}]
+    return result
+
+def get_test_list(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    result = [] # type: T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]
+    for t in testdata:
+        to = {} # type: T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]
+        if isinstance(t.fname, str):
+            fname = [t.fname]
+        else:
+            fname = t.fname
+        to['cmd'] = fname + t.cmd_args
+        if isinstance(t.env, build.EnvironmentVariables):
+            to['env'] = t.env.get_env({})
+        else:
+            to['env'] = t.env
+        to['name'] = t.name
+        to['workdir'] = t.workdir
+        to['timeout'] = t.timeout
+        to['suite'] = t.suite
+        to['is_parallel'] = t.is_parallel
+        to['priority'] = t.priority
+        to['protocol'] = str(t.protocol)
+        to['depends'] = t.depends
+        result.append(to)
+    return result
+
+def list_tests(testdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    return get_test_list(testdata)
+
+def list_benchmarks(benchdata: T.List[backends.TestSerialisation]) -> T.List[T.Dict[str, T.Union[str, int, T.List[str], T.Dict[str, str]]]]:
+    return get_test_list(benchdata)
+
+def list_projinfo(builddata: build.Build) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+    result = {'version': builddata.project_version,
+              'descriptive_name': builddata.project_name,
+              'subproject_dir': builddata.subproject_dir} # type: T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]
+    subprojects = []
+    for k, v in builddata.subprojects.items():
+        c = {'name': k,
+             'version': v,
+             'descriptive_name': builddata.projects.get(k)} # type: T.Dict[str, str]
+        subprojects.append(c)
+    result['subprojects'] = subprojects
+    return result
+
+def list_projinfo_from_source(intr: IntrospectionInterpreter) -> T.Dict[str, T.Union[str, T.List[T.Dict[str, str]]]]:
+    sourcedir = intr.source_root
+    files = find_buildsystem_files_list(sourcedir)
+    files = [os.path.normpath(x) for x in files]
+
+    for i in intr.project_data['subprojects']:
+        basedir = os.path.join(intr.subproject_dir, i['name'])
+        i['buildsystem_files'] = [x for x in files if x.startswith(basedir)]
+        files = [x for x in files if not x.startswith(basedir)]
+
+    intr.project_data['buildsystem_files'] = files
+    intr.project_data['subproject_dir'] = intr.subproject_dir
+    return intr.project_data
+
+def print_results(options: argparse.Namespace, results: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], indent: int) -> int:
+    if not results and not options.force_dict:
+        print('No command specified')
+        return 1
+    elif len(results) == 1 and not options.force_dict:
+        # Make to keep the existing output format for a single option
+        print(json.dumps(results[0][1], indent=indent))
+    else:
+        out = {}
+        for i in results:
+            out[i[0]] = i[1]
+        print(json.dumps(out, indent=indent))
+    return 0
+
+def get_infodir(builddir: T.Optional[str] = None) -> str:
+    infodir = 'meson-info'
+    if builddir is not None:
+        infodir = os.path.join(builddir, infodir)
+    return infodir
+
+def get_info_file(infodir: str, kind: T.Optional[str] = None) -> str:
+    return os.path.join(infodir,
+                        'meson-info.json' if not kind else f'intro-{kind}.json')
+
+def load_info_file(infodir: str, kind: T.Optional[str] = None) -> T.Any:
+    with open(get_info_file(infodir, kind), encoding='utf-8') as fp:
+        return json.load(fp)
+
+def run(options: argparse.Namespace) -> int:
+    datadir = 'meson-private'
+    infodir = get_infodir(options.builddir)
+    if options.builddir is not None:
+        datadir = os.path.join(options.builddir, datadir)
+    indent = 4 if options.indent else None
+    results = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+    sourcedir = '.' if options.builddir == 'meson.build' else options.builddir[:-11]
+    intro_types = get_meson_introspection_types(sourcedir=sourcedir)
+
+    if 'meson.build' in [os.path.basename(options.builddir), options.builddir]:
+        # Make sure that log entries in other parts of meson don't interfere with the JSON output
+        mlog.disable()
+        backend = backends.get_backend_from_name(options.backend)
+        assert backend is not None
+        intr = IntrospectionInterpreter(sourcedir, '', backend.name, visitors = [AstIDGenerator(), AstIndentationGenerator(), AstConditionLevel()])
+        intr.analyze()
+        # Re-enable logging just in case
+        mlog.enable()
+        for key, val in intro_types.items():
+            if (not options.all and not getattr(options, key, False)) or not val.no_bd:
+                continue
+            results += [(key, val.no_bd(intr))]
+        return print_results(options, results, indent)
+
+    try:
+        raw = load_info_file(infodir)
+        intro_vers = raw.get('introspection', {}).get('version', {}).get('full', '0.0.0')
+    except FileNotFoundError:
+        if not os.path.isdir(datadir) or not os.path.isdir(infodir):
+            print('Current directory is not a meson build directory.\n'
+                  'Please specify a valid build dir or change the working directory to it.')
+        else:
+            print('Introspection file {} does not exist.\n'
+                  'It is also possible that the build directory was generated with an old\n'
+                  'meson version. Please regenerate it in this case.'.format(get_info_file(infodir)))
+        return 1
+
+    vers_to_check = get_meson_introspection_required_version()
+    for i in vers_to_check:
+        if not mesonlib.version_compare(intro_vers, i):
+            print('Introspection version {} is not supported. '
+                  'The required version is: {}'
+                  .format(intro_vers, ' and '.join(vers_to_check)))
+            return 1
+
+    # Extract introspection information from JSON
+    for i in intro_types.keys():
+        if not intro_types[i].func:
+            continue
+        if not options.all and not getattr(options, i, False):
+            continue
+        try:
+            results += [(i, load_info_file(infodir, i))]
+        except FileNotFoundError:
+            print('Introspection file {} does not exist.'.format(get_info_file(infodir, i)))
+            return 1
+
+    return print_results(options, results, indent)
+
+updated_introspection_files = [] # type: T.List[str]
+
+def write_intro_info(intro_info: T.Sequence[T.Tuple[str, T.Union[dict, T.List[T.Any]]]], info_dir: str) -> None:
+    global updated_introspection_files
+    for i in intro_info:
+        out_file = os.path.join(info_dir, 'intro-{}.json'.format(i[0]))
+        tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+        with open(tmp_file, 'w', encoding='utf-8') as fp:
+            json.dump(i[1], fp)
+            fp.flush() # Not sure if this is needed
+        os.replace(tmp_file, out_file)
+        updated_introspection_files += [i[0]]
+
+def generate_introspection_file(builddata: build.Build, backend: backends.Backend) -> None:
+    coredata = builddata.environment.get_coredata()
+    intro_types = get_meson_introspection_types(coredata=coredata, builddata=builddata, backend=backend)
+    intro_info = [] # type: T.List[T.Tuple[str, T.Union[dict, T.List[T.Any]]]]
+
+    for key, val in intro_types.items():
+        if not val.func:
+            continue
+        intro_info += [(key, val.func())]
+
+    write_intro_info(intro_info, builddata.environment.info_dir)
+
+def update_build_options(coredata: cdata.CoreData, info_dir: str) -> None:
+    intro_info = [
+        ('buildoptions', list_buildoptions(coredata))
+    ]
+
+    write_intro_info(intro_info, info_dir)
+
+def split_version_string(version: str) -> T.Dict[str, T.Union[str, int]]:
+    vers_list = version.split('.')
+    return {
+        'full': version,
+        'major': int(vers_list[0] if len(vers_list) > 0 else 0),
+        'minor': int(vers_list[1] if len(vers_list) > 1 else 0),
+        'patch': int(vers_list[2] if len(vers_list) > 2 else 0)
+    }
+
+def write_meson_info_file(builddata: build.Build, errors: list, build_files_updated: bool = False) -> None:
+    global updated_introspection_files
+    info_dir = builddata.environment.info_dir
+    info_file = get_meson_info_file(info_dir)
+    intro_types = get_meson_introspection_types()
+    intro_info = {}
+
+    for i in intro_types.keys():
+        if not intro_types[i].func:
+            continue
+        intro_info[i] = {
+            'file': f'intro-{i}.json',
+            'updated': i in updated_introspection_files
+        }
+
+    info_data = {
+        'meson_version': split_version_string(cdata.version),
+        'directories': {
+            'source': builddata.environment.get_source_dir(),
+            'build': builddata.environment.get_build_dir(),
+            'info': info_dir,
+        },
+        'introspection': {
+            'version': split_version_string(get_meson_introspection_version()),
+            'information': intro_info,
+        },
+        'build_files_updated': build_files_updated,
+    }
+
+    if errors:
+        info_data['error'] = True
+        info_data['error_list'] = [x if isinstance(x, str) else str(x) for x in errors]
+    else:
+        info_data['error'] = False
+
+    # Write the data to disc
+    tmp_file = os.path.join(info_dir, 'tmp_dump.json')
+    with open(tmp_file, 'w', encoding='utf-8') as fp:
+        json.dump(info_data, fp)
+        fp.flush()
+    os.replace(tmp_file, info_file)
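
Besides the CLI path through `run()`, the intro-*.json and meson-info.json files written by `write_intro_info()` and `write_meson_info_file()` land in `<builddir>/meson-info/` and can be read directly. A minimal sketch of consuming them, assuming a configured build directory at `builddir/` (the path and the printed keys are illustrative only):

    import json
    import os

    builddir = 'builddir'  # assumed: an existing, configured Meson build directory
    info_dir = os.path.join(builddir, 'meson-info')

    # meson-info.json records which intro-*.json dumps exist and whether they are up to date
    with open(os.path.join(info_dir, 'meson-info.json'), encoding='utf-8') as fp:
        meson_info = json.load(fp)
    print(sorted(meson_info['introspection']['information']))

    # intro-targets.json holds the list produced by list_targets()
    with open(os.path.join(info_dir, 'intro-targets.json'), encoding='utf-8') as fp:
        targets = json.load(fp)
    for t in targets:
        print(t['name'], t['type'], t['filename'])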