author      Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com>    2023-10-10 14:33:42 +0000
committer   Angelos Mouzakitis <a.mouzakitis@virtualopensystems.com>    2023-10-10 14:33:42 +0000
commit      af1a266670d040d2f4083ff309d732d648afba2a (patch)
tree        2fc46203448ddcc6f81546d379abfaeb323575e9 /meson/mesonbuild/scripts
parent      e02cda008591317b1625707ff8e115a4841aa889 (diff)
Change-Id: Iaf8d18082d3991dec7c0ebbea540f092188eb4ec
Diffstat (limited to 'meson/mesonbuild/scripts')
24 files changed, 2754 insertions, 0 deletions
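Among the files added below, meson/mesonbuild/scripts/__init__.py introduces a destdir_join() helper that joins a DESTDIR staging directory with an already-prefixed install path, stripping the second Windows drive letter so that c:\destdir + c:\prefix yields c:\destdir\prefix. A minimal usage sketch follows (the staging and prefix paths are hypothetical, chosen only for illustration):

    from mesonbuild.scripts import destdir_join

    # POSIX: the prefixed path is appended verbatim to the staging directory
    destdir_join('/tmp/staging', '/usr/local/share')
    # -> '/tmp/staging/usr/local/share'

    # Windows: the drive letter of the second path is dropped, matching the
    # comment in the diff (c:\destdir + c:\prefix must produce c:\destdir\prefix)
    destdir_join('c:\\destdir', 'c:\\prefix')
    # -> 'c:\\destdir\\prefix'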
diff --git a/meson/mesonbuild/scripts/__init__.py b/meson/mesonbuild/scripts/__init__.py new file mode 100644 index 000000000..2edbe8899 --- /dev/null +++ b/meson/mesonbuild/scripts/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# TODO: consider switching to pathlib for this +def destdir_join(d1: str, d2: str) -> str: + # c:\destdir + c:\prefix must produce c:\destdir\prefix + if len(d1) > 1 and d1[1] == ':' \ + and len(d2) > 1 and d2[1] == ':': + return d1 + d2[2:] + return d1 + d2 diff --git a/meson/mesonbuild/scripts/clangformat.py b/meson/mesonbuild/scripts/clangformat.py new file mode 100644 index 000000000..8e61b5591 --- /dev/null +++ b/meson/mesonbuild/scripts/clangformat.py @@ -0,0 +1,91 @@ +# Copyright 2018 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import subprocess +import itertools +import fnmatch +from pathlib import Path +from concurrent.futures import ThreadPoolExecutor + +from ..environment import detect_clangformat +from ..compilers import lang_suffixes +import typing as T + +def parse_pattern_file(fname: Path) -> T.List[str]: + patterns = [] + try: + with fname.open(encoding='utf-8') as f: + for line in f: + pattern = line.strip() + if pattern and not pattern.startswith('#'): + patterns.append(pattern) + except FileNotFoundError: + pass + return patterns + +def run_clang_format(exelist: T.List[str], fname: Path, check: bool) -> subprocess.CompletedProcess: + if check: + original = fname.read_bytes() + before = fname.stat().st_mtime + args = ['-style=file', '-i', str(fname)] + ret = subprocess.run(exelist + args) + after = fname.stat().st_mtime + if before != after: + print('File reformatted: ', fname) + if check: + # Restore the original if only checking. 
+ fname.write_bytes(original) + ret.returncode = 1 + return ret + +def clangformat(exelist: T.List[str], srcdir: Path, builddir: Path, check: bool) -> int: + patterns = parse_pattern_file(srcdir / '.clang-format-include') + if not patterns: + patterns = ['**/*'] + globs = [srcdir.glob(p) for p in patterns] + patterns = parse_pattern_file(srcdir / '.clang-format-ignore') + ignore = [str(builddir / '*')] + ignore.extend([str(srcdir / p) for p in patterns]) + suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp'])) + suffixes.add('h') + suffixes = {f'.{s}' for s in suffixes} + futures = [] + returncode = 0 + with ThreadPoolExecutor() as e: + for f in itertools.chain(*globs): + strf = str(f) + if f.is_dir() or f.suffix not in suffixes or \ + any(fnmatch.fnmatch(strf, i) for i in ignore): + continue + futures.append(e.submit(run_clang_format, exelist, f, check)) + returncode = max([x.result().returncode for x in futures]) + return returncode + +def run(args: T.List[str]) -> int: + parser = argparse.ArgumentParser() + parser.add_argument('--check', action='store_true') + parser.add_argument('sourcedir') + parser.add_argument('builddir') + options = parser.parse_args(args) + + srcdir = Path(options.sourcedir) + builddir = Path(options.builddir) + + exelist = detect_clangformat() + if not exelist: + print('Could not execute clang-format "%s"' % ' '.join(exelist)) + return 1 + + return clangformat(exelist, srcdir, builddir, options.check) diff --git a/meson/mesonbuild/scripts/clangtidy.py b/meson/mesonbuild/scripts/clangtidy.py new file mode 100644 index 000000000..8d366c84d --- /dev/null +++ b/meson/mesonbuild/scripts/clangtidy.py @@ -0,0 +1,57 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pathlib +import subprocess +import shutil +import os +import re +from concurrent.futures import ThreadPoolExecutor +import typing as T + +from ..compilers import lang_suffixes + +def manual_clangtidy(srcdir_name: str, builddir_name: str) -> int: + srcdir = pathlib.Path(srcdir_name) + suffixes = set(lang_suffixes['c']).union(set(lang_suffixes['cpp'])) + suffixes.add('h') + futures = [] + returncode = 0 + with ThreadPoolExecutor() as e: + for f in (x for suff in suffixes for x in srcdir.glob('**/*.' + suff)): + if f.is_dir(): + continue + strf = str(f) + if strf.startswith(builddir_name): + continue + futures.append(e.submit(subprocess.run, ['clang-tidy', '-p', builddir_name, strf])) + returncode = max([x.result().returncode for x in futures]) + return returncode + +def clangtidy(srcdir_name: str, builddir_name: str) -> int: + run_clang_tidy = None + for rct in ('run-clang-tidy', 'run-clang-tidy.py'): + if shutil.which(rct): + run_clang_tidy = rct + break + if run_clang_tidy: + return subprocess.run([run_clang_tidy, '-p', builddir_name, '^(?!' 
+ re.escape(builddir_name + os.path.sep) +').*$']).returncode + else: + print('Could not find run-clang-tidy, running checks manually.') + return manual_clangtidy(srcdir_name, builddir_name) + +def run(args: T.List[str]) -> int: + srcdir_name = args[0] + builddir_name = args[1] + return clangtidy(srcdir_name, builddir_name) diff --git a/meson/mesonbuild/scripts/cleantrees.py b/meson/mesonbuild/scripts/cleantrees.py new file mode 100644 index 000000000..1a387538e --- /dev/null +++ b/meson/mesonbuild/scripts/cleantrees.py @@ -0,0 +1,44 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys +import shutil +import pickle +import typing as T + +def rmtrees(build_dir: str, trees: T.List[str]) -> None: + for t in trees: + # Never delete trees outside of the builddir + if os.path.isabs(t): + print(f'Cannot delete dir with absolute path {t!r}') + continue + bt = os.path.join(build_dir, t) + # Skip if it doesn't exist, or if it is not a directory + if os.path.isdir(bt): + shutil.rmtree(bt, ignore_errors=True) + +def run(args: T.List[str]) -> int: + if len(args) != 1: + print('Cleaner script for Meson. Do not run on your own please.') + print('cleantrees.py <data-file>') + return 1 + with open(args[0], 'rb') as f: + data = pickle.load(f) + rmtrees(data.build_dir, data.trees) + # Never fail cleaning + return 0 + +if __name__ == '__main__': + run(sys.argv[1:]) diff --git a/meson/mesonbuild/scripts/cmake_run_ctgt.py b/meson/mesonbuild/scripts/cmake_run_ctgt.py new file mode 100755 index 000000000..dfb70d10f --- /dev/null +++ b/meson/mesonbuild/scripts/cmake_run_ctgt.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python3 + +import argparse +import subprocess +import shutil +import sys +from pathlib import Path +import typing as T + +def run(argsv: T.List[str]) -> int: + commands = [[]] # type: T.List[T.List[str]] + SEPARATOR = ';;;' + + # Generate CMD parameters + parser = argparse.ArgumentParser(description='Wrapper for add_custom_command') + parser.add_argument('-d', '--directory', type=str, metavar='D', required=True, help='Working directory to cwd to') + parser.add_argument('-o', '--outputs', nargs='+', metavar='O', required=True, help='Expected output files') + parser.add_argument('-O', '--original-outputs', nargs='*', metavar='O', default=[], help='Output files expected by CMake') + parser.add_argument('commands', nargs=argparse.REMAINDER, help=f'A "{SEPARATOR}" separated list of commands') + + # Parse + args = parser.parse_args(argsv) + directory = Path(args.directory) + + dummy_target = None + if len(args.outputs) == 1 and len(args.original_outputs) == 0: + dummy_target = Path(args.outputs[0]) + elif len(args.outputs) != len(args.original_outputs): + print('Length of output list and original output list differ') + return 1 + + for i in args.commands: + if i == SEPARATOR: + commands += [[]] + continue + + i = i.replace('"', '') # Remove lefover quotes + commands[-1] += [i] + + # Execute + for i in commands: + # Skip empty lists + if not i: + 
continue + + cmd = [] + stdout = None + stderr = None + capture_file = '' + + for j in i: + if j in ['>', '>>']: + stdout = subprocess.PIPE + continue + elif j in ['&>', '&>>']: + stdout = subprocess.PIPE + stderr = subprocess.STDOUT + continue + + if stdout is not None or stderr is not None: + capture_file += j + else: + cmd += [j] + + try: + directory.mkdir(parents=True, exist_ok=True) + + res = subprocess.run(cmd, stdout=stdout, stderr=stderr, cwd=str(directory), check=True) + if capture_file: + out_file = directory / capture_file + out_file.write_bytes(res.stdout) + except subprocess.CalledProcessError: + return 1 + + if dummy_target: + dummy_target.touch() + return 0 + + # Copy outputs + zipped_outputs = zip([Path(x) for x in args.outputs], [Path(x) for x in args.original_outputs]) + for expected, generated in zipped_outputs: + do_copy = False + if not expected.exists(): + if not generated.exists(): + print('Unable to find generated file. This can cause the build to fail:') + print(generated) + do_copy = False + else: + do_copy = True + elif generated.exists(): + if generated.stat().st_mtime > expected.stat().st_mtime: + do_copy = True + + if do_copy: + if expected.exists(): + expected.unlink() + shutil.copyfile(str(generated), str(expected)) + + return 0 + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/cmd_or_ps.ps1 b/meson/mesonbuild/scripts/cmd_or_ps.ps1 new file mode 100644 index 000000000..ccef8e84d --- /dev/null +++ b/meson/mesonbuild/scripts/cmd_or_ps.ps1 @@ -0,0 +1,22 @@ +# Copyied from GStreamer project +# Author: Seungha Yang <seungha.yang@navercorp.com> + +$i=1 +$ppid=(gwmi win32_process -Filter "processid='$pid'").parentprocessid +$pname=(Get-Process -id $ppid).Name +While($true) { + if($pname -eq "cmd" -Or $pname -eq "powershell") { + Write-Host ("{0}.exe" -f $pname) + Break + } + + # 10 times iteration seems to be sufficient + if($i -gt 10) { + Break + } + + # not found yet, find grand parant + $ppid=(gwmi win32_process -Filter "processid='$ppid'").parentprocessid + $pname=(Get-Process -id $ppid).Name + $i++ +} diff --git a/meson/mesonbuild/scripts/coverage.py b/meson/mesonbuild/scripts/coverage.py new file mode 100644 index 000000000..5d552c301 --- /dev/null +++ b/meson/mesonbuild/scripts/coverage.py @@ -0,0 +1,173 @@ +# Copyright 2017 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from mesonbuild import environment, mesonlib + +import argparse, re, sys, os, subprocess, pathlib, stat +import typing as T + +def coverage(outputs: T.List[str], source_root: str, subproject_root: str, build_root: str, log_dir: str, use_llvm_cov: bool) -> int: + outfiles = [] + exitcode = 0 + + (gcovr_exe, gcovr_new_rootdir, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools() + + # gcovr >= 4.2 requires a different syntax for out of source builds + if gcovr_new_rootdir: + gcovr_base_cmd = [gcovr_exe, '-r', source_root, build_root] + else: + gcovr_base_cmd = [gcovr_exe, '-r', build_root] + + if use_llvm_cov: + gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov'] + else: + gcov_exe_args = [] + + if not outputs or 'xml' in outputs: + if gcovr_exe: + subprocess.check_call(gcovr_base_cmd + + ['-x', + '-e', re.escape(subproject_root), + '-o', os.path.join(log_dir, 'coverage.xml') + ] + gcov_exe_args) + outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml'))) + elif outputs: + print('gcovr >= 3.3 needed to generate Xml coverage report') + exitcode = 1 + + if not outputs or 'text' in outputs: + if gcovr_exe: + subprocess.check_call(gcovr_base_cmd + + ['-e', re.escape(subproject_root), + '-o', os.path.join(log_dir, 'coverage.txt') + ] + gcov_exe_args) + outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt'))) + elif outputs: + print('gcovr >= 3.3 needed to generate text coverage report') + exitcode = 1 + + if not outputs or 'html' in outputs: + if lcov_exe and genhtml_exe: + htmloutdir = os.path.join(log_dir, 'coveragereport') + covinfo = os.path.join(log_dir, 'coverage.info') + initial_tracefile = covinfo + '.initial' + run_tracefile = covinfo + '.run' + raw_tracefile = covinfo + '.raw' + if use_llvm_cov: + # Create a shim to allow using llvm-cov as a gcov tool. + if mesonlib.is_windows(): + llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat') + with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_bat: + llvm_cov_bat.write(f'@"{llvm_cov_exe}" gcov %*') + else: + llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh') + with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_sh: + llvm_cov_sh.write(f'#!/usr/bin/env sh\nexec "{llvm_cov_exe}" gcov $@') + os.chmod(llvm_cov_shim_path, os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC) + gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path] + else: + gcov_tool_args = [] + subprocess.check_call([lcov_exe, + '--directory', build_root, + '--capture', + '--initial', + '--output-file', + initial_tracefile] + + gcov_tool_args) + subprocess.check_call([lcov_exe, + '--directory', build_root, + '--capture', + '--output-file', run_tracefile, + '--no-checksum', + '--rc', 'lcov_branch_coverage=1'] + + gcov_tool_args) + # Join initial and test results. 
+ subprocess.check_call([lcov_exe, + '-a', initial_tracefile, + '-a', run_tracefile, + '--rc', 'lcov_branch_coverage=1', + '-o', raw_tracefile]) + # Remove all directories outside the source_root from the covinfo + subprocess.check_call([lcov_exe, + '--extract', raw_tracefile, + os.path.join(source_root, '*'), + '--rc', 'lcov_branch_coverage=1', + '--output-file', covinfo]) + # Remove all directories inside subproject dir + subprocess.check_call([lcov_exe, + '--remove', covinfo, + os.path.join(subproject_root, '*'), + '--rc', 'lcov_branch_coverage=1', + '--output-file', covinfo]) + subprocess.check_call([genhtml_exe, + '--prefix', build_root, + '--prefix', source_root, + '--output-directory', htmloutdir, + '--title', 'Code coverage', + '--legend', + '--show-details', + '--branch-coverage', + covinfo]) + outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html'))) + elif gcovr_exe: + htmloutdir = os.path.join(log_dir, 'coveragereport') + if not os.path.isdir(htmloutdir): + os.mkdir(htmloutdir) + subprocess.check_call(gcovr_base_cmd + + ['--html', + '--html-details', + '--print-summary', + '-e', re.escape(subproject_root), + '-o', os.path.join(htmloutdir, 'index.html'), + ]) + outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html'))) + elif outputs: + print('lcov/genhtml or gcovr >= 3.3 needed to generate Html coverage report') + exitcode = 1 + + if not outputs and not outfiles: + print('Need gcovr or lcov/genhtml to generate any coverage reports') + exitcode = 1 + + if outfiles: + print('') + for (filetype, path) in outfiles: + print(filetype + ' coverage report can be found at', path.as_uri()) + + return exitcode + +def run(args: T.List[str]) -> int: + if not os.path.isfile('build.ninja'): + print('Coverage currently only works with the Ninja backend.') + return 1 + parser = argparse.ArgumentParser(description='Generate coverage reports') + parser.add_argument('--text', dest='outputs', action='append_const', + const='text', help='generate Text report') + parser.add_argument('--xml', dest='outputs', action='append_const', + const='xml', help='generate Xml report') + parser.add_argument('--html', dest='outputs', action='append_const', + const='html', help='generate Html report') + parser.add_argument('--use_llvm_cov', action='store_true', + help='use llvm-cov') + parser.add_argument('source_root') + parser.add_argument('subproject_root') + parser.add_argument('build_root') + parser.add_argument('log_dir') + options = parser.parse_args(args) + return coverage(options.outputs, options.source_root, + options.subproject_root, options.build_root, + options.log_dir, options.use_llvm_cov) + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/delwithsuffix.py b/meson/mesonbuild/scripts/delwithsuffix.py new file mode 100644 index 000000000..873db0d40 --- /dev/null +++ b/meson/mesonbuild/scripts/delwithsuffix.py @@ -0,0 +1,36 @@ +# Copyright 2013 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os, sys +import typing as T + +def run(args: T.List[str]) -> int: + if len(args) != 2: + print('delwithsuffix.py <root of subdir to process> <suffix to delete>') + sys.exit(1) + + topdir = args[0] + suffix = args[1] + if suffix[0] != '.': + suffix = '.' + suffix + + for (root, _, files) in os.walk(topdir): + for f in files: + if f.endswith(suffix): + fullname = os.path.join(root, f) + os.unlink(fullname) + return 0 + +if __name__ == '__main__': + run(sys.argv[1:]) diff --git a/meson/mesonbuild/scripts/depfixer.py b/meson/mesonbuild/scripts/depfixer.py new file mode 100644 index 000000000..52c7ba969 --- /dev/null +++ b/meson/mesonbuild/scripts/depfixer.py @@ -0,0 +1,509 @@ +# Copyright 2013-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +import sys +import os +import stat +import struct +import shutil +import subprocess +import typing as T + +from ..mesonlib import OrderedSet + +SHT_STRTAB = 3 +DT_NEEDED = 1 +DT_RPATH = 15 +DT_RUNPATH = 29 +DT_STRTAB = 5 +DT_SONAME = 14 +DT_MIPS_RLD_MAP_REL = 1879048245 + +# Global cache for tools +INSTALL_NAME_TOOL = False + +class DataSizes: + def __init__(self, ptrsize: int, is_le: bool) -> None: + if is_le: + p = '<' + else: + p = '>' + self.Half = p + 'h' + self.HalfSize = 2 + self.Word = p + 'I' + self.WordSize = 4 + self.Sword = p + 'i' + self.SwordSize = 4 + if ptrsize == 64: + self.Addr = p + 'Q' + self.AddrSize = 8 + self.Off = p + 'Q' + self.OffSize = 8 + self.XWord = p + 'Q' + self.XWordSize = 8 + self.Sxword = p + 'q' + self.SxwordSize = 8 + else: + self.Addr = p + 'I' + self.AddrSize = 4 + self.Off = p + 'I' + self.OffSize = 4 + +class DynamicEntry(DataSizes): + def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None: + super().__init__(ptrsize, is_le) + self.ptrsize = ptrsize + if ptrsize == 64: + self.d_tag = struct.unpack(self.Sxword, ifile.read(self.SxwordSize))[0] + self.val = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] + else: + self.d_tag = struct.unpack(self.Sword, ifile.read(self.SwordSize))[0] + self.val = struct.unpack(self.Word, ifile.read(self.WordSize))[0] + + def write(self, ofile: T.BinaryIO) -> None: + if self.ptrsize == 64: + ofile.write(struct.pack(self.Sxword, self.d_tag)) + ofile.write(struct.pack(self.XWord, self.val)) + else: + ofile.write(struct.pack(self.Sword, self.d_tag)) + ofile.write(struct.pack(self.Word, self.val)) + +class SectionHeader(DataSizes): + def __init__(self, ifile: T.BinaryIO, ptrsize: int, is_le: bool) -> None: + super().__init__(ptrsize, is_le) + if ptrsize == 64: + is_64 = True + else: + is_64 = False +# Elf64_Word + self.sh_name = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Word + self.sh_type = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Xword + if is_64: + self.sh_flags = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] + else: + self.sh_flags = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Addr + self.sh_addr = struct.unpack(self.Addr, 
ifile.read(self.AddrSize))[0] +# Elf64_Off + self.sh_offset = struct.unpack(self.Off, ifile.read(self.OffSize))[0] +# Elf64_Xword + if is_64: + self.sh_size = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] + else: + self.sh_size = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Word + self.sh_link = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Word + self.sh_info = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Xword + if is_64: + self.sh_addralign = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] + else: + self.sh_addralign = struct.unpack(self.Word, ifile.read(self.WordSize))[0] +# Elf64_Xword + if is_64: + self.sh_entsize = struct.unpack(self.XWord, ifile.read(self.XWordSize))[0] + else: + self.sh_entsize = struct.unpack(self.Word, ifile.read(self.WordSize))[0] + +class Elf(DataSizes): + def __init__(self, bfile: str, verbose: bool = True) -> None: + self.bfile = bfile + self.verbose = verbose + self.sections = [] # type: T.List[SectionHeader] + self.dynamic = [] # type: T.List[DynamicEntry] + self.open_bf(bfile) + try: + (self.ptrsize, self.is_le) = self.detect_elf_type() + super().__init__(self.ptrsize, self.is_le) + self.parse_header() + self.parse_sections() + self.parse_dynamic() + except (struct.error, RuntimeError): + self.close_bf() + raise + + def open_bf(self, bfile: str) -> None: + self.bf = None + self.bf_perms = None + try: + self.bf = open(bfile, 'r+b') + except PermissionError as e: + self.bf_perms = stat.S_IMODE(os.lstat(bfile).st_mode) + os.chmod(bfile, stat.S_IREAD | stat.S_IWRITE | stat.S_IEXEC) + try: + self.bf = open(bfile, 'r+b') + except Exception: + os.chmod(bfile, self.bf_perms) + self.bf_perms = None + raise e + + def close_bf(self) -> None: + if self.bf is not None: + if self.bf_perms is not None: + os.fchmod(self.bf.fileno(), self.bf_perms) + self.bf_perms = None + self.bf.close() + self.bf = None + + def __enter__(self) -> 'Elf': + return self + + def __del__(self) -> None: + self.close_bf() + + def __exit__(self, exc_type: T.Any, exc_value: T.Any, traceback: T.Any) -> None: + self.close_bf() + + def detect_elf_type(self) -> T.Tuple[int, bool]: + data = self.bf.read(6) + if data[1:4] != b'ELF': + # This script gets called to non-elf targets too + # so just ignore them. + if self.verbose: + print('File "%s" is not an ELF file.' % self.bfile) + sys.exit(0) + if data[4] == 1: + ptrsize = 32 + elif data[4] == 2: + ptrsize = 64 + else: + sys.exit('File "%s" has unknown ELF class.' % self.bfile) + if data[5] == 1: + is_le = True + elif data[5] == 2: + is_le = False + else: + sys.exit('File "%s" has unknown ELF endianness.' 
% self.bfile) + return ptrsize, is_le + + def parse_header(self) -> None: + self.bf.seek(0) + self.e_ident = struct.unpack('16s', self.bf.read(16))[0] + self.e_type = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + self.e_machine = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + self.e_version = struct.unpack(self.Word, self.bf.read(self.WordSize))[0] + self.e_entry = struct.unpack(self.Addr, self.bf.read(self.AddrSize))[0] + self.e_phoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0] + self.e_shoff = struct.unpack(self.Off, self.bf.read(self.OffSize))[0] + self.e_flags = struct.unpack(self.Word, self.bf.read(self.WordSize))[0] + self.e_ehsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + self.e_phentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + self.e_phnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + self.e_shentsize = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + self.e_shnum = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + self.e_shstrndx = struct.unpack(self.Half, self.bf.read(self.HalfSize))[0] + + def parse_sections(self) -> None: + self.bf.seek(self.e_shoff) + for _ in range(self.e_shnum): + self.sections.append(SectionHeader(self.bf, self.ptrsize, self.is_le)) + + def read_str(self) -> bytes: + arr = [] + x = self.bf.read(1) + while x != b'\0': + arr.append(x) + x = self.bf.read(1) + if x == b'': + raise RuntimeError('Tried to read past the end of the file') + return b''.join(arr) + + def find_section(self, target_name: bytes) -> T.Optional[SectionHeader]: + section_names = self.sections[self.e_shstrndx] + for i in self.sections: + self.bf.seek(section_names.sh_offset + i.sh_name) + name = self.read_str() + if name == target_name: + return i + return None + + def parse_dynamic(self) -> None: + sec = self.find_section(b'.dynamic') + if sec is None: + return + self.bf.seek(sec.sh_offset) + while True: + e = DynamicEntry(self.bf, self.ptrsize, self.is_le) + self.dynamic.append(e) + if e.d_tag == 0: + break + + def print_section_names(self) -> None: + section_names = self.sections[self.e_shstrndx] + for i in self.sections: + self.bf.seek(section_names.sh_offset + i.sh_name) + name = self.read_str() + print(name.decode()) + + def print_soname(self) -> None: + soname = None + strtab = None + for i in self.dynamic: + if i.d_tag == DT_SONAME: + soname = i + if i.d_tag == DT_STRTAB: + strtab = i + if soname is None or strtab is None: + print("This file does not have a soname") + return + self.bf.seek(strtab.val + soname.val) + print(self.read_str()) + + def get_entry_offset(self, entrynum: int) -> T.Optional[int]: + sec = self.find_section(b'.dynstr') + for i in self.dynamic: + if i.d_tag == entrynum: + res = sec.sh_offset + i.val + assert isinstance(res, int) + return res + return None + + def print_rpath(self) -> None: + offset = self.get_entry_offset(DT_RPATH) + if offset is None: + print("This file does not have an rpath.") + else: + self.bf.seek(offset) + print(self.read_str()) + + def print_runpath(self) -> None: + offset = self.get_entry_offset(DT_RUNPATH) + if offset is None: + print("This file does not have a runpath.") + else: + self.bf.seek(offset) + print(self.read_str()) + + def print_deps(self) -> None: + sec = self.find_section(b'.dynstr') + deps = [] + for i in self.dynamic: + if i.d_tag == DT_NEEDED: + deps.append(i) + for i in deps: + offset = sec.sh_offset + i.val + self.bf.seek(offset) + name = self.read_str() + print(name) + + def fix_deps(self, prefix: bytes) -> 
None: + sec = self.find_section(b'.dynstr') + deps = [] + for i in self.dynamic: + if i.d_tag == DT_NEEDED: + deps.append(i) + for i in deps: + offset = sec.sh_offset + i.val + self.bf.seek(offset) + name = self.read_str() + if name.startswith(prefix): + basename = name.split(b'/')[-1] + padding = b'\0' * (len(name) - len(basename)) + newname = basename + padding + assert(len(newname) == len(name)) + self.bf.seek(offset) + self.bf.write(newname) + + def fix_rpath(self, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes) -> None: + # The path to search for can be either rpath or runpath. + # Fix both of them to be sure. + self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RPATH) + self.fix_rpathtype_entry(rpath_dirs_to_remove, new_rpath, DT_RUNPATH) + + def fix_rpathtype_entry(self, rpath_dirs_to_remove: T.Set[bytes], new_rpath: bytes, entrynum: int) -> None: + rp_off = self.get_entry_offset(entrynum) + if rp_off is None: + if self.verbose: + print('File does not have rpath. It should be a fully static executable.') + return + self.bf.seek(rp_off) + + old_rpath = self.read_str() + # Some rpath entries may come from multiple sources. + # Only add each one once. + new_rpaths = OrderedSet() # type: OrderedSet[bytes] + if new_rpath: + new_rpaths.update(new_rpath.split(b':')) + if old_rpath: + # Filter out build-only rpath entries + # added by get_link_dep_subdirs() or + # specified by user with build_rpath. + for rpath_dir in old_rpath.split(b':'): + if not (rpath_dir in rpath_dirs_to_remove or + rpath_dir == (b'X' * len(rpath_dir))): + if rpath_dir: + new_rpaths.add(rpath_dir) + + # Prepend user-specified new entries while preserving the ones that came from pkgconfig etc. + new_rpath = b':'.join(new_rpaths) + + if len(old_rpath) < len(new_rpath): + msg = "New rpath must not be longer than the old one.\n Old: {}\n New: {}".format(old_rpath.decode('utf-8'), new_rpath.decode('utf-8')) + sys.exit(msg) + # The linker does read-only string deduplication. If there is a + # string that shares a suffix with the rpath, they might get + # dedupped. This means changing the rpath string might break something + # completely unrelated. This has already happened once with X.org. + # Thus we want to keep this change as small as possible to minimize + # the chance of obliterating other strings. It might still happen + # but our behavior is identical to what chrpath does and it has + # been in use for ages so based on that this should be rare. + if not new_rpath: + self.remove_rpath_entry(entrynum) + else: + self.bf.seek(rp_off) + self.bf.write(new_rpath) + self.bf.write(b'\0') + + def remove_rpath_entry(self, entrynum: int) -> None: + sec = self.find_section(b'.dynamic') + if sec is None: + return None + for (i, entry) in enumerate(self.dynamic): + if entry.d_tag == entrynum: + rpentry = self.dynamic[i] + rpentry.d_tag = 0 + self.dynamic = self.dynamic[:i] + self.dynamic[i + 1:] + [rpentry] + break + # DT_MIPS_RLD_MAP_REL is relative to the offset of the tag. Adjust it consequently. 
+ for entry in self.dynamic[i:]: + if entry.d_tag == DT_MIPS_RLD_MAP_REL: + entry.val += 2 * (self.ptrsize // 8) + break + self.bf.seek(sec.sh_offset) + for entry in self.dynamic: + entry.write(self.bf) + return None + +def fix_elf(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Optional[bytes], verbose: bool = True) -> None: + with Elf(fname, verbose) as e: + if new_rpath is None: + e.print_rpath() + e.print_runpath() + else: + e.fix_rpath(rpath_dirs_to_remove, new_rpath) + +def get_darwin_rpaths_to_remove(fname: str) -> T.List[str]: + out = subprocess.check_output(['otool', '-l', fname], + universal_newlines=True, + stderr=subprocess.DEVNULL) + result = [] + current_cmd = 'FOOBAR' + for line in out.split('\n'): + line = line.strip() + if ' ' not in line: + continue + key, value = line.strip().split(' ', 1) + if key == 'cmd': + current_cmd = value + if key == 'path' and current_cmd == 'LC_RPATH': + rp = value.split('(', 1)[0].strip() + result.append(rp) + return result + +def fix_darwin(fname: str, new_rpath: str, final_path: str, install_name_mappings: T.Dict[str, str]) -> None: + try: + rpaths = get_darwin_rpaths_to_remove(fname) + except subprocess.CalledProcessError: + # Otool failed, which happens when invoked on a + # non-executable target. Just return. + return + try: + args = [] + if rpaths: + # TODO: fix this properly, not totally clear how + # + # removing rpaths from binaries on macOS has tons of + # weird edge cases. For instance, if the user provided + # a '-Wl,-rpath' argument in LDFLAGS that happens to + # coincide with an rpath generated from a dependency, + # this would cause installation failures, as meson would + # generate install_name_tool calls with two identical + # '-delete_rpath' arguments, which install_name_tool + # fails on. Because meson itself ensures that it never + # adds duplicate rpaths, duplicate rpaths necessarily + # come from user variables. The idea of using OrderedSet + # is to remove *at most one* duplicate RPATH entry. This + # is not optimal, as it only respects the user's choice + # partially: if they provided a non-duplicate '-Wl,-rpath' + # argument, it gets removed, if they provided a duplicate + # one, it remains in the final binary. A potentially optimal + # solution would split all user '-Wl,-rpath' arguments from + # LDFLAGS, and later add them back with '-add_rpath'. 
+ for rp in OrderedSet(rpaths): + args += ['-delete_rpath', rp] + subprocess.check_call(['install_name_tool', fname] + args, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + args = [] + if new_rpath: + args += ['-add_rpath', new_rpath] + # Rewrite -install_name @rpath/libfoo.dylib to /path/to/libfoo.dylib + if fname.endswith('dylib'): + args += ['-id', final_path] + if install_name_mappings: + for old, new in install_name_mappings.items(): + args += ['-change', old, new] + if args: + subprocess.check_call(['install_name_tool', fname] + args, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL) + except Exception as err: + raise SystemExit(err) + +def fix_jar(fname: str) -> None: + subprocess.check_call(['jar', 'xfv', fname, 'META-INF/MANIFEST.MF']) + with open('META-INF/MANIFEST.MF', 'r+', encoding='utf-8') as f: + lines = f.readlines() + f.seek(0) + for line in lines: + if not line.startswith('Class-Path:'): + f.write(line) + f.truncate() + subprocess.check_call(['jar', 'ufm', fname, 'META-INF/MANIFEST.MF']) + +def fix_rpath(fname: str, rpath_dirs_to_remove: T.Set[bytes], new_rpath: T.Union[str, bytes], final_path: str, install_name_mappings: T.Dict[str, str], verbose: bool = True) -> None: + global INSTALL_NAME_TOOL + # Static libraries, import libraries, debug information, headers, etc + # never have rpaths + # DLLs and EXE currently do not need runtime path fixing + if fname.endswith(('.a', '.lib', '.pdb', '.h', '.hpp', '.dll', '.exe')): + return + try: + if fname.endswith('.jar'): + fix_jar(fname) + return + if isinstance(new_rpath, str): + new_rpath = new_rpath.encode('utf8') + fix_elf(fname, rpath_dirs_to_remove, new_rpath, verbose) + return + except SystemExit as e: + if isinstance(e.code, int) and e.code == 0: + pass + else: + raise + # We don't look for this on import because it will do a useless PATH lookup + # on non-mac platforms. That can be expensive on some Windows machines + # (upto 30ms), which is significant with --only-changed. For details, see: + # https://github.com/mesonbuild/meson/pull/6612#discussion_r378581401 + if INSTALL_NAME_TOOL is False: + INSTALL_NAME_TOOL = bool(shutil.which('install_name_tool')) + if INSTALL_NAME_TOOL: + if isinstance(new_rpath, bytes): + new_rpath = new_rpath.decode('utf8') + fix_darwin(fname, new_rpath, final_path, install_name_mappings) diff --git a/meson/mesonbuild/scripts/depscan.py b/meson/mesonbuild/scripts/depscan.py new file mode 100644 index 000000000..9fc435b5d --- /dev/null +++ b/meson/mesonbuild/scripts/depscan.py @@ -0,0 +1,201 @@ +# Copyright 2020 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pathlib +import pickle +import re +import os +import sys +import typing as T + +from ..backend.ninjabackend import TargetDependencyScannerInfo, ninja_quote +from ..compilers.compilers import lang_suffixes + +CPP_IMPORT_RE = re.compile(r'\w*import ([a-zA-Z0-9]+);') +CPP_EXPORT_RE = re.compile(r'\w*export module ([a-zA-Z0-9]+);') + +FORTRAN_INCLUDE_PAT = r"^\s*include\s*['\"](\w+\.\w+)['\"]" +FORTRAN_MODULE_PAT = r"^\s*\bmodule\b\s+(\w+)\s*(?:!+.*)*$" +FORTRAN_SUBMOD_PAT = r"^\s*\bsubmodule\b\s*\((\w+:?\w+)\)\s*(\w+)" +FORTRAN_USE_PAT = r"^\s*use,?\s*(?:non_intrinsic)?\s*(?:::)?\s*(\w+)" + +FORTRAN_MODULE_RE = re.compile(FORTRAN_MODULE_PAT, re.IGNORECASE) +FORTRAN_SUBMOD_RE = re.compile(FORTRAN_SUBMOD_PAT, re.IGNORECASE) +FORTRAN_USE_RE = re.compile(FORTRAN_USE_PAT, re.IGNORECASE) + +class DependencyScanner: + def __init__(self, pickle_file: str, outfile: str, sources: T.List[str]): + with open(pickle_file, 'rb') as pf: + self.target_data = pickle.load(pf) # type: TargetDependencyScannerInfo + self.outfile = outfile + self.sources = sources + self.provided_by = {} # type: T.Dict[str, str] + self.exports = {} # type: T.Dict[str, str] + self.needs = {} # type: T.Dict[str, T.List[str]] + self.sources_with_exports = [] # type: T.List[str] + + def scan_file(self, fname: str) -> None: + suffix = os.path.splitext(fname)[1][1:].lower() + if suffix in lang_suffixes['fortran']: + self.scan_fortran_file(fname) + elif suffix in lang_suffixes['cpp']: + self.scan_cpp_file(fname) + else: + sys.exit(f'Can not scan files with suffix .{suffix}.') + + def scan_fortran_file(self, fname: str) -> None: + fpath = pathlib.Path(fname) + modules_in_this_file = set() + for line in fpath.read_text(encoding='utf-8').split('\n'): + import_match = FORTRAN_USE_RE.match(line) + export_match = FORTRAN_MODULE_RE.match(line) + submodule_export_match = FORTRAN_SUBMOD_RE.match(line) + if import_match: + needed = import_match.group(1).lower() + # In Fortran you have an using declaration also for the module + # you define in the same file. Prevent circular dependencies. + if needed not in modules_in_this_file: + if fname in self.needs: + self.needs[fname].append(needed) + else: + self.needs[fname] = [needed] + if export_match: + exported_module = export_match.group(1).lower() + assert(exported_module not in modules_in_this_file) + modules_in_this_file.add(exported_module) + if exported_module in self.provided_by: + raise RuntimeError(f'Multiple files provide module {exported_module}.') + self.sources_with_exports.append(fname) + self.provided_by[exported_module] = fname + self.exports[fname] = exported_module + if submodule_export_match: + # Store submodule "Foo" "Bar" as "foo:bar". + # A submodule declaration can be both an import and an export declaration: + # + # submodule (a1:a2) a3 + # - requires a1@a2.smod + # - produces a1@a3.smod + parent_module_name_full = submodule_export_match.group(1).lower() + parent_module_name = parent_module_name_full.split(':')[0] + submodule_name = submodule_export_match.group(2).lower() + concat_name = f'{parent_module_name}:{submodule_name}' + self.sources_with_exports.append(fname) + self.provided_by[concat_name] = fname + self.exports[fname] = concat_name + # Fortran requires that the immediate parent module must be built + # before the current one. 
Thus: + # + # submodule (parent) parent <- requires parent.mod (really parent.smod, but they are created at the same time) + # submodule (a1:a2) a3 <- requires a1@a2.smod + # + # a3 does not depend on the a1 parent module directly, only transitively. + if fname in self.needs: + self.needs[fname].append(parent_module_name_full) + else: + self.needs[fname] = [parent_module_name_full] + + + def scan_cpp_file(self, fname: str) -> None: + fpath = pathlib.Path(fname) + for line in fpath.read_text(encoding='utf-8').split('\n'): + import_match = CPP_IMPORT_RE.match(line) + export_match = CPP_EXPORT_RE.match(line) + if import_match: + needed = import_match.group(1) + if fname in self.needs: + self.needs[fname].append(needed) + else: + self.needs[fname] = [needed] + if export_match: + exported_module = export_match.group(1) + if exported_module in self.provided_by: + raise RuntimeError(f'Multiple files provide module {exported_module}.') + self.sources_with_exports.append(fname) + self.provided_by[exported_module] = fname + self.exports[fname] = exported_module + + def objname_for(self, src: str) -> str: + objname = self.target_data.source2object[src] + assert(isinstance(objname, str)) + return objname + + def module_name_for(self, src: str) -> str: + suffix = os.path.splitext(src)[1][1:].lower() + if suffix in lang_suffixes['fortran']: + exported = self.exports[src] + # Module foo:bar goes to a file name foo@bar.smod + # Module Foo goes to a file name foo.mod + namebase = exported.replace(':', '@') + if ':' in exported: + extension = 'smod' + else: + extension = 'mod' + return os.path.join(self.target_data.private_dir, f'{namebase}.{extension}') + elif suffix in lang_suffixes['cpp']: + return '{}.ifc'.format(self.exports[src]) + else: + raise RuntimeError('Unreachable code.') + + def scan(self) -> int: + for s in self.sources: + self.scan_file(s) + with open(self.outfile, 'w', encoding='utf-8') as ofile: + ofile.write('ninja_dyndep_version = 1\n') + for src in self.sources: + objfilename = self.objname_for(src) + mods_and_submods_needed = [] + module_files_generated = [] + module_files_needed = [] + if src in self.sources_with_exports: + module_files_generated.append(self.module_name_for(src)) + if src in self.needs: + for modname in self.needs[src]: + if modname not in self.provided_by: + # Nothing provides this module, we assume that it + # comes from a dependency library somewhere and is + # already built by the time this compilation starts. 
+ pass + else: + mods_and_submods_needed.append(modname) + + for modname in mods_and_submods_needed: + provider_src = self.provided_by[modname] + provider_modfile = self.module_name_for(provider_src) + # Prune self-dependencies + if provider_src != src: + module_files_needed.append(provider_modfile) + + quoted_objfilename = ninja_quote(objfilename, True) + quoted_module_files_generated = [ninja_quote(x, True) for x in module_files_generated] + quoted_module_files_needed = [ninja_quote(x, True) for x in module_files_needed] + if quoted_module_files_generated: + mod_gen = '| ' + ' '.join(quoted_module_files_generated) + else: + mod_gen = '' + if quoted_module_files_needed: + mod_dep = '| ' + ' '.join(quoted_module_files_needed) + else: + mod_dep = '' + build_line = 'build {} {}: dyndep {}'.format(quoted_objfilename, + mod_gen, + mod_dep) + ofile.write(build_line + '\n') + return 0 + +def run(args: T.List[str]) -> int: + pickle_file = args[0] + outfile = args[1] + sources = args[2:] + scanner = DependencyScanner(pickle_file, outfile, sources) + return scanner.scan() diff --git a/meson/mesonbuild/scripts/dirchanger.py b/meson/mesonbuild/scripts/dirchanger.py new file mode 100644 index 000000000..21632cd89 --- /dev/null +++ b/meson/mesonbuild/scripts/dirchanger.py @@ -0,0 +1,29 @@ +# Copyright 2015-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +'''CD into dir given as first argument and execute +the command given in the rest of the arguments.''' + +import os, subprocess, sys +import typing as T + +def run(args: T.List[str]) -> int: + dirname = args[0] + command = args[1:] + + os.chdir(dirname) + return subprocess.call(command) + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/externalproject.py b/meson/mesonbuild/scripts/externalproject.py new file mode 100644 index 000000000..a8e3bfe2f --- /dev/null +++ b/meson/mesonbuild/scripts/externalproject.py @@ -0,0 +1,109 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import argparse +import multiprocessing +import subprocess +from pathlib import Path +import typing as T + +from ..mesonlib import Popen_safe + +class ExternalProject: + def __init__(self, options: argparse.Namespace): + self.name = options.name + self.src_dir = options.srcdir + self.build_dir = options.builddir + self.install_dir = options.installdir + self.log_dir = options.logdir + self.verbose = options.verbose + self.stampfile = options.stampfile + self.depfile = options.depfile + self.make = options.make + + def write_depfile(self) -> None: + with open(self.depfile, 'w', encoding='utf-8') as f: + f.write(f'{self.stampfile}: \\\n') + for dirpath, dirnames, filenames in os.walk(self.src_dir): + dirnames[:] = [d for d in dirnames if not d.startswith('.')] + for fname in filenames: + if fname.startswith('.'): + continue + path = Path(dirpath, fname) + f.write(' {} \\\n'.format(path.as_posix().replace(' ', '\\ '))) + + def write_stampfile(self) -> None: + with open(self.stampfile, 'w', encoding='utf-8') as f: + pass + + def gnu_make(self) -> bool: + p, o, e = Popen_safe([self.make, '--version']) + if p.returncode == 0 and 'GNU Make' in o: + return True + return False + + def build(self) -> int: + make_cmd = [self.make] + if self.gnu_make(): + make_cmd.append('-j' + str(multiprocessing.cpu_count())) + + rc = self._run('build', make_cmd) + if rc != 0: + return rc + + install_cmd = make_cmd + ['DESTDIR= ' + self.install_dir, 'install'] + rc = self._run('install', install_cmd) + if rc != 0: + return rc + + self.write_depfile() + self.write_stampfile() + + return 0 + + def _run(self, step: str, command: T.List[str]) -> int: + m = 'Running command ' + str(command) + ' in directory ' + str(self.build_dir) + '\n' + log_filename = Path(self.log_dir, f'{self.name}-{step}.log') + output = None + if not self.verbose: + output = open(log_filename, 'w', encoding='utf-8') + output.write(m + '\n') + output.flush() + else: + print(m) + p, o, e = Popen_safe(command, stderr=subprocess.STDOUT, stdout=output, + cwd=self.build_dir) + if p.returncode != 0: + m = f'{step} step returned error code {p.returncode}.' + if not self.verbose: + m += '\nSee logs: ' + str(log_filename) + print(m) + return p.returncode + +def run(args: T.List[str]) -> int: + parser = argparse.ArgumentParser() + parser.add_argument('--name') + parser.add_argument('--srcdir') + parser.add_argument('--builddir') + parser.add_argument('--installdir') + parser.add_argument('--logdir') + parser.add_argument('--make') + parser.add_argument('--verbose', action='store_true') + parser.add_argument('stampfile') + parser.add_argument('depfile') + + options = parser.parse_args(args) + ep = ExternalProject(options) + return ep.build() diff --git a/meson/mesonbuild/scripts/gettext.py b/meson/mesonbuild/scripts/gettext.py new file mode 100644 index 000000000..b1ce6af1c --- /dev/null +++ b/meson/mesonbuild/scripts/gettext.py @@ -0,0 +1,125 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import shutil +import argparse +import subprocess +from . import destdir_join +import typing as T + +parser = argparse.ArgumentParser() +parser.add_argument('command') +parser.add_argument('--pkgname', default='') +parser.add_argument('--datadirs', default='') +parser.add_argument('--langs', default='') +parser.add_argument('--localedir', default='') +parser.add_argument('--subdir', default='') +parser.add_argument('--extra-args', default='') + +def read_linguas(src_sub: str) -> T.List[str]: + # Syntax of this file is documented here: + # https://www.gnu.org/software/gettext/manual/html_node/po_002fLINGUAS.html + linguas = os.path.join(src_sub, 'LINGUAS') + try: + langs = [] + with open(linguas, encoding='utf-8') as f: + for line in f: + line = line.strip() + if line and not line.startswith('#'): + langs += line.split() + return langs + except (FileNotFoundError, PermissionError): + print(f'Could not find file LINGUAS in {src_sub}') + return [] + +def run_potgen(src_sub: str, pkgname: str, datadirs: str, args: T.List[str]) -> int: + listfile = os.path.join(src_sub, 'POTFILES.in') + if not os.path.exists(listfile): + listfile = os.path.join(src_sub, 'POTFILES') + if not os.path.exists(listfile): + print('Could not find file POTFILES in %s' % src_sub) + return 1 + + child_env = os.environ.copy() + if datadirs: + child_env['GETTEXTDATADIRS'] = datadirs + + ofile = os.path.join(src_sub, pkgname + '.pot') + return subprocess.call(['xgettext', '--package-name=' + pkgname, '-p', src_sub, '-f', listfile, + '-D', os.environ['MESON_SOURCE_ROOT'], '-k_', '-o', ofile] + args, + env=child_env) + +def gen_gmo(src_sub: str, bld_sub: str, langs: T.List[str]) -> int: + for l in langs: + subprocess.check_call(['msgfmt', os.path.join(src_sub, l + '.po'), + '-o', os.path.join(bld_sub, l + '.gmo')]) + return 0 + +def update_po(src_sub: str, pkgname: str, langs: T.List[str]) -> int: + potfile = os.path.join(src_sub, pkgname + '.pot') + for l in langs: + pofile = os.path.join(src_sub, l + '.po') + if os.path.exists(pofile): + subprocess.check_call(['msgmerge', '-q', '-o', pofile, pofile, potfile]) + else: + subprocess.check_call(['msginit', '--input', potfile, '--output-file', pofile, '--locale', l, '--no-translator']) + return 0 + +def do_install(src_sub: str, bld_sub: str, dest: str, pkgname: str, langs: T.List[str]) -> int: + for l in langs: + srcfile = os.path.join(bld_sub, l + '.gmo') + outfile = os.path.join(dest, l, 'LC_MESSAGES', + pkgname + '.mo') + tempfile = outfile + '.tmp' + os.makedirs(os.path.dirname(outfile), exist_ok=True) + shutil.copy2(srcfile, tempfile) + os.replace(tempfile, outfile) + if not os.getenv('MESON_INSTALL_QUIET', False): + print(f'Installing {srcfile} to {outfile}') + return 0 + +def run(args: T.List[str]) -> int: + options = parser.parse_args(args) + subcmd = options.command + langs = options.langs.split('@@') if options.langs else None + extra_args = options.extra_args.split('@@') if options.extra_args else [] + subdir = os.environ.get('MESON_SUBDIR', '') + if options.subdir: + subdir = options.subdir + src_sub = os.path.join(os.environ['MESON_SOURCE_ROOT'], subdir) + bld_sub = os.path.join(os.environ['MESON_BUILD_ROOT'], subdir) + + if not langs: + langs = read_linguas(src_sub) + + if subcmd == 'pot': + return run_potgen(src_sub, options.pkgname, options.datadirs, extra_args) + elif subcmd == 'gen_gmo': + return gen_gmo(src_sub, bld_sub, langs) + elif subcmd == 'update_po': + if run_potgen(src_sub, options.pkgname, options.datadirs, extra_args) != 0: + return 1 + 
return update_po(src_sub, options.pkgname, langs) + elif subcmd == 'install': + destdir = os.environ.get('DESTDIR', '') + dest = destdir_join(destdir, os.path.join(os.environ['MESON_INSTALL_PREFIX'], + options.localedir)) + if gen_gmo(src_sub, bld_sub, langs) != 0: + return 1 + do_install(src_sub, bld_sub, dest, options.pkgname, langs) + else: + print('Unknown subcommand.') + return 1 + return 0 diff --git a/meson/mesonbuild/scripts/gtkdochelper.py b/meson/mesonbuild/scripts/gtkdochelper.py new file mode 100644 index 000000000..153c3d933 --- /dev/null +++ b/meson/mesonbuild/scripts/gtkdochelper.py @@ -0,0 +1,295 @@ +# Copyright 2015-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys, os +import subprocess +import shutil +import argparse +from ..mesonlib import MesonException, Popen_safe, is_windows, is_cygwin, split_args +from . import destdir_join +import typing as T + +parser = argparse.ArgumentParser() + +parser.add_argument('--sourcedir', dest='sourcedir') +parser.add_argument('--builddir', dest='builddir') +parser.add_argument('--subdir', dest='subdir') +parser.add_argument('--headerdirs', dest='headerdirs') +parser.add_argument('--mainfile', dest='mainfile') +parser.add_argument('--modulename', dest='modulename') +parser.add_argument('--moduleversion', dest='moduleversion') +parser.add_argument('--htmlargs', dest='htmlargs', default='') +parser.add_argument('--scanargs', dest='scanargs', default='') +parser.add_argument('--scanobjsargs', dest='scanobjsargs', default='') +parser.add_argument('--gobjects-types-file', dest='gobject_typesfile', default='') +parser.add_argument('--fixxrefargs', dest='fixxrefargs', default='') +parser.add_argument('--mkdbargs', dest='mkdbargs', default='') +parser.add_argument('--ld', dest='ld', default='') +parser.add_argument('--cc', dest='cc', default='') +parser.add_argument('--ldflags', dest='ldflags', default='') +parser.add_argument('--cflags', dest='cflags', default='') +parser.add_argument('--content-files', dest='content_files', default='') +parser.add_argument('--expand-content-files', dest='expand_content_files', default='') +parser.add_argument('--html-assets', dest='html_assets', default='') +parser.add_argument('--ignore-headers', dest='ignore_headers', default='') +parser.add_argument('--namespace', dest='namespace', default='') +parser.add_argument('--mode', dest='mode', default='') +parser.add_argument('--installdir', dest='install_dir') +parser.add_argument('--run', dest='run', default='') +for tool in ['scan', 'scangobj', 'mkdb', 'mkhtml', 'fixxref']: + program_name = 'gtkdoc-' + tool + parser.add_argument('--' + program_name, dest=program_name.replace('-', '_')) + +def gtkdoc_run_check(cmd: T.List[str], cwd: str, library_paths: T.Optional[T.List[str]] = None) -> None: + if library_paths is None: + library_paths = [] + + env = dict(os.environ) + if is_windows() or is_cygwin(): + if 'PATH' in env: + library_paths.extend(env['PATH'].split(os.pathsep)) + env['PATH'] = os.pathsep.join(library_paths) + 
else: + if 'LD_LIBRARY_PATH' in env: + library_paths.extend(env['LD_LIBRARY_PATH'].split(os.pathsep)) + env['LD_LIBRARY_PATH'] = os.pathsep.join(library_paths) + + if is_windows(): + cmd.insert(0, sys.executable) + + # Put stderr into stdout since we want to print it out anyway. + # This preserves the order of messages. + p, out = Popen_safe(cmd, cwd=cwd, env=env, stderr=subprocess.STDOUT)[0:2] + if p.returncode != 0: + err_msg = [f"{cmd!r} failed with status {p.returncode:d}"] + if out: + err_msg.append(out) + raise MesonException('\n'.join(err_msg)) + elif out: + # Unfortunately Windows cmd.exe consoles may be using a codepage + # that might choke print() with a UnicodeEncodeError, so let's + # ignore such errors for now, as a compromise as we are outputting + # console output here... + try: + print(out) + except UnicodeEncodeError: + pass + +def build_gtkdoc(source_root: str, build_root: str, doc_subdir: str, src_subdirs: T.List[str], + main_file: str, module: str, module_version: str, + html_args: T.List[str], scan_args: T.List[str], fixxref_args: T.List[str], mkdb_args: T.List[str], + gobject_typesfile: str, scanobjs_args: T.List[str], run: str, ld: str, cc: str, ldflags: str, cflags: str, + html_assets: T.List[str], content_files: T.List[str], ignore_headers: T.List[str], namespace: str, + expand_content_files: T.List[str], mode: str, options: argparse.Namespace) -> None: + print("Building documentation for %s" % module) + + src_dir_args = [] + for src_dir in src_subdirs: + if not os.path.isabs(src_dir): + dirs = [os.path.join(source_root, src_dir), + os.path.join(build_root, src_dir)] + else: + dirs = [src_dir] + src_dir_args += ['--source-dir=' + d for d in dirs] + + doc_src = os.path.join(source_root, doc_subdir) + abs_out = os.path.join(build_root, doc_subdir) + htmldir = os.path.join(abs_out, 'html') + + content_files += [main_file] + sections = os.path.join(doc_src, module + "-sections.txt") + if os.path.exists(sections): + content_files.append(sections) + + overrides = os.path.join(doc_src, module + "-overrides.txt") + if os.path.exists(overrides): + content_files.append(overrides) + + # Copy files to build directory + for f in content_files: + # FIXME: Use mesonlib.File objects so we don't need to do this + if not os.path.isabs(f): + f = os.path.join(doc_src, f) + elif os.path.commonpath([f, build_root]) == build_root: + continue + shutil.copyfile(f, os.path.join(abs_out, os.path.basename(f))) + + shutil.rmtree(htmldir, ignore_errors=True) + try: + os.mkdir(htmldir) + except Exception: + pass + + for f in html_assets: + f_abs = os.path.join(doc_src, f) + shutil.copyfile(f_abs, os.path.join(htmldir, os.path.basename(f_abs))) + + scan_cmd = [options.gtkdoc_scan, '--module=' + module] + src_dir_args + if ignore_headers: + scan_cmd.append('--ignore-headers=' + ' '.join(ignore_headers)) + # Add user-specified arguments + scan_cmd += scan_args + gtkdoc_run_check(scan_cmd, abs_out) + + # Use the generated types file when available, otherwise gobject_typesfile + # would often be a path to source dir instead of build dir. 
+ if '--rebuild-types' in scan_args: + gobject_typesfile = os.path.join(abs_out, module + '.types') + + if gobject_typesfile: + scanobjs_cmd = [options.gtkdoc_scangobj] + scanobjs_args + scanobjs_cmd += ['--types=' + gobject_typesfile, + '--module=' + module, + '--run=' + run, + '--cflags=' + cflags, + '--ldflags=' + ldflags, + '--cc=' + cc, + '--ld=' + ld, + '--output-dir=' + abs_out] + + library_paths = [] + for ldflag in split_args(ldflags): + if ldflag.startswith('-Wl,-rpath,'): + library_paths.append(ldflag[11:]) + + gtkdoc_run_check(scanobjs_cmd, build_root, library_paths) + + # Make docbook files + if mode == 'auto': + # Guessing is probably a poor idea but these keeps compat + # with previous behavior + if main_file.endswith('sgml'): + modeflag = '--sgml-mode' + else: + modeflag = '--xml-mode' + elif mode == 'xml': + modeflag = '--xml-mode' + elif mode == 'sgml': + modeflag = '--sgml-mode' + else: # none + modeflag = None + + mkdb_cmd = [options.gtkdoc_mkdb, + '--module=' + module, + '--output-format=xml', + '--expand-content-files=' + ' '.join(expand_content_files), + ] + src_dir_args + if namespace: + mkdb_cmd.append('--name-space=' + namespace) + if modeflag: + mkdb_cmd.append(modeflag) + if main_file: + # Yes, this is the flag even if the file is in xml. + mkdb_cmd.append('--main-sgml-file=' + main_file) + # Add user-specified arguments + mkdb_cmd += mkdb_args + gtkdoc_run_check(mkdb_cmd, abs_out) + + # Make HTML documentation + mkhtml_cmd = [options.gtkdoc_mkhtml, + '--path=' + ':'.join((doc_src, abs_out)), + module, + ] + html_args + if main_file: + mkhtml_cmd.append('../' + main_file) + else: + mkhtml_cmd.append('%s-docs.xml' % module) + # html gen must be run in the HTML dir + gtkdoc_run_check(mkhtml_cmd, htmldir) + + # Fix cross-references in HTML files + fixref_cmd = [options.gtkdoc_fixxref, + '--module=' + module, + '--module-dir=html'] + fixxref_args + gtkdoc_run_check(fixref_cmd, abs_out) + + if module_version: + shutil.move(os.path.join(htmldir, f'{module}.devhelp2'), + os.path.join(htmldir, f'{module}-{module_version}.devhelp2')) + +def install_gtkdoc(build_root: str, doc_subdir: str, install_prefix: str, datadir: str, module: str) -> None: + source = os.path.join(build_root, doc_subdir, 'html') + final_destination = os.path.join(install_prefix, datadir, module) + shutil.rmtree(final_destination, ignore_errors=True) + shutil.copytree(source, final_destination) + +def run(args: T.List[str]) -> int: + options = parser.parse_args(args) + if options.htmlargs: + htmlargs = options.htmlargs.split('@@') + else: + htmlargs = [] + if options.scanargs: + scanargs = options.scanargs.split('@@') + else: + scanargs = [] + if options.scanobjsargs: + scanobjsargs = options.scanobjsargs.split('@@') + else: + scanobjsargs = [] + if options.fixxrefargs: + fixxrefargs = options.fixxrefargs.split('@@') + else: + fixxrefargs = [] + if options.mkdbargs: + mkdbargs = options.mkdbargs.split('@@') + else: + mkdbargs = [] + build_gtkdoc( + options.sourcedir, + options.builddir, + options.subdir, + options.headerdirs.split('@@'), + options.mainfile, + options.modulename, + options.moduleversion, + htmlargs, + scanargs, + fixxrefargs, + mkdbargs, + options.gobject_typesfile, + scanobjsargs, + options.run, + options.ld, + options.cc, + options.ldflags, + options.cflags, + options.html_assets.split('@@') if options.html_assets else [], + options.content_files.split('@@') if options.content_files else [], + options.ignore_headers.split('@@') if options.ignore_headers else [], + options.namespace, 
+ options.expand_content_files.split('@@') if options.expand_content_files else [], + options.mode, + options) + + if 'MESON_INSTALL_PREFIX' in os.environ: + destdir = os.environ.get('DESTDIR', '') + install_prefix = destdir_join(destdir, os.environ['MESON_INSTALL_PREFIX']) + if options.install_dir: + install_dir = options.install_dir + else: + install_dir = options.modulename + if options.moduleversion: + install_dir += '-' + options.moduleversion + if os.path.isabs(install_dir): + install_dir = destdir_join(destdir, install_dir) + install_gtkdoc(options.builddir, + options.subdir, + install_prefix, + 'share/gtk-doc/html', + install_dir) + return 0 + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/hotdochelper.py b/meson/mesonbuild/scripts/hotdochelper.py new file mode 100644 index 000000000..a96a34afa --- /dev/null +++ b/meson/mesonbuild/scripts/hotdochelper.py @@ -0,0 +1,38 @@ +import os +import shutil +import subprocess + +from . import destdir_join + +import argparse +import typing as T + +parser = argparse.ArgumentParser() +parser.add_argument('--install') +parser.add_argument('--extra-extension-path', action="append", default=[]) +parser.add_argument('--name') +parser.add_argument('--builddir') +parser.add_argument('--project-version') + + +def run(argv: T.List[str]) -> int: + options, args = parser.parse_known_args(argv) + subenv = os.environ.copy() + + for ext_path in options.extra_extension_path: + subenv['PYTHONPATH'] = subenv.get('PYTHONPATH', '') + ':' + ext_path + + res = subprocess.call(args, cwd=options.builddir, env=subenv) + if res != 0: + return res + + if options.install: + source_dir = os.path.join(options.builddir, options.install) + destdir = os.environ.get('DESTDIR', '') + installdir = destdir_join(destdir, + os.path.join(os.environ['MESON_INSTALL_PREFIX'], + 'share/doc/', options.name, "html")) + + shutil.rmtree(installdir, ignore_errors=True) + shutil.copytree(source_dir, installdir) + return 0 diff --git a/meson/mesonbuild/scripts/meson_exe.py b/meson/mesonbuild/scripts/meson_exe.py new file mode 100644 index 000000000..cd3534cb0 --- /dev/null +++ b/meson/mesonbuild/scripts/meson_exe.py @@ -0,0 +1,125 @@ +# Copyright 2013-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys +import argparse +import pickle +import subprocess +import typing as T +import locale + +from .. import mesonlib +from ..backend.backends import ExecutableSerialisation + +options = None + +def buildparser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description='Custom executable wrapper for Meson. 
Do not run on your own, mmm\'kay?') + parser.add_argument('--unpickle') + parser.add_argument('--capture') + parser.add_argument('--feed') + return parser + +def run_exe(exe: ExecutableSerialisation, extra_env: T.Optional[dict] = None) -> int: + if exe.exe_runner: + if not exe.exe_runner.found(): + raise AssertionError('BUG: Can\'t run cross-compiled exe {!r} with not-found ' + 'wrapper {!r}'.format(exe.cmd_args[0], exe.exe_runner.get_path())) + cmd_args = exe.exe_runner.get_command() + exe.cmd_args + else: + cmd_args = exe.cmd_args + child_env = os.environ.copy() + if extra_env: + child_env.update(extra_env) + if exe.env: + child_env = exe.env.get_env(child_env) + if exe.extra_paths: + child_env['PATH'] = (os.pathsep.join(exe.extra_paths + ['']) + + child_env['PATH']) + if exe.exe_runner and mesonlib.substring_is_in_list('wine', exe.exe_runner.get_command()): + child_env['WINEPATH'] = mesonlib.get_wine_shortpath( + exe.exe_runner.get_command(), + ['Z:' + p for p in exe.extra_paths] + child_env.get('WINEPATH', '').split(';') + ) + + stdin = None + if exe.feed: + stdin = open(exe.feed, 'rb') + + pipe = subprocess.PIPE + if exe.verbose: + assert not exe.capture, 'Cannot capture and print to console at the same time' + pipe = None + + p = subprocess.Popen(cmd_args, env=child_env, cwd=exe.workdir, + close_fds=False, stdin=stdin, stdout=pipe, stderr=pipe) + stdout, stderr = p.communicate() + + if stdin is not None: + stdin.close() + + if p.returncode == 0xc0000135: + # STATUS_DLL_NOT_FOUND on Windows indicating a common problem that is otherwise hard to diagnose + raise FileNotFoundError('due to missing DLLs') + + if p.returncode != 0: + if exe.pickled: + print(f'while executing {cmd_args!r}') + if exe.verbose: + return p.returncode + encoding = locale.getpreferredencoding() + if not exe.capture: + print('--- stdout ---') + print(stdout.decode(encoding=encoding, errors='replace')) + print('--- stderr ---') + print(stderr.decode(encoding=encoding, errors='replace')) + return p.returncode + + if exe.capture: + skip_write = False + try: + with open(exe.capture, 'rb') as cur: + skip_write = cur.read() == stdout + except OSError: + pass + if not skip_write: + with open(exe.capture, 'wb') as output: + output.write(stdout) + + return 0 + +def run(args: T.List[str]) -> int: + global options + parser = buildparser() + options, cmd_args = parser.parse_known_args(args) + # argparse supports double dash to separate options and positional arguments, + # but the user has to remove it manually. + if cmd_args and cmd_args[0] == '--': + cmd_args = cmd_args[1:] + if not options.unpickle and not cmd_args: + parser.error('either --unpickle or executable and arguments are required') + if options.unpickle: + if cmd_args or options.capture or options.feed: + parser.error('no other arguments can be used with --unpickle') + with open(options.unpickle, 'rb') as f: + exe = pickle.load(f) + exe.pickled = True + else: + exe = ExecutableSerialisation(cmd_args, capture=options.capture, feed=options.feed) + + return run_exe(exe) + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/msgfmthelper.py b/meson/mesonbuild/scripts/msgfmthelper.py new file mode 100644 index 000000000..3ddc9e6a9 --- /dev/null +++ b/meson/mesonbuild/scripts/msgfmthelper.py @@ -0,0 +1,37 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import subprocess +import os +import typing as T + +parser = argparse.ArgumentParser() +parser.add_argument('input') +parser.add_argument('output') +parser.add_argument('type') +parser.add_argument('podir') +parser.add_argument('--datadirs', default='') +parser.add_argument('args', default=[], metavar='extra msgfmt argument', nargs='*') + + +def run(args: T.List[str]) -> int: + options = parser.parse_args(args) + env = None + if options.datadirs: + env = os.environ.copy() + env.update({'GETTEXTDATADIRS': options.datadirs}) + return subprocess.call(['msgfmt', '--' + options.type, '-d', options.podir, + '--template', options.input, '-o', options.output] + options.args, + env=env) diff --git a/meson/mesonbuild/scripts/regen_checker.py b/meson/mesonbuild/scripts/regen_checker.py new file mode 100644 index 000000000..c96bdc1e5 --- /dev/null +++ b/meson/mesonbuild/scripts/regen_checker.py @@ -0,0 +1,64 @@ +# Copyright 2015-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys, os +import pickle, subprocess +import typing as T +from ..coredata import CoreData +from ..backend.backends import RegenInfo +from ..mesonlib import OptionKey + +# This could also be used for XCode. + +def need_regen(regeninfo: RegenInfo, regen_timestamp: float) -> bool: + for i in regeninfo.depfiles: + curfile = os.path.join(regeninfo.build_dir, i) + curtime = os.stat(curfile).st_mtime + if curtime > regen_timestamp: + return True + # The timestamp file gets automatically deleted by MSBuild during a 'Clean' build. + # We must make sure to recreate it, even if we do not regenerate the solution. + # Otherwise, Visual Studio will always consider the REGEN project out of date. 
+ print("Everything is up-to-date, regeneration of build files is not needed.") + from ..backend.vs2010backend import Vs2010Backend + Vs2010Backend.touch_regen_timestamp(regeninfo.build_dir) + return False + +def regen(regeninfo: RegenInfo, meson_command: T.List[str], backend: str) -> None: + cmd = meson_command + ['--internal', + 'regenerate', + regeninfo.build_dir, + regeninfo.source_dir, + '--backend=' + backend] + subprocess.check_call(cmd) + +def run(args: T.List[str]) -> int: + private_dir = args[0] + dumpfile = os.path.join(private_dir, 'regeninfo.dump') + coredata_file = os.path.join(private_dir, 'coredata.dat') + with open(dumpfile, 'rb') as f: + regeninfo = pickle.load(f) + assert isinstance(regeninfo, RegenInfo) + with open(coredata_file, 'rb') as f: + coredata = pickle.load(f) + assert isinstance(coredata, CoreData) + backend = coredata.get_option(OptionKey('backend')) + assert isinstance(backend, str) + regen_timestamp = os.stat(dumpfile).st_mtime + if need_regen(regeninfo, regen_timestamp): + regen(regeninfo, coredata.meson_command, backend) + return 0 + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/scanbuild.py b/meson/mesonbuild/scripts/scanbuild.py new file mode 100644 index 000000000..bb8e30ce6 --- /dev/null +++ b/meson/mesonbuild/scripts/scanbuild.py @@ -0,0 +1,65 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import subprocess +import shutil +import tempfile +from ..environment import detect_ninja, detect_scanbuild +from ..coredata import get_cmd_line_file, CmdLineFileParser +from ..mesonlib import windows_proof_rmtree +from pathlib import Path +import typing as T +from ast import literal_eval +import os + +def scanbuild(exelist: T.List[str], srcdir: Path, blddir: Path, privdir: Path, logdir: Path, args: T.List[str]) -> int: + # In case of problems leave the temp directory around + # so it can be debugged. 
+ scandir = tempfile.mkdtemp(dir=str(privdir)) + meson_cmd = exelist + args + build_cmd = exelist + ['-o', str(logdir)] + detect_ninja() + ['-C', scandir] + rc = subprocess.call(meson_cmd + [str(srcdir), scandir]) + if rc != 0: + return rc + rc = subprocess.call(build_cmd) + if rc == 0: + windows_proof_rmtree(scandir) + return rc + +def run(args: T.List[str]) -> int: + srcdir = Path(args[0]) + bldpath = Path(args[1]) + blddir = args[1] + meson_cmd = args[2:] + privdir = bldpath / 'meson-private' + logdir = bldpath / 'meson-logs' / 'scanbuild' + shutil.rmtree(str(logdir), ignore_errors=True) + + # if any cross or native files are specified we should use them + cmd = get_cmd_line_file(blddir) + data = CmdLineFileParser() + data.read(cmd) + + if 'cross_file' in data['properties']: + meson_cmd.extend([f'--cross-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['cross_file'])]) + + if 'native_file' in data['properties']: + meson_cmd.extend([f'--native-file={os.path.abspath(f)}' for f in literal_eval(data['properties']['native_file'])]) + + exelist = detect_scanbuild() + if not exelist: + print('Could not execute scan-build "%s"' % ' '.join(exelist)) + return 1 + + return scanbuild(exelist, srcdir, bldpath, privdir, logdir, meson_cmd) diff --git a/meson/mesonbuild/scripts/symbolextractor.py b/meson/mesonbuild/scripts/symbolextractor.py new file mode 100644 index 000000000..17501e28b --- /dev/null +++ b/meson/mesonbuild/scripts/symbolextractor.py @@ -0,0 +1,331 @@ +# Copyright 2013-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This script extracts the symbols of a given shared library +# into a file. If the symbols have not changed, the file is not +# touched. This information is used to skip link steps if the +# ABI has not changed. + +# This file is basically a reimplementation of +# http://cgit.freedesktop.org/libreoffice/core/commit/?id=3213cd54b76bc80a6f0516aac75a48ff3b2ad67c + +import typing as T +import os, sys +from .. import mesonlib +from .. import mlog +from ..mesonlib import Popen_safe +import argparse + +parser = argparse.ArgumentParser() + +parser.add_argument('--cross-host', default=None, dest='cross_host', + help='cross compilation host platform') +parser.add_argument('args', nargs='+') + +TOOL_WARNING_FILE = None +RELINKING_WARNING = 'Relinking will always happen on source changes.' + +def dummy_syms(outfilename: str) -> None: + """Just touch it so relinking happens always.""" + with open(outfilename, 'w', encoding='utf-8'): + pass + +def write_if_changed(text: str, outfilename: str) -> None: + try: + with open(outfilename, encoding='utf-8') as f: + oldtext = f.read() + if text == oldtext: + return + except FileNotFoundError: + pass + with open(outfilename, 'w', encoding='utf-8') as f: + f.write(text) + +def print_tool_warning(tools: T.List[str], msg: str, stderr: T.Optional[str] = None) -> None: + global TOOL_WARNING_FILE + if os.path.exists(TOOL_WARNING_FILE): + return + m = f'{tools!r} {msg}. 
{RELINKING_WARNING}' + if stderr: + m += '\n' + stderr + mlog.warning(m) + # Write it out so we don't warn again + with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'): + pass + +def get_tool(name: str) -> T.List[str]: + evar = name.upper() + if evar in os.environ: + import shlex + return shlex.split(os.environ[evar]) + return [name] + +def call_tool(name: str, args: T.List[str], **kwargs: T.Any) -> str: + tool = get_tool(name) + try: + p, output, e = Popen_safe(tool + args, **kwargs) + except FileNotFoundError: + print_tool_warning(tool, 'not found') + return None + except PermissionError: + print_tool_warning(tool, 'not usable') + return None + if p.returncode != 0: + print_tool_warning(tool, 'does not work', e) + return None + return output + +def call_tool_nowarn(tool: T.List[str], **kwargs: T.Any) -> T.Tuple[str, str]: + try: + p, output, e = Popen_safe(tool, **kwargs) + except FileNotFoundError: + return None, '{!r} not found\n'.format(tool[0]) + except PermissionError: + return None, '{!r} not usable\n'.format(tool[0]) + if p.returncode != 0: + return None, e + return output, None + +def gnu_syms(libfilename: str, outfilename: str) -> None: + # Get the name of the library + output = call_tool('readelf', ['-d', libfilename]) + if not output: + dummy_syms(outfilename) + return + result = [x for x in output.split('\n') if 'SONAME' in x] + assert(len(result) <= 1) + # Get a list of all symbols exported + output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only', + '--format=posix', libfilename]) + if not output: + dummy_syms(outfilename) + return + for line in output.split('\n'): + if not line: + continue + line_split = line.split() + entry = line_split[0:2] + # Store the size of symbols pointing to data objects so we relink + # when those change, which is needed because of copy relocations + # https://github.com/mesonbuild/meson/pull/7132#issuecomment-628353702 + if line_split[1].upper() in ('B', 'G', 'D') and len(line_split) >= 4: + entry += [line_split[3]] + result += [' '.join(entry)] + write_if_changed('\n'.join(result) + '\n', outfilename) + +def solaris_syms(libfilename: str, outfilename: str) -> None: + # gnu_syms() works with GNU nm & readelf, not Solaris nm & elfdump + origpath = os.environ['PATH'] + try: + os.environ['PATH'] = '/usr/gnu/bin:' + origpath + gnu_syms(libfilename, outfilename) + finally: + os.environ['PATH'] = origpath + +def osx_syms(libfilename: str, outfilename: str) -> None: + # Get the name of the library + output = call_tool('otool', ['-l', libfilename]) + if not output: + dummy_syms(outfilename) + return + arr = output.split('\n') + for (i, val) in enumerate(arr): + if 'LC_ID_DYLIB' in val: + match = i + break + result = [arr[match + 2], arr[match + 5]] # Libreoffice stores all 5 lines but the others seem irrelevant. 
+ # Get a list of all symbols exported + output = call_tool('nm', ['--extern-only', '--defined-only', + '--format=posix', libfilename]) + if not output: + dummy_syms(outfilename) + return + result += [' '.join(x.split()[0:2]) for x in output.split('\n')] + write_if_changed('\n'.join(result) + '\n', outfilename) + +def openbsd_syms(libfilename: str, outfilename: str) -> None: + # Get the name of the library + output = call_tool('readelf', ['-d', libfilename]) + if not output: + dummy_syms(outfilename) + return + result = [x for x in output.split('\n') if 'SONAME' in x] + assert(len(result) <= 1) + # Get a list of all symbols exported + output = call_tool('nm', ['-D', '-P', '-g', libfilename]) + if not output: + dummy_syms(outfilename) + return + # U = undefined (cope with the lack of --defined-only option) + result += [' '.join(x.split()[0:2]) for x in output.split('\n') if x and not x.endswith('U ')] + write_if_changed('\n'.join(result) + '\n', outfilename) + +def freebsd_syms(libfilename: str, outfilename: str) -> None: + # Get the name of the library + output = call_tool('readelf', ['-d', libfilename]) + if not output: + dummy_syms(outfilename) + return + result = [x for x in output.split('\n') if 'SONAME' in x] + assert(len(result) <= 1) + # Get a list of all symbols exported + output = call_tool('nm', ['--dynamic', '--extern-only', '--defined-only', + '--format=posix', libfilename]) + if not output: + dummy_syms(outfilename) + return + + result += [' '.join(x.split()[0:2]) for x in output.split('\n')] + write_if_changed('\n'.join(result) + '\n', outfilename) + +def cygwin_syms(impfilename: str, outfilename: str) -> None: + # Get the name of the library + output = call_tool('dlltool', ['-I', impfilename]) + if not output: + dummy_syms(outfilename) + return + result = [output] + # Get the list of all symbols exported + output = call_tool('nm', ['--extern-only', '--defined-only', + '--format=posix', impfilename]) + if not output: + dummy_syms(outfilename) + return + for line in output.split('\n'): + if ' T ' not in line: + continue + result.append(line.split(maxsplit=1)[0]) + write_if_changed('\n'.join(result) + '\n', outfilename) + +def _get_implib_dllname(impfilename: str) -> T.Tuple[T.List[str], str]: + all_stderr = '' + # First try lib.exe, which is provided by MSVC. Then llvm-lib.exe, by LLVM + # for clang-cl. + # + # We cannot call get_tool on `lib` because it will look at the `LIB` env + # var which is the list of library paths MSVC will search for import + # libraries while linking. + for lib in (['lib'], get_tool('llvm-lib')): + output, e = call_tool_nowarn(lib + ['-list', impfilename]) + if output: + # The output is a list of DLLs that each symbol exported by the import + # library is available in. We only build import libraries that point to + # a single DLL, so we can pick any of these. Pick the last one for + # simplicity. Also skip the last line, which is empty. 
+ return output.split('\n')[-2:-1], None + all_stderr += e + # Next, try dlltool.exe which is provided by MinGW + output, e = call_tool_nowarn(get_tool('dlltool') + ['-I', impfilename]) + if output: + return [output], None + all_stderr += e + return ([], all_stderr) + +def _get_implib_exports(impfilename: str) -> T.Tuple[T.List[str], str]: + all_stderr = '' + # Force dumpbin.exe to use en-US so we can parse its output + env = os.environ.copy() + env['VSLANG'] = '1033' + output, e = call_tool_nowarn(get_tool('dumpbin') + ['-exports', impfilename], env=env) + if output: + lines = output.split('\n') + start = lines.index('File Type: LIBRARY') + end = lines.index(' Summary') + return lines[start:end], None + all_stderr += e + # Next, try llvm-nm.exe provided by LLVM, then nm.exe provided by MinGW + for nm in ('llvm-nm', 'nm'): + output, e = call_tool_nowarn(get_tool(nm) + ['--extern-only', '--defined-only', + '--format=posix', impfilename]) + if output: + result = [] + for line in output.split('\n'): + if ' T ' not in line or line.startswith('.text'): + continue + result.append(line.split(maxsplit=1)[0]) + return result, None + all_stderr += e + return ([], all_stderr) + +def windows_syms(impfilename: str, outfilename: str) -> None: + # Get the name of the library + result, e = _get_implib_dllname(impfilename) + if not result: + print_tool_warning(['lib', 'llvm-lib', 'dlltool'], 'do not work or were not found', e) + dummy_syms(outfilename) + return + # Get a list of all symbols exported + symbols, e = _get_implib_exports(impfilename) + if not symbols: + print_tool_warning(['dumpbin', 'llvm-nm', 'nm'], 'do not work or were not found', e) + dummy_syms(outfilename) + return + result += symbols + write_if_changed('\n'.join(result) + '\n', outfilename) + +def gen_symbols(libfilename: str, impfilename: str, outfilename: str, cross_host: str) -> None: + if cross_host is not None: + # In case of cross builds just always relink. In theory we could + # determine the correct toolset, but we would need to use the correct + # `nm`, `readelf`, etc, from the cross info which requires refactoring. + dummy_syms(outfilename) + elif mesonlib.is_linux() or mesonlib.is_hurd(): + gnu_syms(libfilename, outfilename) + elif mesonlib.is_osx(): + osx_syms(libfilename, outfilename) + elif mesonlib.is_openbsd(): + openbsd_syms(libfilename, outfilename) + elif mesonlib.is_freebsd(): + freebsd_syms(libfilename, outfilename) + elif mesonlib.is_windows(): + if os.path.isfile(impfilename): + windows_syms(impfilename, outfilename) + else: + # No import library. Not sure how the DLL is being used, so just + # rebuild everything that links to it every time. + dummy_syms(outfilename) + elif mesonlib.is_cygwin(): + if os.path.isfile(impfilename): + cygwin_syms(impfilename, outfilename) + else: + # No import library. Not sure how the DLL is being used, so just + # rebuild everything that links to it every time. + dummy_syms(outfilename) + elif mesonlib.is_sunos(): + solaris_syms(libfilename, outfilename) + else: + if not os.path.exists(TOOL_WARNING_FILE): + mlog.warning('Symbol extracting has not been implemented for this ' + 'platform. 
' + RELINKING_WARNING) + # Write it out so we don't warn again + with open(TOOL_WARNING_FILE, 'w', encoding='utf-8'): + pass + dummy_syms(outfilename) + +def run(args: T.List[str]) -> int: + global TOOL_WARNING_FILE + options = parser.parse_args(args) + if len(options.args) != 4: + print('symbolextractor.py <shared library file> <import library> <output file>') + sys.exit(1) + privdir = os.path.join(options.args[0], 'meson-private') + TOOL_WARNING_FILE = os.path.join(privdir, 'symbolextractor_tool_warning_printed') + libfile = options.args[1] + impfile = options.args[2] # Only used on Windows + outfile = options.args[3] + gen_symbols(libfile, impfile, outfile, options.cross_host) + return 0 + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/tags.py b/meson/mesonbuild/scripts/tags.py new file mode 100644 index 000000000..9098efb39 --- /dev/null +++ b/meson/mesonbuild/scripts/tags.py @@ -0,0 +1,53 @@ +# Copyright 2019 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +from pathlib import Path +import typing as T + +def ls_as_bytestream() -> bytes: + if os.path.exists('.git'): + return subprocess.run(['git', 'ls-tree', '-r', '--name-only', 'HEAD'], + stdout=subprocess.PIPE).stdout + + files = [str(p) for p in Path('.').glob('**/*') + if not p.is_dir() and + not next((x for x in p.parts if x.startswith('.')), None)] + return '\n'.join(files).encode() + + +def cscope() -> int: + ls = b'\n'.join([b'"%s"' % f for f in ls_as_bytestream().split()]) + return subprocess.run(['cscope', '-v', '-b', '-i-'], input=ls).returncode + + +def ctags() -> int: + ls = ls_as_bytestream() + return subprocess.run(['ctags', '-L-'], input=ls).returncode + + +def etags() -> int: + ls = ls_as_bytestream() + return subprocess.run(['etags', '-'], input=ls).returncode + + +def run(args: T.List[str]) -> int: + tool_name = args[0] + srcdir_name = args[1] + os.chdir(srcdir_name) + assert tool_name in ['cscope', 'ctags', 'etags'] + res = globals()[tool_name]() + assert isinstance(res, int) + return res diff --git a/meson/mesonbuild/scripts/uninstall.py b/meson/mesonbuild/scripts/uninstall.py new file mode 100644 index 000000000..f08490fbd --- /dev/null +++ b/meson/mesonbuild/scripts/uninstall.py @@ -0,0 +1,50 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import typing as T + +logfile = 'meson-logs/install-log.txt' + +def do_uninstall(log: str) -> None: + failures = 0 + successes = 0 + for line in open(log, encoding='utf-8'): + if line.startswith('#'): + continue + fname = line.strip() + try: + if os.path.isdir(fname) and not os.path.islink(fname): + os.rmdir(fname) + else: + os.unlink(fname) + print('Deleted:', fname) + successes += 1 + except Exception as e: + print(f'Could not delete {fname}: {e}.') + failures += 1 + print('\nUninstall finished.\n') + print('Deleted:', successes) + print('Failed:', failures) + print('\nRemember that files created by custom scripts have not been removed.') + +def run(args: T.List[str]) -> int: + if args: + print('Weird error.') + return 1 + if not os.path.exists(logfile): + print('Log file does not exist, no installation has been done.') + return 0 + do_uninstall(logfile) + return 0 diff --git a/meson/mesonbuild/scripts/vcstagger.py b/meson/mesonbuild/scripts/vcstagger.py new file mode 100644 index 000000000..18cf5f7f0 --- /dev/null +++ b/meson/mesonbuild/scripts/vcstagger.py @@ -0,0 +1,44 @@ +# Copyright 2015-2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import sys, os, subprocess, re +import typing as T + +def config_vcs_tag(infile: str, outfile: str, fallback: str, source_dir: str, replace_string: str, regex_selector: str, cmd: T.List[str]) -> None: + try: + output = subprocess.check_output(cmd, cwd=source_dir) + new_string = re.search(regex_selector, output.decode()).group(1).strip() + except Exception: + new_string = fallback + + with open(infile, encoding='utf-8') as f: + new_data = f.read().replace(replace_string, new_string) + if os.path.exists(outfile): + with open(outfile, encoding='utf-8') as f: + needs_update = (f.read() != new_data) + else: + needs_update = True + if needs_update: + with open(outfile, 'w', encoding='utf-8') as f: + f.write(new_data) + + +def run(args: T.List[str]) -> int: + infile, outfile, fallback, source_dir, replace_string, regex_selector = args[0:6] + command = args[6:] + config_vcs_tag(infile, outfile, fallback, source_dir, replace_string, regex_selector, command) + return 0 + +if __name__ == '__main__': + sys.exit(run(sys.argv[1:])) diff --git a/meson/mesonbuild/scripts/yelphelper.py b/meson/mesonbuild/scripts/yelphelper.py new file mode 100644 index 000000000..374104bea --- /dev/null +++ b/meson/mesonbuild/scripts/yelphelper.py @@ -0,0 +1,133 @@ +# Copyright 2016 The Meson development team + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# http://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import subprocess +import shutil +import argparse +from .. import mlog +from ..mesonlib import has_path_sep +from . import destdir_join +from .gettext import read_linguas +import typing as T + +parser = argparse.ArgumentParser() +parser.add_argument('command') +parser.add_argument('--id', dest='project_id') +parser.add_argument('--subdir', dest='subdir') +parser.add_argument('--installdir', dest='install_dir') +parser.add_argument('--sources', dest='sources') +parser.add_argument('--media', dest='media', default='') +parser.add_argument('--langs', dest='langs', default='') +parser.add_argument('--symlinks', type=bool, dest='symlinks', default=False) + +def build_pot(srcdir: str, project_id: str, sources: T.List[str]) -> None: + # Must be relative paths + sources = [os.path.join('C', source) for source in sources] + outfile = os.path.join(srcdir, project_id + '.pot') + subprocess.call(['itstool', '-o', outfile] + sources) + +def update_po(srcdir: str, project_id: str, langs: T.List[str]) -> None: + potfile = os.path.join(srcdir, project_id + '.pot') + for lang in langs: + pofile = os.path.join(srcdir, lang, lang + '.po') + subprocess.call(['msgmerge', '-q', '-o', pofile, pofile, potfile]) + +def build_translations(srcdir: str, blddir: str, langs: T.List[str]) -> None: + for lang in langs: + outdir = os.path.join(blddir, lang) + os.makedirs(outdir, exist_ok=True) + subprocess.call([ + 'msgfmt', os.path.join(srcdir, lang, lang + '.po'), + '-o', os.path.join(outdir, lang + '.gmo') + ]) + +def merge_translations(blddir: str, sources: T.List[str], langs: T.List[str]) -> None: + for lang in langs: + subprocess.call([ + 'itstool', '-m', os.path.join(blddir, lang, lang + '.gmo'), + '-o', os.path.join(blddir, lang) + ] + sources) + +def install_help(srcdir: str, blddir: str, sources: T.List[str], media: T.List[str], langs: T.List[str], install_dir: str, destdir: str, project_id: str, symlinks: bool) -> None: + c_install_dir = os.path.join(install_dir, 'C', project_id) + for lang in langs + ['C']: + indir = destdir_join(destdir, os.path.join(install_dir, lang, project_id)) + os.makedirs(indir, exist_ok=True) + for source in sources: + infile = os.path.join(srcdir if lang == 'C' else blddir, lang, source) + outfile = os.path.join(indir, source) + mlog.log(f'Installing {infile} to {outfile}') + shutil.copy2(infile, outfile) + for m in media: + infile = os.path.join(srcdir, lang, m) + outfile = os.path.join(indir, m) + c_infile = os.path.join(srcdir, 'C', m) + if not os.path.exists(infile): + if not os.path.exists(c_infile): + mlog.warning('Media file "%s" did not exist in C directory' % m) + continue + elif symlinks: + srcfile = os.path.join(c_install_dir, m) + mlog.log(f'Symlinking {outfile} to {srcfile}.') + if has_path_sep(m): + os.makedirs(os.path.dirname(outfile), exist_ok=True) + try: + try: + os.symlink(srcfile, outfile) + except FileExistsError: + os.remove(outfile) + os.symlink(srcfile, outfile) + continue + except (NotImplementedError, OSError): + mlog.warning('Symlinking not supported, falling back to copying') + infile = c_infile + else: + # Lang doesn't have media file so copy it over 'C' one + infile = c_infile + mlog.log(f'Installing {infile} to {outfile}') + if has_path_sep(m): + os.makedirs(os.path.dirname(outfile), exist_ok=True) + shutil.copyfile(infile, outfile) + shutil.copystat(infile, outfile) + +def run(args: T.List[str]) -> int: + options = 
parser.parse_args(args) + langs = options.langs.split('@@') if options.langs else [] + media = options.media.split('@@') if options.media else [] + sources = options.sources.split('@@') + destdir = os.environ.get('DESTDIR', '') + src_subdir = os.path.join(os.environ['MESON_SOURCE_ROOT'], options.subdir) + build_subdir = os.path.join(os.environ['MESON_BUILD_ROOT'], options.subdir) + abs_sources = [os.path.join(src_subdir, 'C', source) for source in sources] + + if not langs: + langs = read_linguas(src_subdir) + + if options.command == 'pot': + build_pot(src_subdir, options.project_id, sources) + elif options.command == 'update-po': + build_pot(src_subdir, options.project_id, sources) + update_po(src_subdir, options.project_id, langs) + elif options.command == 'build': + if langs: + build_translations(src_subdir, build_subdir, langs) + elif options.command == 'install': + install_dir = os.path.join(os.environ['MESON_INSTALL_PREFIX'], options.install_dir) + if langs: + build_translations(src_subdir, build_subdir, langs) + merge_translations(build_subdir, abs_sources, langs) + install_help(src_subdir, build_subdir, sources, media, langs, install_dir, + destdir, options.project_id, options.symlinks) + return 0 |
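A minimal sketch of how one of these helper scripts can be driven stand-alone, in the same way Meson's internal script runner dispatches them (compare the '--internal regenerate' command assembled in regen_checker.py above). This is not part of the patch: the package name "demo", the "po" subdirectory and the two root paths are illustrative assumptions, and it presumes the bundled meson/ directory is on PYTHONPATH so that mesonbuild.scripts.gettext is importable.

import os
from mesonbuild.scripts import gettext

# The helper reads the source and build roots from the environment
# (see run() and run_potgen() in gettext.py); these paths are placeholders.
os.environ['MESON_SOURCE_ROOT'] = '/path/to/source'
os.environ['MESON_BUILD_ROOT'] = '/path/to/build'

# Regenerate demo.pot from po/POTFILES(.in) via xgettext; run() returns a
# shell-style exit code (non-zero if POTFILES is missing or xgettext fails).
ret = gettext.run(['pot', '--pkgname=demo', '--subdir=po'])
print('gettext helper exited with', ret)

The same pattern should apply to the other scripts added by this commit: each exposes a run(args) function returning an int, so Meson can invoke them in-process or they can be exercised directly in tests without spawning a separate interpreter.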